author    Dmitry Kopylov <kopylovd@gmail.com>    2022-02-10 16:48:18 +0300
committer Daniil Cherednik <dcherednik@yandex-team.ru>    2022-02-10 16:48:18 +0300
commit    7230275728d34873cba1ba78bb68669b0c5faa31 (patch)
tree      b222e5ac2e2e98872661c51ccceee5da0d291e13
parent    b2f5101486cc0de2e979c8ba9ada2109785bf5fd (diff)
download  ydb-7230275728d34873cba1ba78bb68669b0c5faa31.tar.gz
Restoring authorship annotation for Dmitry Kopylov <kopylovd@gmail.com>. Commit 2 of 2.
-rw-r--r--  build/plugins/_test_const.py | 64
-rw-r--r--  build/plugins/gobuild.py | 2
-rw-r--r--  build/plugins/pybuild.py | 14
-rw-r--r--  build/plugins/res.py | 4
-rw-r--r--  build/plugins/ya.make | 8
-rw-r--r--  build/plugins/ytest.py | 534
-rw-r--r--  build/plugins/ytest2.py | 2
-rw-r--r--  build/sanitize-blacklist.txt | 2
-rw-r--r--  build/scripts/append_file.py | 14
-rwxr-xr-x  build/scripts/build_mn.py | 2
-rwxr-xr-x  build/scripts/build_pln_header.py | 2
-rwxr-xr-x  build/scripts/cat.py | 18
-rw-r--r--  build/scripts/check_config_h.py | 8
-rw-r--r--  build/scripts/compile_cuda.py | 18
-rw-r--r--  build/scripts/coverage-info.py | 52
-rw-r--r--  build/scripts/f2c.py | 30
-rwxr-xr-x  build/scripts/fetch_from_sandbox.py | 10
-rw-r--r--  build/scripts/fs_tools.py | 62
-rw-r--r--  build/scripts/gen_mx_table.py | 32
-rw-r--r--  build/scripts/gen_py_reg.py | 20
-rw-r--r--  build/scripts/link_fat_obj.py | 16
-rwxr-xr-x  build/scripts/mkver.py | 16
-rw-r--r--  build/scripts/perl_wrapper.py | 30
-rw-r--r--  build/scripts/preprocess.py | 4
-rw-r--r--  build/scripts/resolve_java_srcs.py | 20
-rw-r--r--  build/scripts/run_llvm_dsymutil.py | 10
-rwxr-xr-x  build/scripts/run_tool.py | 10
-rw-r--r--  build/scripts/wrapper.py | 10
-rw-r--r--  build/scripts/xargs.py | 20
-rw-r--r--  build/scripts/ya.make | 84
-rw-r--r--  build/scripts/yield_line.py | 8
-rw-r--r--  build/ya.conf.json | 30
-rw-r--r--  build/ymake.core.conf | 84
-rw-r--r--  contrib/python/PyHamcrest/ya.make | 4
-rw-r--r--  contrib/python/PyYAML/py2/ya.make | 4
-rw-r--r--  contrib/python/PyYAML/py3/ya.make | 4
-rw-r--r--  contrib/python/PyYAML/ya.make | 4
-rw-r--r--  contrib/python/ipython/py2/bin/ya.make | 2
-rw-r--r--  contrib/python/jedi/ya.make | 4
-rw-r--r--  contrib/python/pexpect/ya.make | 2
-rw-r--r--  contrib/python/prompt-toolkit/ya.make | 4
-rw-r--r--  contrib/python/py/LICENSE | 36
-rw-r--r--  contrib/python/py/py/__init__.py | 270
-rw-r--r--  contrib/python/py/py/__metainfo.py | 4
-rw-r--r--  contrib/python/py/py/_builtin.py | 276
-rw-r--r--  contrib/python/py/py/_code/__init__.py | 2
-rw-r--r--  contrib/python/py/py/_code/_assertionnew.py | 636
-rw-r--r--  contrib/python/py/py/_code/_assertionold.py | 1108
-rw-r--r--  contrib/python/py/py/_code/_py2traceback.py | 158
-rw-r--r--  contrib/python/py/py/_code/assertion.py | 178
-rw-r--r--  contrib/python/py/py/_code/code.py | 1554
-rw-r--r--  contrib/python/py/py/_code/source.py | 810
-rw-r--r--  contrib/python/py/py/_error.py | 172
-rw-r--r--  contrib/python/py/py/_io/__init__.py | 2
-rw-r--r--  contrib/python/py/py/_io/capture.py | 736
-rw-r--r--  contrib/python/py/py/_io/saferepr.py | 142
-rw-r--r--  contrib/python/py/py/_io/terminalwriter.py | 676
-rw-r--r--  contrib/python/py/py/_log/__init__.py | 4
-rw-r--r--  contrib/python/py/py/_log/log.py | 376
-rw-r--r--  contrib/python/py/py/_log/warning.py | 146
-rw-r--r--  contrib/python/py/py/_path/__init__.py | 2
-rw-r--r--  contrib/python/py/py/_path/cacheutil.py | 228
-rw-r--r--  contrib/python/py/py/_path/common.py | 782
-rw-r--r--  contrib/python/py/py/_path/local.py | 1654
-rw-r--r--  contrib/python/py/py/_path/svnurl.py | 758
-rw-r--r--  contrib/python/py/py/_path/svnwc.py | 2466
-rw-r--r--  contrib/python/py/py/_process/__init__.py | 2
-rw-r--r--  contrib/python/py/py/_process/cmdexec.py | 98
-rw-r--r--  contrib/python/py/py/_process/forkedfunc.py | 240
-rw-r--r--  contrib/python/py/py/_process/killproc.py | 46
-rw-r--r--  contrib/python/py/py/_std.py | 36
-rw-r--r--  contrib/python/py/py/_xmlgen.py | 486
-rw-r--r--  contrib/python/py/py/test.py | 20
-rw-r--r--  contrib/python/requests/requests/__init__.py | 102
-rw-r--r--  contrib/python/requests/requests/adapters.py | 764
-rw-r--r--  contrib/python/requests/requests/api.py | 190
-rw-r--r--  contrib/python/requests/requests/auth.py | 352
-rw-r--r--  contrib/python/requests/requests/certs.py | 22
-rw-r--r--  contrib/python/requests/requests/compat.py | 108
-rw-r--r--  contrib/python/requests/requests/cookies.py | 762
-rw-r--r--  contrib/python/requests/requests/exceptions.py | 176
-rw-r--r--  contrib/python/requests/requests/hooks.py | 60
-rw-r--r--  contrib/python/requests/requests/models.py | 1422
-rw-r--r--  contrib/python/requests/requests/sessions.py | 960
-rw-r--r--  contrib/python/requests/requests/status_codes.py | 166
-rw-r--r--  contrib/python/requests/requests/structures.py | 192
-rw-r--r--  contrib/python/requests/requests/utils.py | 1132
-rw-r--r--  contrib/python/requests/ya.make | 28
-rw-r--r--  contrib/python/six/six.py | 244
-rw-r--r--  contrib/python/six/ya.make | 4
-rw-r--r--  contrib/python/traitlets/ya.make | 4
-rw-r--r--  contrib/python/ya.make | 28
-rw-r--r--  contrib/tools/ya.make | 2
-rw-r--r--  library/cpp/blockcodecs/ut/ya.make | 4
-rw-r--r--  library/cpp/codecs/static/tools/tests/ya.make | 4
-rw-r--r--  library/cpp/messagebus/rain_check/test/TestRainCheck.py | 4
-rw-r--r--  library/cpp/messagebus/test/TestMessageBus.py | 4
-rw-r--r--  library/cpp/testing/common/env.cpp | 8
-rw-r--r--  library/cpp/testing/unittest/fat/test_port_manager.cpp | 2
-rw-r--r--  library/cpp/testing/unittest/registar.cpp | 56
-rw-r--r--  library/cpp/testing/unittest/registar.h | 18
-rw-r--r--  library/cpp/testing/unittest/utmain.cpp | 252
-rw-r--r--  library/python/certifi/certifi/__init__.py | 2
-rw-r--r--  library/python/certifi/certifi/binary.py | 4
-rw-r--r--  library/python/filelock/__init__.py | 102
-rw-r--r--  library/python/filelock/ut/lib/test_filelock.py | 24
-rw-r--r--  library/python/find_root/__init__.py | 38
-rw-r--r--  library/python/find_root/ya.make | 8
-rw-r--r--  library/python/fs/__init__.py | 94
-rw-r--r--  library/python/fs/test/test_fs.py | 350
-rw-r--r--  library/python/fs/test/ya.make | 26
-rw-r--r--  library/python/fs/ya.make | 28
-rw-r--r--  library/python/pytest/allure/conftest.py | 16
-rw-r--r--  library/python/pytest/empty/main.c | 14
-rw-r--r--  library/python/pytest/empty/ya.make | 10
-rw-r--r--  library/python/pytest/main.py | 38
-rw-r--r--  library/python/pytest/plugins/collection.py | 70
-rw-r--r--  library/python/pytest/plugins/conftests.py | 38
-rw-r--r--  library/python/pytest/plugins/fixtures.py | 120
-rw-r--r--  library/python/pytest/plugins/ya.make | 28
-rw-r--r--  library/python/pytest/plugins/ya.py | 862
-rw-r--r--  library/python/pytest/pytest.yatest.ini | 4
-rw-r--r--  library/python/pytest/ya.make | 28
-rw-r--r--  library/python/pytest/yatest_tools.py | 262
-rw-r--r--  library/python/strings/strings.py | 52
-rw-r--r--  library/python/testing/import_test/import_test.py | 6
-rw-r--r--  library/python/testing/recipe/__init__.py | 32
-rw-r--r--  library/python/testing/ya.make | 24
-rw-r--r--  library/python/testing/yatest_common/ya.make | 42
-rw-r--r--  library/python/testing/yatest_common/yatest/__init__.py | 4
-rw-r--r--  library/python/testing/yatest_common/yatest/common/benchmark.py | 34
-rw-r--r--  library/python/testing/yatest_common/yatest/common/canonical.py | 268
-rw-r--r--  library/python/testing/yatest_common/yatest/common/environment.py | 2
-rw-r--r--  library/python/testing/yatest_common/yatest/common/errors.py | 30
-rw-r--r--  library/python/testing/yatest_common/yatest/common/legacy.py | 22
-rw-r--r--  library/python/testing/yatest_common/yatest/common/path.py | 44
-rw-r--r--  library/python/testing/yatest_common/yatest/common/process.py | 576
-rw-r--r--  library/python/testing/yatest_common/yatest/common/runtime.py | 322
-rw-r--r--  library/python/testing/yatest_common/yatest/common/runtime_java.py | 10
-rw-r--r--  library/python/testing/yatest_common/yatest/common/tags.py | 10
-rw-r--r--  library/python/testing/yatest_lib/external.py | 304
-rw-r--r--  library/python/testing/yatest_lib/tools.py | 14
-rw-r--r--  library/python/testing/yatest_lib/ya.make | 16
-rw-r--r--  library/python/windows/__init__.py | 42
-rw-r--r--  library/python/ya.make | 22
-rw-r--r--  tools/archiver/tests/ya.make | 14
-rw-r--r--  tools/ya.make | 2
-rw-r--r--  util/system/ut/ya.make | 4
-rwxr-xr-x  ydb/library/yql/parser/proto_ast/gen_parser.sh | 4
-rw-r--r--  ydb/tests/functional/hive/ya.make | 4
150 files changed, 13256 insertions(+), 13256 deletions(-)
diff --git a/build/plugins/_test_const.py b/build/plugins/_test_const.py
index af591ccc08..0d03cc3d17 100644
--- a/build/plugins/_test_const.py
+++ b/build/plugins/_test_const.py
@@ -1,7 +1,7 @@
# coding: utf-8
import re
import sys
-
+
RESTART_TEST_INDICATOR = '##restart-test##'
INFRASTRUCTURE_ERROR_INDICATOR = '##infrastructure-error##'
@@ -120,16 +120,16 @@ class TestRequirementsConstants(Enum):
class TestSize(Enum):
- Small = 'small'
- Medium = 'medium'
- Large = 'large'
-
+ Small = 'small'
+ Medium = 'medium'
+ Large = 'large'
+
DefaultTimeouts = {
- Small: 60,
- Medium: 600,
- Large: 3600,
- }
-
+ Small: 60,
+ Medium: 600,
+ Large: 3600,
+ }
+
DefaultPriorities = {
Small: -1,
Medium: -2,
@@ -178,22 +178,22 @@ class TestSize(Enum):
},
}
- @classmethod
- def sizes(cls):
+ @classmethod
+ def sizes(cls):
return cls.DefaultTimeouts.keys()
-
- @classmethod
- def get_default_timeout(cls, size):
+
+ @classmethod
+ def get_default_timeout(cls, size):
if size in cls.DefaultTimeouts:
return cls.DefaultTimeouts[size]
- raise Exception("Unknown test size '{}'".format(size))
-
+ raise Exception("Unknown test size '{}'".format(size))
+
@classmethod
def get_default_priorities(cls, size):
if size in cls.DefaultPriorities:
return cls.DefaultPriorities[size]
raise Exception("Unknown test size '{}'".format(size))
-
+
@classmethod
def get_default_requirements(cls, size):
if size in cls.DefaultRequirements:
@@ -208,25 +208,25 @@ class TestSize(Enum):
class TestRunExitCode(Enum):
- Skipped = 2
+ Skipped = 2
Failed = 3
- TimeOut = 10
+ TimeOut = 10
InfrastructureError = 12
-
-
+
+
class YaTestTags(Enum):
- Manual = "ya:manual"
- Notags = "ya:notags"
- Norestart = "ya:norestart"
- Dirty = "ya:dirty"
- Noretries = "ya:noretries"
- Fat = "ya:fat"
- RunWithAsserts = "ya:relwithdebinfo"
- Privileged = "ya:privileged"
+ Manual = "ya:manual"
+ Notags = "ya:notags"
+ Norestart = "ya:norestart"
+ Dirty = "ya:dirty"
+ Noretries = "ya:noretries"
+ Fat = "ya:fat"
+ RunWithAsserts = "ya:relwithdebinfo"
+ Privileged = "ya:privileged"
ExoticPlatform = "ya:exotic_platform"
NotAutocheck = "ya:not_autocheck"
-
-
+
+
class Status(object):
GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(1, 8)
SKIPPED = -100
diff --git a/build/plugins/gobuild.py b/build/plugins/gobuild.py
index 06d9ce4acb..8df96ebc55 100644
--- a/build/plugins/gobuild.py
+++ b/build/plugins/gobuild.py
@@ -172,7 +172,7 @@ def on_go_process_srcs(unit):
basedirs[basedir].append(f)
for basedir in basedirs:
unit.onadd_check(['gofmt'] + basedirs[basedir])
-
+
# Go coverage instrumentation (NOTE! go_files list is modified here)
if is_test_module and unit.enabled('GO_TEST_COVER'):
cover_info = []
diff --git a/build/plugins/pybuild.py b/build/plugins/pybuild.py
index 51babe298e..f32a2d39a0 100644
--- a/build/plugins/pybuild.py
+++ b/build/plugins/pybuild.py
@@ -110,13 +110,13 @@ def has_pyx(args):
def get_srcdir(path, unit):
return rootrel_arc_src(path, unit)[:-len(path)].rstrip('/')
-def add_python_lint_checks(unit, py_ver, files):
+def add_python_lint_checks(unit, py_ver, files):
def get_resolved_files():
- resolved_files = []
- for path in files:
- resolved = unit.resolve_arc_path([path])
+ resolved_files = []
+ for path in files:
+ resolved = unit.resolve_arc_path([path])
if resolved.startswith('$S'): # path was resolved as source file.
- resolved_files.append(resolved)
+ resolved_files.append(resolved)
return resolved_files
if unit.get('LINT_LEVEL_VALUE') == "none":
@@ -141,7 +141,7 @@ def add_python_lint_checks(unit, py_ver, files):
resolved_files = get_resolved_files()
flake8_cfg = 'build/config/tests/flake8/flake8.conf'
unit.onadd_check(["flake8.py{}".format(py_ver), flake8_cfg] + resolved_files)
-
+
def is_py3(unit):
return unit.get("PYTHON3") == "yes"
@@ -532,7 +532,7 @@ def _check_test_srcs(*args):
def ontest_srcs(unit, *args):
_check_test_srcs(*args)
if unit.get('PY3TEST_BIN' if is_py3(unit) else 'PYTEST_BIN') != 'no':
- unit.onpy_srcs(["NAMESPACE", "__tests__"] + list(args))
+ unit.onpy_srcs(["NAMESPACE", "__tests__"] + list(args))
def onpy_doctests(unit, *args):
diff --git a/build/plugins/res.py b/build/plugins/res.py
index 25fba06383..a937caba81 100644
--- a/build/plugins/res.py
+++ b/build/plugins/res.py
@@ -48,8 +48,8 @@ def onfat_resource(unit, *args):
unit.onrun_program(['tools/rescompiler', output] + part_args + inputs + ['OUT_NOAUTO', output])
unit.onsrcs(['GLOBAL', output])
-
-
+
+
def onresource_files(unit, *args):
"""
@usage: RESOURCE_FILES([DONT_PARSE] [PREFIX {prefix}] [STRIP prefix_to_strip] {path})
diff --git a/build/plugins/ya.make b/build/plugins/ya.make
index 1a7623643e..4ad5f5988e 100644
--- a/build/plugins/ya.make
+++ b/build/plugins/ya.make
@@ -5,15 +5,15 @@ PY2_LIBRARY()
PY_SRCS(
code_generator.py
ssqls.py
- swig.py
+ swig.py
- _common.py
+ _common.py
_custom_command.py
_import_wrapper.py
_requirements.py
_test_const.py
-)
-
+)
+
PEERDIR(build/plugins/lib)
END()
diff --git a/build/plugins/ytest.py b/build/plugins/ytest.py
index 610a69e41e..8970837f0f 100644
--- a/build/plugins/ytest.py
+++ b/build/plugins/ytest.py
@@ -1,21 +1,21 @@
-import os
-import re
-import sys
+import os
+import re
+import sys
import json
import copy
import base64
import shlex
-import _common
+import _common
import lib._metric_resolvers as mr
import _test_const as consts
import _requirements as reqs
import StringIO
import subprocess
-import collections
+import collections
+
+import ymake
-import ymake
-
MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
MDS_SHEME = 'mds'
CANON_DATA_DIR_NAME = 'canondata'
@@ -26,7 +26,7 @@ CANON_SB_VAULT_REGEX = re.compile(r"\w+=(value|file):[-\w]+:\w+")
CANON_SBR_RESOURCE_REGEX = re.compile(r'(sbr:/?/?(\d+))')
VALID_NETWORK_REQUIREMENTS = ("full", "restricted")
-VALID_DNS_REQUIREMENTS = ("default", "local", "dns64")
+VALID_DNS_REQUIREMENTS = ("default", "local", "dns64")
BLOCK_SEPARATOR = '============================================================='
SPLIT_FACTOR_MAX_VALUE = 1000
SPLIT_FACTOR_TEST_FILES_MAX_VALUE = 4250
@@ -122,27 +122,27 @@ def validate_requirement(req_name, value, test_size, is_force_sandbox, in_autoch
def validate_test(unit, kw):
- def get_list(key):
- return deserialize_list(kw.get(key, ""))
-
+ def get_list(key):
+ return deserialize_list(kw.get(key, ""))
+
valid_kw = copy.deepcopy(kw)
- errors = []
+ errors = []
warnings = []
-
+
if valid_kw.get('SCRIPT-REL-PATH') == 'boost.test':
project_path = valid_kw.get('BUILD-FOLDER-PATH', "")
if not project_path.startswith(("contrib", "mail", "maps", "tools/idl", "metrika", "devtools", "mds", "yandex_io", "smart_devices")):
- errors.append("BOOSTTEST is not allowed here")
+ errors.append("BOOSTTEST is not allowed here")
elif valid_kw.get('SCRIPT-REL-PATH') == 'gtest':
project_path = valid_kw.get('BUILD-FOLDER-PATH', "")
if not project_path.startswith(("contrib", "devtools", "mail", "mds", "taxi")):
errors.append("GTEST_UGLY is not allowed here, use GTEST instead")
-
+
size_timeout = collections.OrderedDict(sorted(consts.TestSize.DefaultTimeouts.items(), key=lambda t: t[1]))
-
+
size = valid_kw.get('SIZE', consts.TestSize.Small).lower()
# TODO: use set instead list
- tags = get_list("TAG")
+ tags = get_list("TAG")
requirements_orig = get_list("REQUIREMENTS")
in_autocheck = "ya:not_autocheck" not in tags and 'ya:manual' not in tags
is_fat = 'ya:fat' in tags
@@ -150,15 +150,15 @@ def validate_test(unit, kw):
is_ytexec_run = 'ya:yt' in tags
is_fuzzing = valid_kw.get("FUZZING", False)
is_kvm = 'kvm' in requirements_orig
- requirements = {}
+ requirements = {}
list_requirements = ('sb_vault')
for req in requirements_orig:
if req in ('kvm', ):
requirements[req] = str(True)
continue
- if ":" in req:
- req_name, req_value = req.split(":", 1)
+ if ":" in req:
+ req_name, req_value = req.split(":", 1)
if req_name in list_requirements:
requirements[req_name] = ",".join(filter(None, [requirements.get(req_name), req_value]))
else:
@@ -171,9 +171,9 @@ def validate_test(unit, kw):
requirements[req_name] = req_value
else:
requirements[req_name] = req_value
- else:
- errors.append("Invalid requirement syntax [[imp]]{}[[rst]]: expect <requirement>:<value>".format(req))
-
+ else:
+ errors.append("Invalid requirement syntax [[imp]]{}[[rst]]: expect <requirement>:<value>".format(req))
+
if not errors:
for req_name, req_value in requirements.items():
error_msg = validate_requirement(req_name, req_value, size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run)
@@ -207,29 +207,29 @@ def validate_test(unit, kw):
if 'ya:privileged' in tags and 'container' not in requirements:
errors.append("Only tests with 'container' requirement can have 'ya:privileged' tag")
- if size not in size_timeout:
+ if size not in size_timeout:
errors.append("Unknown test size: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(size.upper(), ", ".join([sz.upper() for sz in size_timeout.keys()])))
- else:
- try:
+ else:
+ try:
timeout = int(valid_kw.get('TEST-TIMEOUT', size_timeout[size]) or size_timeout[size])
script_rel_path = valid_kw.get('SCRIPT-REL-PATH')
- if timeout < 0:
- raise Exception("Timeout must be > 0")
+ if timeout < 0:
+ raise Exception("Timeout must be > 0")
if size_timeout[size] < timeout and in_autocheck and script_rel_path != 'java.style':
- suggested_size = None
- for s, t in size_timeout.items():
- if timeout <= t:
- suggested_size = s
- break
-
- if suggested_size:
+ suggested_size = None
+ for s, t in size_timeout.items():
+ if timeout <= t:
+ suggested_size = s
+ break
+
+ if suggested_size:
suggested_size = ", suggested size: [[imp]]{}[[rst]]".format(suggested_size.upper())
- else:
- suggested_size = ""
+ else:
+ suggested_size = ""
errors.append("Max allowed timeout for test size [[imp]]{}[[rst]] is [[imp]]{} sec[[rst]]{}".format(size.upper(), size_timeout[size], suggested_size))
- except Exception as e:
- errors.append("Error when parsing test timeout: [[bad]]{}[[rst]]".format(e))
-
+ except Exception as e:
+ errors.append("Error when parsing test timeout: [[bad]]{}[[rst]]".format(e))
+
requiremtens_list = []
for req_name, req_value in requirements.iteritems():
requiremtens_list.append(req_name + ":" + req_value)
@@ -265,46 +265,46 @@ def validate_test(unit, kw):
tags.append("ya:yt_research_pool")
if valid_kw.get("USE_ARCADIA_PYTHON") == "yes" and valid_kw.get("SCRIPT-REL-PATH") == "py.test":
- errors.append("PYTEST_SCRIPT is deprecated")
-
+ errors.append("PYTEST_SCRIPT is deprecated")
+
partition = valid_kw.get('TEST_PARTITION', 'SEQUENTIAL')
if partition not in PARTITION_MODS:
raise ValueError('partition mode should be one of {}, detected: {}'.format(PARTITION_MODS, partition))
if valid_kw.get('SPLIT-FACTOR'):
if valid_kw.get('FORK-MODE') == 'none':
- errors.append('SPLIT_FACTOR must be use with FORK_TESTS() or FORK_SUBTESTS() macro')
+ errors.append('SPLIT_FACTOR must be use with FORK_TESTS() or FORK_SUBTESTS() macro')
value = 1
- try:
+ try:
value = int(valid_kw.get('SPLIT-FACTOR'))
- if value <= 0:
- raise ValueError("must be > 0")
+ if value <= 0:
+ raise ValueError("must be > 0")
if value > SPLIT_FACTOR_MAX_VALUE:
raise ValueError("the maximum allowed value is {}".format(SPLIT_FACTOR_MAX_VALUE))
- except ValueError as e:
- errors.append('Incorrect SPLIT_FACTOR value: {}'.format(e))
-
+ except ValueError as e:
+ errors.append('Incorrect SPLIT_FACTOR value: {}'.format(e))
+
if valid_kw.get('FORK-TEST-FILES') and size != consts.TestSize.Large:
nfiles = count_entries(valid_kw.get('TEST-FILES'))
if nfiles * value > SPLIT_FACTOR_TEST_FILES_MAX_VALUE:
errors.append('Too much chunks generated:{} (limit: {}). Remove FORK_TEST_FILES() macro or reduce SPLIT_FACTOR({}).'.format(
nfiles * value, SPLIT_FACTOR_TEST_FILES_MAX_VALUE, value))
- unit_path = get_norm_unit_path(unit)
+ unit_path = get_norm_unit_path(unit)
if not is_fat and "ya:noretries" in tags and not is_ytexec_run \
- and not unit_path.startswith("devtools/") \
- and not unit_path.startswith("infra/kernel/") \
- and not unit_path.startswith("yt/python/yt") \
- and not unit_path.startswith("infra/yp_dns_api/tests") \
- and not unit_path.startswith("yp/tests"):
- errors.append("Only LARGE tests can have 'ya:noretries' tag")
-
+ and not unit_path.startswith("devtools/") \
+ and not unit_path.startswith("infra/kernel/") \
+ and not unit_path.startswith("yt/python/yt") \
+ and not unit_path.startswith("infra/yp_dns_api/tests") \
+ and not unit_path.startswith("yp/tests"):
+ errors.append("Only LARGE tests can have 'ya:noretries' tag")
+
if errors:
return None, warnings, errors
-
+
return valid_kw, warnings, errors
-
+
def get_norm_unit_path(unit, extra=None):
path = _common.strip_roots(unit.path())
@@ -320,25 +320,25 @@ def dump_test(unit, kw):
for e in errors:
ymake.report_configure_error(e)
if valid_kw is None:
- return None
+ return None
string_handler = StringIO.StringIO()
for k, v in valid_kw.iteritems():
print >>string_handler, k + ': ' + v
- print >>string_handler, BLOCK_SEPARATOR
+ print >>string_handler, BLOCK_SEPARATOR
data = string_handler.getvalue()
string_handler.close()
return data
-def serialize_list(lst):
- lst = filter(None, lst)
- return '\"' + ';'.join(lst) + '\"' if lst else ''
-
-
-def deserialize_list(val):
- return filter(None, val.replace('"', "").split(";"))
-
-
+def serialize_list(lst):
+ lst = filter(None, lst)
+ return '\"' + ';'.join(lst) + '\"' if lst else ''
+
+
+def deserialize_list(val):
+ return filter(None, val.replace('"', "").split(";"))
+
+
def count_entries(x):
# see (de)serialize_list
assert x is None or isinstance(x, str), type(x)
@@ -347,11 +347,11 @@ def count_entries(x):
return x.count(";") + 1
-def get_values_list(unit, key):
+def get_values_list(unit, key):
res = map(str.strip, (unit.get(key) or '').replace('$' + key, '').strip().split())
- return [r for r in res if r and r not in ['""', "''"]]
-
-
+ return [r for r in res if r and r not in ['""', "''"]]
+
+
def get_norm_paths(unit, key):
# return paths without trailing (back)slash
return [x.rstrip('\\/') for x in get_values_list(unit, key)]
@@ -413,18 +413,18 @@ def get_project_tidy_config(unit):
return get_default_tidy_config(unit)
-def onadd_ytest(unit, *args):
+def onadd_ytest(unit, *args):
keywords = {"DEPENDS": -1, "DATA": -1, "TIMEOUT": 1, "FORK_MODE": 1, "SPLIT_FACTOR": 1,
"FORK_SUBTESTS": 0, "FORK_TESTS": 0}
flat_args, spec_args = _common.sort_by_keywords(keywords, args)
-
- test_data = sorted(_common.filter_out_by_keyword(spec_args.get('DATA', []) + get_norm_paths(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED'))
-
+
+ test_data = sorted(_common.filter_out_by_keyword(spec_args.get('DATA', []) + get_norm_paths(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED'))
+
if flat_args[1] == "fuzz.test":
unit.ondata("arcadia/fuzzing/{}/corpus.json".format(get_norm_unit_path(unit)))
- elif flat_args[1] == "go.test":
- data, _ = get_canonical_test_resources(unit)
- test_data += data
+ elif flat_args[1] == "go.test":
+ data, _ = get_canonical_test_resources(unit)
+ test_data += data
elif flat_args[1] == "coverage.extractor" and not match_coverage_extractor_requirements(unit):
# XXX
# Current ymake implementation doesn't allow to call macro inside the 'when' body
@@ -473,39 +473,39 @@ def onadd_ytest(unit, *args):
unit_path = get_norm_unit_path(unit)
- test_record = {
- 'TEST-NAME': flat_args[0],
- 'SCRIPT-REL-PATH': flat_args[1],
- 'TESTED-PROJECT-NAME': unit.name(),
- 'TESTED-PROJECT-FILENAME': unit.filename(),
+ test_record = {
+ 'TEST-NAME': flat_args[0],
+ 'SCRIPT-REL-PATH': flat_args[1],
+ 'TESTED-PROJECT-NAME': unit.name(),
+ 'TESTED-PROJECT-FILENAME': unit.filename(),
'SOURCE-FOLDER-PATH': unit_path,
# TODO get rid of BUILD-FOLDER-PATH
'BUILD-FOLDER-PATH': unit_path,
'BINARY-PATH': "{}/{}".format(unit_path, unit.filename()),
'GLOBAL-LIBRARY-PATH': unit.global_filename(),
- 'CUSTOM-DEPENDENCIES': ' '.join(spec_args.get('DEPENDS', []) + get_values_list(unit, 'TEST_DEPENDS_VALUE')),
+ 'CUSTOM-DEPENDENCIES': ' '.join(spec_args.get('DEPENDS', []) + get_values_list(unit, 'TEST_DEPENDS_VALUE')),
'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
# 'TEST-PRESERVE-ENV': 'da',
- 'TEST-DATA': serialize_list(test_data),
+ 'TEST-DATA': serialize_list(test_data),
'TEST-TIMEOUT': test_timeout,
- 'FORK-MODE': fork_mode,
- 'SPLIT-FACTOR': ''.join(spec_args.get('SPLIT_FACTOR', [])) or unit.get('TEST_SPLIT_FACTOR') or '',
+ 'FORK-MODE': fork_mode,
+ 'SPLIT-FACTOR': ''.join(spec_args.get('SPLIT_FACTOR', [])) or unit.get('TEST_SPLIT_FACTOR') or '',
'SIZE': test_size,
'TAG': test_tags,
'REQUIREMENTS': serialize_list(test_requirements),
- 'TEST-CWD': unit.get('TEST_CWD_VALUE') or '',
+ 'TEST-CWD': unit.get('TEST_CWD_VALUE') or '',
'FUZZ-DICTS': serialize_list(spec_args.get('FUZZ_DICTS', []) + get_unit_list_variable(unit, 'FUZZ_DICTS_VALUE')),
'FUZZ-OPTS': serialize_list(spec_args.get('FUZZ_OPTS', []) + get_unit_list_variable(unit, 'FUZZ_OPTS_VALUE')),
'YT-SPEC': serialize_list(spec_args.get('YT_SPEC', []) + get_unit_list_variable(unit, 'TEST_YT_SPEC_VALUE')),
- 'BLOB': unit.get('TEST_BLOB_DATA') or '',
- 'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
+ 'BLOB': unit.get('TEST_BLOB_DATA') or '',
+ 'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
'TEST_IOS_DEVICE_TYPE': unit.get('TEST_IOS_DEVICE_TYPE_VALUE') or '',
'TEST_IOS_RUNTIME_TYPE': unit.get('TEST_IOS_RUNTIME_TYPE_VALUE') or '',
'ANDROID_APK_TEST_ACTIVITY': unit.get('ANDROID_APK_TEST_ACTIVITY_VALUE') or '',
'TEST_PARTITION': unit.get("TEST_PARTITION") or 'SEQUENTIAL',
'GO_BENCH_TIMEOUT': unit.get('GO_BENCH_TIMEOUT') or '',
- }
+ }
if flat_args[1] == "go.bench":
if "ya:run_go_benchmark" not in test_record["TAG"]:
@@ -519,9 +519,9 @@ def onadd_ytest(unit, *args):
test_record['REQUIREMENTS'] = serialize_list(filter(None, deserialize_list(test_record['REQUIREMENTS']) + ["cpu:all", "ram:all"]))
data = dump_test(unit, test_record)
- if data:
- unit.set_property(["DART_DATA", data])
- save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
+ if data:
+ unit.set_property(["DART_DATA", data])
+ save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
def java_srcdirs_to_data(unit, var):
@@ -543,15 +543,15 @@ def java_srcdirs_to_data(unit, var):
return serialize_list(extra_data)
-def onadd_check(unit, *args):
+def onadd_check(unit, *args):
if unit.get("TIDY") == "yes":
# graph changed for clang_tidy tests
return
flat_args, spec_args = _common.sort_by_keywords({"DEPENDS": -1, "TIMEOUT": 1, "DATA": -1, "TAG": -1, "REQUIREMENTS": -1, "FORK_MODE": 1,
"SPLIT_FACTOR": 1, "FORK_SUBTESTS": 0, "FORK_TESTS": 0, "SIZE": 1}, args)
- check_type = flat_args[0]
+ check_type = flat_args[0]
test_dir = get_norm_unit_path(unit)
-
+
test_timeout = ''
fork_mode = ''
extra_test_data = ''
@@ -584,15 +584,15 @@ def onadd_check(unit, *args):
if ymake_java_test:
extra_test_data = java_srcdirs_to_data(unit, 'ALL_SRCDIRS')
extra_test_dart_data['JDK_RESOURCE'] = 'JDK' + (unit.get('JDK_VERSION') or '_DEFAULT')
- elif check_type == "gofmt":
- script_rel_path = check_type
- go_files = flat_args[1:]
- if go_files:
- test_dir = os.path.dirname(go_files[0]).lstrip("$S/")
- else:
- script_rel_path = check_type
-
- use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
+ elif check_type == "gofmt":
+ script_rel_path = check_type
+ go_files = flat_args[1:]
+ if go_files:
+ test_dir = os.path.dirname(go_files[0]).lstrip("$S/")
+ else:
+ script_rel_path = check_type
+
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
uid_ext = ''
if check_type in ("check.data", "check.resource"):
if unit.get("VALIDATE_DATA") == "no":
@@ -613,118 +613,118 @@ def onadd_check(unit, *args):
else:
test_files = serialize_list(flat_args[1:])
- test_record = {
- 'TEST-NAME': check_type.lower(),
+ test_record = {
+ 'TEST-NAME': check_type.lower(),
'TEST-TIMEOUT': test_timeout,
- 'SCRIPT-REL-PATH': script_rel_path,
- 'TESTED-PROJECT-NAME': os.path.basename(test_dir),
- 'SOURCE-FOLDER-PATH': test_dir,
- 'CUSTOM-DEPENDENCIES': " ".join(spec_args.get('DEPENDS', [])),
+ 'SCRIPT-REL-PATH': script_rel_path,
+ 'TESTED-PROJECT-NAME': os.path.basename(test_dir),
+ 'SOURCE-FOLDER-PATH': test_dir,
+ 'CUSTOM-DEPENDENCIES': " ".join(spec_args.get('DEPENDS', [])),
'TEST-DATA': extra_test_data,
"SBR-UID-EXT": uid_ext,
- 'SPLIT-FACTOR': '',
+ 'SPLIT-FACTOR': '',
'TEST_PARTITION': 'SEQUENTIAL',
'FORK-MODE': fork_mode,
- 'FORK-TEST-FILES': '',
- 'SIZE': 'SMALL',
- 'TAG': '',
+ 'FORK-TEST-FILES': '',
+ 'SIZE': 'SMALL',
+ 'TAG': '',
'REQUIREMENTS': '',
- 'USE_ARCADIA_PYTHON': use_arcadia_python or '',
- 'OLD_PYTEST': 'no',
- 'PYTHON-PATHS': '',
+ 'USE_ARCADIA_PYTHON': use_arcadia_python or '',
+ 'OLD_PYTEST': 'no',
+ 'PYTHON-PATHS': '',
# TODO remove FILES, see DEVTOOLS-7052
'FILES': test_files,
'TEST-FILES': test_files,
'NO_JBUILD': 'yes' if ymake_java_test else 'no',
- }
+ }
test_record.update(extra_test_dart_data)
data = dump_test(unit, test_record)
- if data:
- unit.set_property(["DART_DATA", data])
- save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
-
-
+ if data:
+ unit.set_property(["DART_DATA", data])
+ save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
+
+
def on_register_no_check_imports(unit):
s = unit.get('NO_CHECK_IMPORTS_FOR_VALUE')
if s not in ('', 'None'):
unit.onresource(['-', 'py/no_check_imports/{}="{}"'.format(_common.pathid(s), s)])
-def onadd_check_py_imports(unit, *args):
+def onadd_check_py_imports(unit, *args):
if unit.get("TIDY") == "yes":
# graph changed for clang_tidy tests
return
- if unit.get('NO_CHECK_IMPORTS_FOR_VALUE').strip() == "":
- return
+ if unit.get('NO_CHECK_IMPORTS_FOR_VALUE').strip() == "":
+ return
unit.onpeerdir(['library/python/testing/import_test'])
- check_type = "py.imports"
+ check_type = "py.imports"
test_dir = get_norm_unit_path(unit)
-
- use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
+
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
test_files = serialize_list([get_norm_unit_path(unit, unit.filename())])
- test_record = {
- 'TEST-NAME': "pyimports",
- 'TEST-TIMEOUT': '',
- 'SCRIPT-REL-PATH': check_type,
- 'TESTED-PROJECT-NAME': os.path.basename(test_dir),
- 'SOURCE-FOLDER-PATH': test_dir,
- 'CUSTOM-DEPENDENCIES': '',
- 'TEST-DATA': '',
+ test_record = {
+ 'TEST-NAME': "pyimports",
+ 'TEST-TIMEOUT': '',
+ 'SCRIPT-REL-PATH': check_type,
+ 'TESTED-PROJECT-NAME': os.path.basename(test_dir),
+ 'SOURCE-FOLDER-PATH': test_dir,
+ 'CUSTOM-DEPENDENCIES': '',
+ 'TEST-DATA': '',
'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
- 'SPLIT-FACTOR': '',
+ 'SPLIT-FACTOR': '',
'TEST_PARTITION': 'SEQUENTIAL',
- 'FORK-MODE': '',
- 'FORK-TEST-FILES': '',
- 'SIZE': 'SMALL',
- 'TAG': '',
- 'USE_ARCADIA_PYTHON': use_arcadia_python or '',
- 'OLD_PYTEST': 'no',
- 'PYTHON-PATHS': '',
+ 'FORK-MODE': '',
+ 'FORK-TEST-FILES': '',
+ 'SIZE': 'SMALL',
+ 'TAG': '',
+ 'USE_ARCADIA_PYTHON': use_arcadia_python or '',
+ 'OLD_PYTEST': 'no',
+ 'PYTHON-PATHS': '',
# TODO remove FILES, see DEVTOOLS-7052
'FILES': test_files,
'TEST-FILES': test_files,
- }
- if unit.get('NO_CHECK_IMPORTS_FOR_VALUE') != "None":
- test_record["NO-CHECK"] = serialize_list(get_values_list(unit, 'NO_CHECK_IMPORTS_FOR_VALUE') or ["*"])
- else:
- test_record["NO-CHECK"] = ''
+ }
+ if unit.get('NO_CHECK_IMPORTS_FOR_VALUE') != "None":
+ test_record["NO-CHECK"] = serialize_list(get_values_list(unit, 'NO_CHECK_IMPORTS_FOR_VALUE') or ["*"])
+ else:
+ test_record["NO-CHECK"] = ''
data = dump_test(unit, test_record)
- if data:
- unit.set_property(["DART_DATA", data])
- save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
-
-
-def onadd_pytest_script(unit, *args):
+ if data:
+ unit.set_property(["DART_DATA", data])
+ save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
+
+
+def onadd_pytest_script(unit, *args):
if unit.get("TIDY") == "yes":
# graph changed for clang_tidy tests
return
- unit.set(["PYTEST_BIN", "no"])
- custom_deps = get_values_list(unit, 'TEST_DEPENDS_VALUE')
- timeout = filter(None, [unit.get(["TEST_TIMEOUT"])])
-
- if timeout:
- timeout = timeout[0]
- else:
- timeout = '0'
- test_type = args[0]
- fork_mode = unit.get('TEST_FORK_MODE').split() or ''
- split_factor = unit.get('TEST_SPLIT_FACTOR') or ''
- test_size = unit.get('TEST_SIZE_NAME') or ''
-
- test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
- tags = _get_test_tags(unit)
+ unit.set(["PYTEST_BIN", "no"])
+ custom_deps = get_values_list(unit, 'TEST_DEPENDS_VALUE')
+ timeout = filter(None, [unit.get(["TEST_TIMEOUT"])])
+
+ if timeout:
+ timeout = timeout[0]
+ else:
+ timeout = '0'
+ test_type = args[0]
+ fork_mode = unit.get('TEST_FORK_MODE').split() or ''
+ split_factor = unit.get('TEST_SPLIT_FACTOR') or ''
+ test_size = unit.get('TEST_SIZE_NAME') or ''
+
+ test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
+ tags = _get_test_tags(unit)
requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
data, data_files = get_canonical_test_resources(unit)
test_data += data
- python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
- binary_path = None
- test_cwd = unit.get('TEST_CWD_VALUE') or ''
+ python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
+ binary_path = None
+ test_cwd = unit.get('TEST_CWD_VALUE') or ''
_dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, data_files=data_files)
-
-
-def onadd_pytest_bin(unit, *args):
+
+
+def onadd_pytest_bin(unit, *args):
if unit.get("TIDY") == "yes":
# graph changed for clang_tidy tests
return
@@ -734,10 +734,10 @@ def onadd_pytest_bin(unit, *args):
'Unknown arguments found while processing add_pytest_bin macro: {!r}'
.format(flat)
)
-
+
runner_bin = kws.get('RUNNER_BIN', [None])[0]
test_type = 'py3test.bin' if (unit.get("PYTHON3") == 'yes') else "pytest.bin"
-
+
add_test_to_dart(unit, test_type, runner_bin=runner_bin)
@@ -745,31 +745,31 @@ def add_test_to_dart(unit, test_type, binary_path=None, runner_bin=None):
if unit.get("TIDY") == "yes":
# graph changed for clang_tidy tests
return
- custom_deps = get_values_list(unit, 'TEST_DEPENDS_VALUE')
- timeout = filter(None, [unit.get(["TEST_TIMEOUT"])])
- if timeout:
- timeout = timeout[0]
- else:
- timeout = '0'
- fork_mode = unit.get('TEST_FORK_MODE').split() or ''
- split_factor = unit.get('TEST_SPLIT_FACTOR') or ''
- test_size = unit.get('TEST_SIZE_NAME') or ''
- test_cwd = unit.get('TEST_CWD_VALUE') or ''
-
+ custom_deps = get_values_list(unit, 'TEST_DEPENDS_VALUE')
+ timeout = filter(None, [unit.get(["TEST_TIMEOUT"])])
+ if timeout:
+ timeout = timeout[0]
+ else:
+ timeout = '0'
+ fork_mode = unit.get('TEST_FORK_MODE').split() or ''
+ split_factor = unit.get('TEST_SPLIT_FACTOR') or ''
+ test_size = unit.get('TEST_SIZE_NAME') or ''
+ test_cwd = unit.get('TEST_CWD_VALUE') or ''
+
unit_path = unit.path()
- test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
- tags = _get_test_tags(unit)
+ test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
+ tags = _get_test_tags(unit)
requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
data, data_files = get_canonical_test_resources(unit)
test_data += data
- python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
+ python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
yt_spec = get_values_list(unit, 'TEST_YT_SPEC_VALUE')
- if not binary_path:
+ if not binary_path:
binary_path = os.path.join(unit_path, unit.filename())
_dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, runner_bin=runner_bin, yt_spec=yt_spec, data_files=data_files)
-
-
+
+
def extract_java_system_properties(unit, args):
if len(args) % 2:
return [], 'Wrong use of SYSTEM_PROPERTIES in {}: odd number of arguments'.format(unit.path())
@@ -841,7 +841,7 @@ def onjava_test(unit, *args):
'SOURCE-FOLDER-PATH': path,
'TEST-NAME': '-'.join([os.path.basename(os.path.dirname(path)), os.path.basename(path)]),
'SCRIPT-REL-PATH': script_rel_path,
- 'TEST-TIMEOUT': unit.get('TEST_TIMEOUT') or '',
+ 'TEST-TIMEOUT': unit.get('TEST_TIMEOUT') or '',
'TESTED-PROJECT-NAME': path,
'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
# 'TEST-PRESERVE-ENV': 'da',
@@ -849,7 +849,7 @@ def onjava_test(unit, *args):
'FORK-MODE': unit.get('TEST_FORK_MODE') or '',
'SPLIT-FACTOR': unit.get('TEST_SPLIT_FACTOR') or '',
'CUSTOM-DEPENDENCIES': ' '.join(get_values_list(unit, 'TEST_DEPENDS_VALUE')),
- 'TAG': serialize_list(_get_test_tags(unit)),
+ 'TAG': serialize_list(_get_test_tags(unit)),
'SIZE': unit.get('TEST_SIZE_NAME') or '',
'REQUIREMENTS': serialize_list(get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')),
'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
@@ -860,7 +860,7 @@ def onjava_test(unit, *args):
'JVM_ARGS': serialize_list(get_values_list(unit, 'JVM_ARGS_VALUE')),
'SYSTEM_PROPERTIES': props,
'TEST-CWD': test_cwd,
- 'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
+ 'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
'JAVA_CLASSPATH_CMD_TYPE': java_cp_arg_type,
'NO_JBUILD': 'yes' if ymake_java_test else 'no',
'JDK_RESOURCE': 'JDK' + (unit.get('JDK_VERSION') or '_DEFAULT'),
@@ -879,8 +879,8 @@ def onjava_test(unit, *args):
test_record['TEST_JAR'] = '{}/{}.jar'.format(unit.get('MODDIR'), unit.get('REALPRJNAME'))
data = dump_test(unit, test_record)
- if data:
- unit.set_property(['DART_DATA', data])
+ if data:
+ unit.set_property(['DART_DATA', data])
def onjava_test_deps(unit, *args):
@@ -927,50 +927,50 @@ def onjava_test_deps(unit, *args):
unit.set_property(['DART_DATA', data])
-def _get_test_tags(unit, spec_args=None):
- if spec_args is None:
- spec_args = {}
- tags = spec_args.get('TAG', []) + get_values_list(unit, 'TEST_TAGS_VALUE')
- # DEVTOOLS-7571
- if unit.get('SKIP_TEST_VALUE') and 'ya:fat' in tags and "ya:not_autocheck" not in tags:
- tags.append("ya:not_autocheck")
-
- return tags
-
-
-def _dump_test(
- unit,
- test_type,
- test_files,
- timeout,
- test_dir,
- custom_deps,
- test_data,
- python_paths,
- split_factor,
- fork_mode,
- test_size,
- tags,
+def _get_test_tags(unit, spec_args=None):
+ if spec_args is None:
+ spec_args = {}
+ tags = spec_args.get('TAG', []) + get_values_list(unit, 'TEST_TAGS_VALUE')
+ # DEVTOOLS-7571
+ if unit.get('SKIP_TEST_VALUE') and 'ya:fat' in tags and "ya:not_autocheck" not in tags:
+ tags.append("ya:not_autocheck")
+
+ return tags
+
+
+def _dump_test(
+ unit,
+ test_type,
+ test_files,
+ timeout,
+ test_dir,
+ custom_deps,
+ test_data,
+ python_paths,
+ split_factor,
+ fork_mode,
+ test_size,
+ tags,
requirements,
- binary_path='',
- old_pytest=False,
- test_cwd=None,
+ binary_path='',
+ old_pytest=False,
+ test_cwd=None,
runner_bin=None,
yt_spec=None,
data_files=None
-):
-
- if test_type == "PY_TEST":
- script_rel_path = "py.test"
- else:
- script_rel_path = test_type
-
+):
+
+ if test_type == "PY_TEST":
+ script_rel_path = "py.test"
+ else:
+ script_rel_path = test_type
+
unit_path = unit.path()
- fork_test_files = unit.get('FORK_TEST_FILES_MODE')
- fork_mode = ' '.join(fork_mode) if fork_mode else ''
- use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
- if test_cwd:
- test_cwd = test_cwd.replace("$TEST_CWD_VALUE", "").replace('"MACRO_CALLS_DELIM"', "").strip()
+ fork_test_files = unit.get('FORK_TEST_FILES_MODE')
+ fork_mode = ' '.join(fork_mode) if fork_mode else ''
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
+ if test_cwd:
+ test_cwd = test_cwd.replace("$TEST_CWD_VALUE", "").replace('"MACRO_CALLS_DELIM"', "").strip()
test_name = os.path.basename(binary_path)
test_record = {
'TEST-NAME': os.path.splitext(test_name)[0],
@@ -1000,7 +1000,7 @@ def _dump_test(
'BLOB': unit.get('TEST_BLOB_DATA') or '',
'CANONIZE_SUB_PATH': unit.get('CANONIZE_SUB_PATH') or '',
}
- if binary_path:
+ if binary_path:
test_record['BINARY-PATH'] = _common.strip_roots(binary_path)
if runner_bin:
test_record['TEST-RUNNER-BIN'] = runner_bin
@@ -1010,34 +1010,34 @@ def _dump_test(
if data:
unit.set_property(["DART_DATA", data])
save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
-
-
-def onsetup_pytest_bin(unit, *args):
- use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') == "yes"
- if use_arcadia_python:
- unit.onresource(['-', 'PY_MAIN={}'.format("library.python.pytest.main:main")]) # XXX
+
+
+def onsetup_pytest_bin(unit, *args):
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') == "yes"
+ if use_arcadia_python:
+ unit.onresource(['-', 'PY_MAIN={}'.format("library.python.pytest.main:main")]) # XXX
unit.onadd_pytest_bin(list(args))
- else:
- unit.onno_platform()
- unit.onadd_pytest_script(["PY_TEST"])
-
-
-def onrun(unit, *args):
- exectest_cmd = unit.get(["EXECTEST_COMMAND_VALUE"]) or ''
+ else:
+ unit.onno_platform()
+ unit.onadd_pytest_script(["PY_TEST"])
+
+
+def onrun(unit, *args):
+ exectest_cmd = unit.get(["EXECTEST_COMMAND_VALUE"]) or ''
exectest_cmd += "\n" + subprocess.list2cmdline(args)
- unit.set(["EXECTEST_COMMAND_VALUE", exectest_cmd])
-
-
-def onsetup_exectest(unit, *args):
+ unit.set(["EXECTEST_COMMAND_VALUE", exectest_cmd])
+
+
+def onsetup_exectest(unit, *args):
command = unit.get(["EXECTEST_COMMAND_VALUE"])
if command is None:
ymake.report_configure_error("EXECTEST must have at least one RUN macro")
return
command = command.replace("$EXECTEST_COMMAND_VALUE", "")
- if "PYTHON_BIN" in command:
- unit.ondepends('contrib/tools/python')
- unit.set(["TEST_BLOB_DATA", base64.b64encode(command)])
- add_test_to_dart(unit, "exectest", binary_path=os.path.join(unit.path(), unit.filename()).replace(".pkg", ""))
+ if "PYTHON_BIN" in command:
+ unit.ondepends('contrib/tools/python')
+ unit.set(["TEST_BLOB_DATA", base64.b64encode(command)])
+ add_test_to_dart(unit, "exectest", binary_path=os.path.join(unit.path(), unit.filename()).replace(".pkg", ""))
def onsetup_run_python(unit):
diff --git a/build/plugins/ytest2.py b/build/plugins/ytest2.py
index 1963f4311a..0a34263c35 100644
--- a/build/plugins/ytest2.py
+++ b/build/plugins/ytest2.py
@@ -31,7 +31,7 @@ def ytest_base(unit, related_prj_dir, related_prj_name, args):
data = '\"' + ';'.join(data_lst) + '\"' if data_lst else ''
unit.set(['TEST-DATA', data])
- related_dirs_list = ['{ARCADIA_ROOT}/devtools/${YA_ROOT}', '${ARCADIA_ROOT}/devtools/${YA_ROOT}', '$RELATED_TARGET_SRCDIR']
+ related_dirs_list = ['{ARCADIA_ROOT}/devtools/${YA_ROOT}', '${ARCADIA_ROOT}/devtools/${YA_ROOT}', '$RELATED_TARGET_SRCDIR']
related_dirs_value = []
for rel in related_dirs_list:
related_dirs_value.extend(['--test-related-path', rel])
diff --git a/build/sanitize-blacklist.txt b/build/sanitize-blacklist.txt
index 1006e35d57..c1fb5e539f 100644
--- a/build/sanitize-blacklist.txt
+++ b/build/sanitize-blacklist.txt
@@ -1,4 +1,4 @@
-src:*contrib/tools/python/src*
+src:*contrib/tools/python/src*
src:*contrib/tools/python3/src*
src:*contrib/libs/luajit*
type:std::__*::locale::id=init
diff --git a/build/scripts/append_file.py b/build/scripts/append_file.py
index 0b594a53e8..6b5d53bc71 100644
--- a/build/scripts/append_file.py
+++ b/build/scripts/append_file.py
@@ -1,9 +1,9 @@
-import sys
-
-
-if __name__ == "__main__":
-
- file_path = sys.argv[1]
- with open(file_path, "a") as f:
+import sys
+
+
+if __name__ == "__main__":
+
+ file_path = sys.argv[1]
+ with open(file_path, "a") as f:
for text in sys.argv[2:]:
print >>f, text
diff --git a/build/scripts/build_mn.py b/build/scripts/build_mn.py
index f3ede12de7..5bb03c247c 100755
--- a/build/scripts/build_mn.py
+++ b/build/scripts/build_mn.py
@@ -321,7 +321,7 @@ def BuildMnF(argv):
if __name__ == '__main__':
if len(sys.argv) < 2:
- print >>sys.stderr, "Usage: build_mn.py <funcName> <args...>"
+ print >>sys.stderr, "Usage: build_mn.py <funcName> <args...>"
sys.exit(1)
if (sys.argv[2:]):
diff --git a/build/scripts/build_pln_header.py b/build/scripts/build_pln_header.py
index 0d09907f9b..c73693f444 100755
--- a/build/scripts/build_pln_header.py
+++ b/build/scripts/build_pln_header.py
@@ -6,7 +6,7 @@ import os
def BuildPlnHeader():
if len(sys.argv) < 2:
- print >>sys.stderr, "Usage: build_pln_header.py <absolute/path/to/OutFile>"
+ print >>sys.stderr, "Usage: build_pln_header.py <absolute/path/to/OutFile>"
sys.exit(1)
print >>sys.stdout, "Build Pln Header..."
diff --git a/build/scripts/cat.py b/build/scripts/cat.py
index 6153dd2a8a..0c3f73d96f 100755
--- a/build/scripts/cat.py
+++ b/build/scripts/cat.py
@@ -3,13 +3,13 @@ import sys
from shutil import copyfileobj as copy
import os.path
-if __name__ == '__main__':
- for filename in sys.argv[1:] or ["-"]:
- if filename == "-":
- copy(sys.stdin, sys.stdout)
+if __name__ == '__main__':
+ for filename in sys.argv[1:] or ["-"]:
+ if filename == "-":
+ copy(sys.stdin, sys.stdout)
else:
- if os.path.exists(filename):
- with open(filename, 'rb') as file:
- copy(file, sys.stdout)
- else:
- sys.stderr.write('cat.py: {0}: No such file or directory\n'.format(filename))
+ if os.path.exists(filename):
+ with open(filename, 'rb') as file:
+ copy(file, sys.stdout)
+ else:
+ sys.stderr.write('cat.py: {0}: No such file or directory\n'.format(filename))
diff --git a/build/scripts/check_config_h.py b/build/scripts/check_config_h.py
index 0fe56908df..07bc12e230 100644
--- a/build/scripts/check_config_h.py
+++ b/build/scripts/check_config_h.py
@@ -83,7 +83,7 @@ static_assert(sizeof(wchar_t) == SIZEOF_WCHAR_T, "fixme 16");
//TODO
#endif
"""
-if __name__ == '__main__':
- with open(sys.argv[2], 'w') as f:
- f.write('#include <' + sys.argv[1] + '>\n\n')
- f.write(data)
+if __name__ == '__main__':
+ with open(sys.argv[2], 'w') as f:
+ f.write('#include <' + sys.argv[1] + '>\n\n')
+ f.write(data)
diff --git a/build/scripts/compile_cuda.py b/build/scripts/compile_cuda.py
index 7e98e5bc19..c0bec50b2a 100644
--- a/build/scripts/compile_cuda.py
+++ b/build/scripts/compile_cuda.py
@@ -21,29 +21,29 @@ def main():
except ValueError:
skip_nocxxinc = False
- spl = sys.argv.index('--cflags')
+ spl = sys.argv.index('--cflags')
mtime0 = sys.argv[1]
command = sys.argv[2: spl]
- cflags = sys.argv[spl + 1:]
+ cflags = sys.argv[spl + 1:]
dump_args = False
if '--y_dump_args' in command:
command.remove('--y_dump_args')
dump_args = True
- executable = command[0]
- if not os.path.exists(executable):
- print >> sys.stderr, '{} not found'.format(executable)
- sys.exit(1)
+ executable = command[0]
+ if not os.path.exists(executable):
+ print >> sys.stderr, '{} not found'.format(executable)
+ sys.exit(1)
if is_clang(command):
# nvcc concatenates the sources for clang, and clang reports unused
# things from .h files as if they they were defined in a .cpp file.
cflags += ['-Wno-unused-function', '-Wno-unused-parameter']
- if not is_clang(command) and '-fopenmp=libomp' in cflags:
- cflags.append('-fopenmp')
- cflags.remove('-fopenmp=libomp')
+ if not is_clang(command) and '-fopenmp=libomp' in cflags:
+ cflags.append('-fopenmp')
+ cflags.remove('-fopenmp=libomp')
skip_list = [
'-gline-tables-only',
diff --git a/build/scripts/coverage-info.py b/build/scripts/coverage-info.py
index c64c3efd48..94491d9256 100644
--- a/build/scripts/coverage-info.py
+++ b/build/scripts/coverage-info.py
@@ -96,26 +96,26 @@ def chunks(l, n):
yield l[i:i + n]
-def combine_info_files(lcov, files, out_file):
- chunk_size = 50
- files = list(set(files))
-
- for chunk in chunks(files, chunk_size):
- combine_cmd = [lcov]
- if os.path.exists(out_file):
- chunk.append(out_file)
- for trace in chunk:
- assert os.path.exists(trace), "Trace file does not exist: {} (cwd={})".format(trace, os.getcwd())
- combine_cmd += ["-a", os.path.abspath(trace)]
- print >>sys.stderr, '## lcov', ' '.join(combine_cmd[1:])
- out_file_tmp = "combined.tmp"
- with open(out_file_tmp, "w") as stdout:
- subprocess.check_call(combine_cmd, stdout=stdout)
- if os.path.exists(out_file):
- os.remove(out_file)
- os.rename(out_file_tmp, out_file)
-
-
+def combine_info_files(lcov, files, out_file):
+ chunk_size = 50
+ files = list(set(files))
+
+ for chunk in chunks(files, chunk_size):
+ combine_cmd = [lcov]
+ if os.path.exists(out_file):
+ chunk.append(out_file)
+ for trace in chunk:
+ assert os.path.exists(trace), "Trace file does not exist: {} (cwd={})".format(trace, os.getcwd())
+ combine_cmd += ["-a", os.path.abspath(trace)]
+ print >>sys.stderr, '## lcov', ' '.join(combine_cmd[1:])
+ out_file_tmp = "combined.tmp"
+ with open(out_file_tmp, "w") as stdout:
+ subprocess.check_call(combine_cmd, stdout=stdout)
+ if os.path.exists(out_file):
+ os.remove(out_file)
+ os.rename(out_file_tmp, out_file)
+
+
def probe_path_global(path, source_root, prefix_filter, exclude_files):
if path.endswith('_ut.cpp'):
return None
@@ -186,7 +186,7 @@ def process_all_coverage_files(gcda_archive, fname2gcno, fname2info, geninfo_exe
source_fname = gcda_name[:-len(GCDA_EXT)]
for suff in suffixes(source_fname):
if suff in fname2gcno:
- gcda_new_name = suff + GCDA_EXT
+ gcda_new_name = suff + GCDA_EXT
gcda_item.name = gcda_new_name
gcda_tf.extract(gcda_item)
if os.path.getsize(gcda_new_name) > 0:
@@ -196,9 +196,9 @@ def process_all_coverage_files(gcda_archive, fname2gcno, fname2info, geninfo_exe
geninfo_executable,
'--gcov-tool', gcov_tool,
gcda_new_name,
- '-o', coverage_info + '.tmp'
+ '-o', coverage_info + '.tmp'
]
- gen_info(geninfo_cmd, coverage_info)
+ gen_info(geninfo_cmd, coverage_info)
def gen_cobertura(tool, output, combined_info):
@@ -251,9 +251,9 @@ def main(source_root, output, gcno_archive, gcda_archive, gcov_tool, prefix_filt
print_stat(da, fnda, teamcity_stat_file)
if lcov_args:
- output_trace = "combined.info"
- combine_info_files(os.path.join(source_root, 'devtools', 'lcov', 'lcov'), lcov_args, output_trace)
- cmd = [os.path.join(source_root, 'devtools', 'lcov', 'genhtml'), '-p', source_root, '--ignore-errors', 'source', '-o', output_dir, output_trace]
+ output_trace = "combined.info"
+ combine_info_files(os.path.join(source_root, 'devtools', 'lcov', 'lcov'), lcov_args, output_trace)
+ cmd = [os.path.join(source_root, 'devtools', 'lcov', 'genhtml'), '-p', source_root, '--ignore-errors', 'source', '-o', output_dir, output_trace]
print >>sys.stderr, '## genhtml', ' '.join(cmd)
subprocess.check_call(cmd)
if lcov_cobertura:
diff --git a/build/scripts/f2c.py b/build/scripts/f2c.py
index 2668bd1a37..7021e1391f 100644
--- a/build/scripts/f2c.py
+++ b/build/scripts/f2c.py
@@ -28,31 +28,31 @@ def mkdir_p(directory):
os.makedirs(directory)
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
- parser.add_argument('-t', '--tool')
- parser.add_argument('-c', '--input')
- parser.add_argument('-o', '--output')
+ parser.add_argument('-t', '--tool')
+ parser.add_argument('-c', '--input')
+ parser.add_argument('-o', '--output')
- args = parser.parse_args()
+ args = parser.parse_args()
tmpdir = args.output + '.f2c'
mkdir_p(tmpdir)
- # should parse includes, really
+ # should parse includes, really
p = subprocess.Popen(
[args.tool, '-w', '-R', '-a', '-I' + os.path.dirname(args.input), '-T' + tmpdir],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
- stdout, stderr = p.communicate(input=open(args.input).read())
- ret = p.wait()
+ stdout, stderr = p.communicate(input=open(args.input).read())
+ ret = p.wait()
- if ret:
- print >>sys.stderr, 'f2c failed: %s, %s' % (stderr, ret)
- sys.exit(ret)
+ if ret:
+ print >>sys.stderr, 'f2c failed: %s, %s' % (stderr, ret)
+ sys.exit(ret)
- if 'Error' in stderr:
+ if 'Error' in stderr:
print >>sys.stderr, stderr
- with open(args.output, 'w') as f:
+ with open(args.output, 'w') as f:
f.write(header)
- f.write(stdout)
+ f.write(stdout)
f.write(footer)
diff --git a/build/scripts/fetch_from_sandbox.py b/build/scripts/fetch_from_sandbox.py
index a9beb5ba41..a99542e174 100755
--- a/build/scripts/fetch_from_sandbox.py
+++ b/build/scripts/fetch_from_sandbox.py
@@ -121,13 +121,13 @@ def get_resource_info(resource_id, touch=False, no_links=False):
if no_links:
headers.update({'X-No-Links': '1'})
return _query(url)
-
-
-def get_resource_http_links(resource_id):
+
+
+def get_resource_http_links(resource_id):
url = ''.join((_SANDBOX_BASE_URL, '/resource/', str(resource_id), '/data/http'))
return [r['url'] + ORIGIN_SUFFIX for r in _query(url)]
-
-
+
+
def fetch_via_script(script, resource_id):
return subprocess.check_output([script, str(resource_id)]).rstrip()
diff --git a/build/scripts/fs_tools.py b/build/scripts/fs_tools.py
index 979aa72f0c..dec4c349c8 100644
--- a/build/scripts/fs_tools.py
+++ b/build/scripts/fs_tools.py
@@ -23,28 +23,28 @@ def link_or_copy(src, dst):
raise
-if __name__ == '__main__':
- mode = sys.argv[1]
+if __name__ == '__main__':
+ mode = sys.argv[1]
args = pcf.get_args(sys.argv[2:])
- if mode == 'copy':
+ if mode == 'copy':
shutil.copy(args[0], args[1])
elif mode == 'copy_tree_no_link':
dst = args[1]
shutil.copytree(args[0], dst, ignore=lambda dirname, names: [n for n in names if os.path.islink(os.path.join(dirname, n))])
- elif mode == 'copy_files':
- src = args[0]
- dst = args[1]
- files = open(args[2]).read().strip().split()
- for f in files:
- s = os.path.join(src, f)
- d = os.path.join(dst, f)
- if os.path.exists(d):
- continue
- try:
- os.makedirs(os.path.dirname(d))
- except OSError:
- pass
+ elif mode == 'copy_files':
+ src = args[0]
+ dst = args[1]
+ files = open(args[2]).read().strip().split()
+ for f in files:
+ s = os.path.join(src, f)
+ d = os.path.join(dst, f)
+ if os.path.exists(d):
+ continue
+ try:
+ os.makedirs(os.path.dirname(d))
+ except OSError:
+ pass
shutil.copy(s, d)
elif mode == 'copy_all_files':
src = args[0]
@@ -61,23 +61,23 @@ if __name__ == '__main__':
except OSError:
pass
shutil.copy(os.path.join(root, f), file_dst)
- elif mode == 'rename_if_exists':
- if os.path.exists(args[0]):
- shutil.move(args[0], args[1])
- elif mode == 'rename':
+ elif mode == 'rename_if_exists':
+ if os.path.exists(args[0]):
+ shutil.move(args[0], args[1])
+ elif mode == 'rename':
targetdir = os.path.dirname(args[1])
if targetdir and not os.path.exists(targetdir):
os.makedirs(os.path.dirname(args[1]))
shutil.move(args[0], args[1])
- elif mode == 'remove':
- for f in args:
- try:
- if os.path.isfile(f) or os.path.islink(f):
- os.remove(f)
- else:
- shutil.rmtree(f)
- except OSError:
- pass
+ elif mode == 'remove':
+ for f in args:
+ try:
+ if os.path.isfile(f) or os.path.islink(f):
+ os.remove(f)
+ else:
+ shutil.rmtree(f)
+ except OSError:
+ pass
elif mode == 'link_or_copy':
link_or_copy(args[0], args[1])
elif mode == 'link_or_copy_to_dir':
@@ -100,5 +100,5 @@ if __name__ == '__main__':
os.makedirs(args[0])
except OSError:
pass
- else:
- raise Exception('unsupported tool %s' % mode)
+ else:
+ raise Exception('unsupported tool %s' % mode)
diff --git a/build/scripts/gen_mx_table.py b/build/scripts/gen_mx_table.py
index 45f392a3e5..187c21c539 100644
--- a/build/scripts/gen_mx_table.py
+++ b/build/scripts/gen_mx_table.py
@@ -50,26 +50,26 @@ namespace {
yabs_mx_calc_table_t yabs_mx_calc_table = {YABS_MX_CALC_VERSION, 10000, 0, yabs_funcs};
"""
-if __name__ == '__main__':
- init = []
- body = []
- defs = {}
+if __name__ == '__main__':
+ init = []
+ body = []
+ defs = {}
- for i in sys.argv[1:]:
- name = i.replace('.', '_')
- num = long(name.split('_')[1])
+ for i in sys.argv[1:]:
+ name = i.replace('.', '_')
+ num = long(name.split('_')[1])
- init.append('(*this)[%s] = new TFml(ar.ObjectBlobByKey("%s"));' % (num, '/' + i))
+ init.append('(*this)[%s] = new TFml(ar.ObjectBlobByKey("%s"));' % (num, '/' + i))
- f1 = 'static void yabs_%s(size_t count, const float** args, double* res) {Singleton<TFormulas>()->at(%s).DoCalcRelevs(args, res, count);}' % (name, num)
- f2 = 'static size_t yabs_%s_factor_count() {return Singleton<TFormulas>()->at(%s).MaxFactorIndex() + 1;}' % (name, num)
+ f1 = 'static void yabs_%s(size_t count, const float** args, double* res) {Singleton<TFormulas>()->at(%s).DoCalcRelevs(args, res, count);}' % (name, num)
+ f2 = 'static size_t yabs_%s_factor_count() {return Singleton<TFormulas>()->at(%s).MaxFactorIndex() + 1;}' % (name, num)
- body.append(f1)
- body.append(f2)
+ body.append(f1)
+ body.append(f2)
- d1 = 'yabs_%s' % name
- d2 = 'yabs_%s_factor_count' % name
+ d1 = 'yabs_%s' % name
+ d2 = 'yabs_%s_factor_count' % name
- defs[num] = '{%s, %s}' % (d1, d2)
+ defs[num] = '{%s, %s}' % (d1, d2)
- print tmpl % ('\n'.join(init), '\n\n'.join(body), ',\n'.join((defs.get(i, '{nullptr, nullptr}') for i in range(0, 10000))))
+ print tmpl % ('\n'.join(init), '\n\n'.join(body), ',\n'.join((defs.get(i, '{nullptr, nullptr}') for i in range(0, 10000))))
diff --git a/build/scripts/gen_py_reg.py b/build/scripts/gen_py_reg.py
index 2c2495c14e..1560135ae8 100644
--- a/build/scripts/gen_py_reg.py
+++ b/build/scripts/gen_py_reg.py
@@ -19,14 +19,14 @@ def mangle(name):
return name
return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))
-if __name__ == '__main__':
- if len(sys.argv) != 3:
- print >>sys.stderr, 'Usage: <path/to/gen_py_reg.py> <python_module_name> <output_file>'
- print >>sys.stderr, 'Passed: ' + ' '.join(sys.argv)
- sys.exit(1)
+if __name__ == '__main__':
+ if len(sys.argv) != 3:
+ print >>sys.stderr, 'Usage: <path/to/gen_py_reg.py> <python_module_name> <output_file>'
+ print >>sys.stderr, 'Passed: ' + ' '.join(sys.argv)
+ sys.exit(1)
- with open(sys.argv[2], 'w') as f:
- modname = sys.argv[1]
- initname = 'init' + mangle(modname)
- code = template.replace('{0}', modname).replace('{1}', initname)
- f.write(code)
+ with open(sys.argv[2], 'w') as f:
+ modname = sys.argv[1]
+ initname = 'init' + mangle(modname)
+ code = template.replace('{0}', modname).replace('{1}', initname)
+ f.write(code)
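
The mangle() helper visible in this file's context lines length-prefixes each component of a dotted module name, mirroring how CPython 2 names the init function of a nested extension module ('pkg.mod' registers as 'init3pkg3mod'). A quick self-contained check of the scheme (the dot-free guard is reconstructed from the context above):

    def mangle(name):
        # Same transform as gen_py_reg.py's mangle(); names without dots
        # pass through unchanged.
        if '.' not in name:
            return name
        return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))

    assert mangle('simple') == 'simple'
    assert mangle('pkg.mod') == '3pkg3mod'   # init symbol: init3pkg3mod
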
diff --git a/build/scripts/link_fat_obj.py b/build/scripts/link_fat_obj.py
index 9bf6254255..c189668b9e 100644
--- a/build/scripts/link_fat_obj.py
+++ b/build/scripts/link_fat_obj.py
@@ -8,11 +8,11 @@ YA_ARG_PREFIX = '-Ya,'
def get_args():
- parser = argparse.ArgumentParser()
+ parser = argparse.ArgumentParser()
parser.add_argument('--obj')
parser.add_argument('--globals-lib')
- parser.add_argument('--lib', required=True)
- parser.add_argument('--arch', required=True)
+ parser.add_argument('--lib', required=True)
+ parser.add_argument('--arch', required=True)
parser.add_argument('--build-root', default=None)
parser.add_argument('--with-own-obj', action='store_true', default=False)
parser.add_argument('--with-global-srcs', action='store_true', default=False)
@@ -46,8 +46,8 @@ def main():
auto_input = groups['input']
# Outputs
- lib_output = args.lib
- obj_output = args.obj
+ lib_output = args.lib
+ obj_output = args.obj
# Dependencies
global_srcs = groups['global_srcs']
@@ -69,10 +69,10 @@ def main():
if args.with_global_srcs:
do_archive += global_srcs
- def call(c):
+ def call(c):
proc = subprocess.Popen(c, shell=False, stderr=sys.stderr, stdout=sys.stdout, cwd=args.build_root)
- proc.communicate()
- return proc.returncode
+ proc.communicate()
+ return proc.returncode
if obj_output:
link_res = call(do_link)
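
link_fat_obj.py chains its do_link/do_archive commands through the small call() helper restored above, which just streams the child's output and reports its exit status. The same helper in isolation:

    import subprocess
    import sys

    def call(cmd, cwd=None):
        # As in link_fat_obj.py: inherit stdout/stderr so linker output
        # stays visible, and return the child's exit code to the caller.
        proc = subprocess.Popen(cmd, shell=False, stderr=sys.stderr,
                                stdout=sys.stdout, cwd=cwd)
        proc.communicate()
        return proc.returncode

    rc = call(['true'])   # hypothetical no-op command; rc == 0 on success
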
diff --git a/build/scripts/mkver.py b/build/scripts/mkver.py
index 91197942af..321cdaade1 100755
--- a/build/scripts/mkver.py
+++ b/build/scripts/mkver.py
@@ -1,12 +1,12 @@
import sys
-if __name__ == '__main__':
- with open(sys.argv[1], 'r') as f:
- data = f.readline()
+if __name__ == '__main__':
+ with open(sys.argv[1], 'r') as f:
+ data = f.readline()
- beg = data.find('(') + 1
- end = data.find(')')
- version = data[beg:end]
+ beg = data.find('(') + 1
+ end = data.find(')')
+ version = data[beg:end]
- print '#pragma once'
- print '#define DEBIAN_VERSION "%s"' % version
+ print '#pragma once'
+ print '#define DEBIAN_VERSION "%s"' % version
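
mkver.py reads only the first line of its input — presumably a debian/changelog, whose first line has the form 'package (version) distribution; urgency=...' — and prints a C header exposing the version. With a made-up changelog line:

    # Hypothetical first line of a debian/changelog-style file:
    data = 'ydb (1.2.3) unstable; urgency=medium\n'
    version = data[data.find('(') + 1:data.find(')')]
    print('#pragma once')
    print('#define DEBIAN_VERSION "%s"' % version)   # "1.2.3"
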
diff --git a/build/scripts/perl_wrapper.py b/build/scripts/perl_wrapper.py
index fdb9beb1db..cb4027f1d3 100644
--- a/build/scripts/perl_wrapper.py
+++ b/build/scripts/perl_wrapper.py
@@ -2,23 +2,23 @@ import os
import sys
import shutil
-if __name__ == '__main__':
- path = sys.argv[1]
- to = sys.argv[-1]
- fr = sys.argv[-2]
- to_dir = os.path.dirname(to)
+if __name__ == '__main__':
+ path = sys.argv[1]
+ to = sys.argv[-1]
+ fr = sys.argv[-2]
+ to_dir = os.path.dirname(to)
- os.chdir(to_dir)
+ os.chdir(to_dir)
- f1 = os.path.basename(fr)
- fr_ = os.path.dirname(fr)
- f2 = os.path.basename(fr_)
- fr_ = os.path.dirname(fr_)
+ f1 = os.path.basename(fr)
+ fr_ = os.path.dirname(fr)
+ f2 = os.path.basename(fr_)
+ fr_ = os.path.dirname(fr_)
- os.makedirs(f2)
- shutil.copyfile(fr, os.path.join(f2, f1))
+ os.makedirs(f2)
+ shutil.copyfile(fr, os.path.join(f2, f1))
- if path[0] != '/':
- path = os.path.join(os.path.dirname(__file__), path)
+ if path[0] != '/':
+ path = os.path.join(os.path.dirname(__file__), path)
- os.execv(path, [path] + sys.argv[2:])
+ os.execv(path, [path] + sys.argv[2:])
diff --git a/build/scripts/preprocess.py b/build/scripts/preprocess.py
index 1d724c6819..4657bef732 100644
--- a/build/scripts/preprocess.py
+++ b/build/scripts/preprocess.py
@@ -44,5 +44,5 @@ def subst_headers(path, headers):
f.write(prev)
-if __name__ == '__main__':
- subst_headers(sys.argv[1], ['stack.hh', 'position.hh', 'location.hh'])
+if __name__ == '__main__':
+ subst_headers(sys.argv[1], ['stack.hh', 'position.hh', 'location.hh'])
diff --git a/build/scripts/resolve_java_srcs.py b/build/scripts/resolve_java_srcs.py
index bd50c96af9..a2e6c20012 100644
--- a/build/scripts/resolve_java_srcs.py
+++ b/build/scripts/resolve_java_srcs.py
@@ -17,15 +17,15 @@ def list_all_files(directory, prefix='/', hidden_files=False):
def pattern_to_regexp(p):
return '^' + \
('/' if not p.startswith('**') else '') + \
- re.escape(p).replace(
- r'\*\*\/', '[_DIR_]'
- ).replace(
- r'\*', '[_FILE_]'
- ).replace(
- '[_DIR_]', '(.*/)?'
- ).replace(
- '[_FILE_]', '([^/]*)'
- ) + '$'
+ re.escape(p).replace(
+ r'\*\*\/', '[_DIR_]'
+ ).replace(
+ r'\*', '[_FILE_]'
+ ).replace(
+ '[_DIR_]', '(.*/)?'
+ ).replace(
+ '[_FILE_]', '([^/]*)'
+ ) + '$'
def resolve_java_srcs(srcdir, include_patterns, exclude_patterns, all_resources, resolve_kotlin=False, resolve_groovy=False):
@@ -87,7 +87,7 @@ def do_it(directory, sources_file, resources_file, kotlin_sources_file, groovy_s
if groovy_sources_file:
open(groovy_sources_file, mode).writelines(i + '\n' for i in g + j)
-
+
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--directory', required=True)
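
pattern_to_regexp() above turns Ant-style source globs into anchored regexps: '**/' may swallow any directory prefix, while '*' stays inside a single path component. The original leans on Python 2's re.escape() escaping '/' (so the escaped '\*\*\/' sequence can be found and replaced); the sketch below re-derives the same semantics without that assumption, since re.escape() stopped escaping '/' in Python 3.7:

    import re

    def glob_to_regexp(p):
        # Illustrative re-derivation, not the original function.
        out = '^' + ('' if p.startswith('**') else '/')
        i = 0
        while i < len(p):
            if p.startswith('**/', i):
                out += '(.*/)?'    # zero or more leading directories
                i += 3
            elif p[i] == '*':
                out += '([^/]*)'   # within one path component only
                i += 1
            else:
                out += re.escape(p[i])
                i += 1
        return out + '$'

    assert glob_to_regexp('**/*.java') == r'^(.*/)?([^/]*)\.java$'
    assert re.match(glob_to_regexp('**/*.java'), 'a/b/Main.java')
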
diff --git a/build/scripts/run_llvm_dsymutil.py b/build/scripts/run_llvm_dsymutil.py
index 862cc74979..4f43362ad9 100644
--- a/build/scripts/run_llvm_dsymutil.py
+++ b/build/scripts/run_llvm_dsymutil.py
@@ -3,9 +3,9 @@ import sys
import subprocess
-if __name__ == '__main__':
- with open(os.devnull, 'w') as fnull:
- p = subprocess.Popen(sys.argv[1:], shell=False, stderr=fnull, stdout=sys.stdout)
+if __name__ == '__main__':
+ with open(os.devnull, 'w') as fnull:
+ p = subprocess.Popen(sys.argv[1:], shell=False, stderr=fnull, stdout=sys.stdout)
- p.communicate()
- sys.exit(p.returncode)
+ p.communicate()
+ sys.exit(p.returncode)
diff --git a/build/scripts/run_tool.py b/build/scripts/run_tool.py
index 948e7f50d1..00e3ff6f1e 100755
--- a/build/scripts/run_tool.py
+++ b/build/scripts/run_tool.py
@@ -2,8 +2,8 @@ import sys
import subprocess
import os
-
-if __name__ == '__main__':
- env = os.environ.copy()
- env['ASAN_OPTIONS'] = 'detect_leaks=0'
- subprocess.check_call(sys.argv[sys.argv.index('--') + 1:], env=env)
+
+if __name__ == '__main__':
+ env = os.environ.copy()
+ env['ASAN_OPTIONS'] = 'detect_leaks=0'
+ subprocess.check_call(sys.argv[sys.argv.index('--') + 1:], env=env)
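
run_tool.py is a thin environment shim: everything after '--' is the real command, executed with leak detection switched off, presumably so ASan-instrumented build tools don't abort codegen steps over benign leaks. Inline equivalent with a hypothetical command:

    import os
    import subprocess

    # e.g.  python run_tool.py -- ./codegen --out gen.cpp   (hypothetical)
    argv = ['run_tool.py', '--', 'echo', 'ok']
    env = os.environ.copy()
    env['ASAN_OPTIONS'] = 'detect_leaks=0'   # mute LSan for the child only
    subprocess.check_call(argv[argv.index('--') + 1:], env=env)
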
diff --git a/build/scripts/wrapper.py b/build/scripts/wrapper.py
index 7259ef5864..1e9d7955a5 100644
--- a/build/scripts/wrapper.py
+++ b/build/scripts/wrapper.py
@@ -2,10 +2,10 @@ import os
import sys
-if __name__ == '__main__':
- path = sys.argv[1]
+if __name__ == '__main__':
+ path = sys.argv[1]
- if path[0] != '/':
- path = os.path.join(os.path.dirname(__file__), path)
+ if path[0] != '/':
+ path = os.path.join(os.path.dirname(__file__), path)
- os.execv(path, [path] + sys.argv[2:])
+ os.execv(path, [path] + sys.argv[2:])
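
wrapper.py resolves a relative tool path against its own directory and then replaces itself via os.execv, so the wrapped tool's exit status is exactly what the build system observes. The same contract in a standalone sketch (the default path is hypothetical):

    import os
    import sys

    path = sys.argv[1] if len(sys.argv) > 1 else '/bin/echo'
    if path[0] != '/':
        # relative paths are taken to live next to the wrapper itself
        path = os.path.join(os.path.dirname(os.path.abspath(__file__)), path)
    os.execv(path, [path] + sys.argv[2:])   # does not return on success
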
diff --git a/build/scripts/xargs.py b/build/scripts/xargs.py
index 8a6e93e027..5d68929ecc 100644
--- a/build/scripts/xargs.py
+++ b/build/scripts/xargs.py
@@ -2,17 +2,17 @@ import sys
import os
import subprocess
-if __name__ == '__main__':
- pos = sys.argv.index('--')
- fname = sys.argv[pos + 1]
- cmd = sys.argv[pos + 2:]
+if __name__ == '__main__':
+ pos = sys.argv.index('--')
+ fname = sys.argv[pos + 1]
+ cmd = sys.argv[pos + 2:]
- with open(fname, 'r') as f:
- args = [x.strip() for x in f]
+ with open(fname, 'r') as f:
+ args = [x.strip() for x in f]
- os.remove(fname)
+ os.remove(fname)
- p = subprocess.Popen(cmd + args, shell=False, stderr=sys.stderr, stdout=sys.stdout)
- p.communicate()
+ p = subprocess.Popen(cmd + args, shell=False, stderr=sys.stderr, stdout=sys.stdout)
+ p.communicate()
- sys.exit(p.returncode)
+ sys.exit(p.returncode)
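
xargs.py implements the classic response-file workaround for OS argv length limits: the file named after '--' carries one argument per line, is deleted once read, and its contents are appended to the command. A runnable sketch with 'echo' standing in for the real tool:

    import os
    import subprocess
    import sys

    with open('args.txt', 'w') as f:        # hypothetical response file
        f.write('one\ntwo\n')
    with open('args.txt') as f:
        args = [x.strip() for x in f]
    os.remove('args.txt')                   # consumed, as in xargs.py
    p = subprocess.Popen(['echo'] + args, stderr=sys.stderr, stdout=sys.stdout)
    p.communicate()
    sys.exit(p.returncode)
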
diff --git a/build/scripts/ya.make b/build/scripts/ya.make
index 5f19477098..710165e40d 100644
--- a/build/scripts/ya.make
+++ b/build/scripts/ya.make
@@ -10,44 +10,44 @@ TEST_SRCS(
build_java_with_error_prone2.py
build_mn.py
build_pln_header.py
- cat.py
+ cat.py
cgo1_wrapper.py
- check_config_h.py
+ check_config_h.py
collect_java_srcs.py
- compile_cuda.py
- compile_java.py
+ compile_cuda.py
+ compile_java.py
compile_jsrc.py
compile_pysrc.py
- configure_file.py
+ configure_file.py
copy_files_to_dir.py
- copy_to_dir.py
- coverage-info.py
+ copy_to_dir.py
+ coverage-info.py
cpp_flatc_wrapper.py
- create_jcoverage_report.py
+ create_jcoverage_report.py
extract_asrc.py
extract_docs.py
extract_jacoco_report.py
- f2c.py
+ f2c.py
fail_module_cmd.py
fetch_from.py
- fetch_from_external.py
+ fetch_from_external.py
fetch_from_mds.py
fetch_from_npm.py
- fetch_from_sandbox.py
- fetch_resource.py
+ fetch_from_sandbox.py
+ fetch_resource.py
filter_zip.py
- find_and_tar.py
- fix_msvc_output.py
- fs_tools.py
+ find_and_tar.py
+ fix_msvc_output.py
+ fs_tools.py
gen_aar_gradle_script.py
gen_java_codenav_entry.py
gen_java_codenav_protobuf.py
- gen_mx_table.py
+ gen_mx_table.py
gen_py3_reg.py
- gen_py_reg.py
+ gen_py_reg.py
gen_test_apk_gradle_script.py
- gen_ub.py
- generate_pom.py
+ gen_ub.py
+ generate_pom.py
go_proto_wrapper.py
go_tool.py
ios_wrapper.py
@@ -55,43 +55,43 @@ TEST_SRCS(
link_asrc.py
link_dyn_lib.py
link_exe.py
- link_fat_obj.py
- link_lib.py
+ link_fat_obj.py
+ link_lib.py
llvm_opt_wrapper.py
- merge_coverage_data.py
+ merge_coverage_data.py
merge_files.py
- mkdir.py
+ mkdir.py
mkdocs_builder_wrapper.py
- mkver.py
+ mkver.py
pack_ios.py
- pack_jcoverage_resources.py
- perl_wrapper.py
+ pack_jcoverage_resources.py
+ perl_wrapper.py
postprocess_go_fbs.py
- preprocess.py
- py_compile.py
+ preprocess.py
+ py_compile.py
run_ios_simulator.py
- run_javac.py
+ run_javac.py
run_junit.py
- run_llvm_dsymutil.py
- run_msvc_wine.py
- run_tool.py
+ run_llvm_dsymutil.py
+ run_msvc_wine.py
+ run_tool.py
sky.py
- stdout2stderr.py
- symlink.py
+ stdout2stderr.py
+ symlink.py
tar_directory.py
tar_sources.py
- tared_protoc.py
- touch.py
- unpacking_jtest_runner.py
+ tared_protoc.py
+ touch.py
+ unpacking_jtest_runner.py
vcs_info.py
with_coverage.py
with_crash_on_timeout.py
- with_pathsep_resolve.py
+ with_pathsep_resolve.py
wrap_groovyc.py
- wrapper.py
- writer.py
- xargs.py
- yield_line.py
+ wrapper.py
+ writer.py
+ xargs.py
+ yield_line.py
yndexer.py
)
diff --git a/build/scripts/yield_line.py b/build/scripts/yield_line.py
index ff47af4e3d..9c1c539146 100644
--- a/build/scripts/yield_line.py
+++ b/build/scripts/yield_line.py
@@ -1,7 +1,7 @@
import sys
-if __name__ == '__main__':
- pos = sys.argv.index('--')
+if __name__ == '__main__':
+ pos = sys.argv.index('--')
- with open(sys.argv[pos + 1], 'a') as f:
- f.write(' '.join(sys.argv[pos + 2:]) + '\n')
+ with open(sys.argv[pos + 1], 'a') as f:
+ f.write(' '.join(sys.argv[pos + 2:]) + '\n')
diff --git a/build/ya.conf.json b/build/ya.conf.json
index 5dd350dbdc..5f7cc875d6 100644
--- a/build/ya.conf.json
+++ b/build/ya.conf.json
@@ -3901,7 +3901,7 @@
"default": true
}
]
- },
+ },
"mvn": {
"tools": {
"mvn": {
@@ -3930,13 +3930,13 @@
}
]
},
- "fast_diff": {
- "tools": {
+ "fast_diff": {
+ "tools": {
"fast_diff": {
"bottle": "fast_diff",
"executable": "fast_diff"
}
- },
+ },
"platforms": [
{
"host": {
@@ -3945,7 +3945,7 @@
"default": true
}
]
- },
+ },
"cuda": {
"tools": {
"cuda": {
@@ -4284,14 +4284,14 @@
"default": true
}
]
- },
+ },
"allure_commandline": {
- "tools": {
+ "tools": {
"allure": {
"bottle": "allure_commandline",
"executable": "allure"
}
- },
+ },
"platforms": [
{
"host": {
@@ -7111,7 +7111,7 @@
"ant"
]
}
- },
+ },
"maven": {
"formula": {
"sandbox_id": 39222824,
@@ -7125,10 +7125,10 @@
}
},
"fast_diff": {
- "formula": {
- "sandbox_id": 42519064,
- "match": "fast_diff"
- }
+ "formula": {
+ "sandbox_id": 42519064,
+ "match": "fast_diff"
+ }
},
"kiwi_protos": {
"formula": {
@@ -7137,7 +7137,7 @@
}
},
"allure_commandline": {
- "formula": {
+ "formula": {
"sandbox_id": 569859192,
"match": "Allure Commandline"
},
@@ -7146,7 +7146,7 @@
"bin",
"allure"
]
- }
+ }
},
"ctags": {
"formula": {
diff --git a/build/ymake.core.conf b/build/ymake.core.conf
index d7541e352b..081833998b 100644
--- a/build/ymake.core.conf
+++ b/build/ymake.core.conf
@@ -1593,8 +1593,8 @@ module _LINK_UNIT: _BASE_UNIT {
CHECK_PROVIDES()
}
-MODULE_TYPE=UNKNOWN
-
+MODULE_TYPE=UNKNOWN
+
macro ADD_CLANG_TIDY() {
ADD_YTEST($MODULE_PREFIX$REALPRJNAME clang_tidy)
SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${ARCADIA_ROOT}/build/yandex_specific/config/clang_tidy/tidy_project_map.json)
@@ -1960,8 +1960,8 @@ module BOOSTTEST: _BASE_PROGRAM {
}
ADD_YTEST($MODULE_PREFIX$REALPRJNAME $BOOST_TEST_TYPE_STRING)
SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS canondata/result.json)
-}
-
+}
+
# tag:deprecated
### @usage BOOSTTEST_WITH_MAIN([name]) #deprecated
###
@@ -2024,8 +2024,8 @@ TEST_SRCS_VALUE=
### Documentation: https://wiki.yandex-team.ru/yatool/test/#testynapytest
macro TEST_SRCS(Tests...) {
SET_APPEND(TEST_SRCS_VALUE $Tests)
-}
-
+}
+
macro DISABLE_DATA_VALIDATION() {
DISABLE(VALIDATE_DATA)
}
@@ -2044,8 +2044,8 @@ TEST_DATA_VALUE=
macro DATA(Data...) {
SET_APPEND(TEST_DATA_VALUE $Data)
ADD_CHECK(check.data $Data)
-}
-
+}
+
# tag:test
TEST_TAGS_VALUE=
### @usage: TAG ([tag...])
@@ -2056,8 +2056,8 @@ TEST_TAGS_VALUE=
### Documentation: https://wiki.yandex-team.ru/yatool/test/#obshhieponjatija
macro TAG(Tags...) {
SET_APPEND(TEST_TAGS_VALUE $Tags)
-}
-
+}
+
# tag:test
TEST_REQUIREMENTS_VALUE=
### @usage: REQUIREMENTS([cpu:<count>] [disk_usage:<size>] [ram:<size>] [ram_disk:<size>] [container:<id>] [network:<restricted|full>] [dns:dns64])
@@ -2106,17 +2106,17 @@ TEST_PYTHON_PATH_VALUE=
### Set path to Python that will be used to run scripts in tests
macro PYTHON_PATH(Path) {
SET(TEST_PYTHON_PATH_VALUE $Path)
-}
-
+}
+
# tag:test
-SKIP_TEST_VALUE=
+SKIP_TEST_VALUE=
### @usage: SKIP_TEST(Reason)
###
### Skip the suite defined by the test module. Provide a reason to be output in the test execution report.
macro SKIP_TEST(Reason...) {
- SET(SKIP_TEST_VALUE $Reason)
-}
-
+ SET(SKIP_TEST_VALUE $Reason)
+}
+
# tag:test
LINT_LEVEL_VALUE=extended
### @usage: NO_LINT()
@@ -2124,7 +2124,7 @@ LINT_LEVEL_VALUE=extended
### Do not run style checks for files included in PY_SRCS, TEST_SRCS, JAVA_SRCS.
macro NO_LINT() {
SET(LINT_LEVEL_VALUE none)
-}
+}
### @usage: LINT(<none|base|strict>)
###
@@ -2132,7 +2132,7 @@ macro NO_LINT() {
macro LINT(level) {
SET(LINT_LEVEL_VALUE $level)
}
-
+
# tag:python-specific tag:test
### @usage: NO_DOCTESTS()
###
@@ -2158,8 +2158,8 @@ module _BASE_PYTEST: _BASE_PY_PROGRAM {
}
SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS canondata/result.json)
-}
-
+}
+
# tag:python-specific tag:deprecated tag:test
### @usage: PYTEST_BIN() #deprecated
###
@@ -2167,8 +2167,8 @@ module _BASE_PYTEST: _BASE_PY_PROGRAM {
module PYTEST_BIN: _BASE_PYTEST {
.NODE_TYPE=Program
SETUP_PYTEST_BIN()
-}
-
+}
+
# tag:python-specific tag:test
### @usage: PY2TEST([name])
###
@@ -2188,8 +2188,8 @@ module PY2TEST: PYTEST_BIN {
}
SET(MODULE_LANG PY2)
ASSERT(_OK You are using deprecated Python2-only code (PY2TEST). Please consider rewriting to Python 3.)
-}
-
+}
+
# tag:python-specific tag:deprecated tag:test
### @usage: PY3TEST_BIN() #deprecated
###
@@ -2198,7 +2198,7 @@ module PY3TEST_BIN: _BASE_PY3_PROGRAM {
.NODE_TYPE=Program
.FINAL_TARGET=no
.ALLOWED=YT_SPEC NO_DOCTESTS
- SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS canondata/result.json)
+ SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS canondata/result.json)
SET(MODULE_LANG PY3)
SETUP_PYTEST_BIN()
PEERDIR+=library/python/pytest
@@ -2258,8 +2258,8 @@ module GTEST_UGLY: _BASE_PROGRAM {
PEERDIR(contrib/restricted/googletest/googlemock contrib/restricted/googletest/googletest)
ADD_YTEST($MODULE_PREFIX$REALPRJNAME gtest)
SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS canondata/result.json)
-}
-
+}
+
# tag:test
### @usage: EXECTEST()
###
@@ -2291,11 +2291,11 @@ module EXECTEST: _BARE_UNIT {
.FINAL_TARGET=no
.ALLOWED=YT_SPEC
.RESTRICTED=FORK_TEST_FILES
- SET(MODULE_SUFFIX .pkg.fake)
- SETUP_EXECTEST()
+ SET(MODULE_SUFFIX .pkg.fake)
+ SETUP_EXECTEST()
SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS canondata/result.json)
-}
-
+}
+
# tag:cpp-specific tag:test
### @usage: Y_BENCHMARK([benchmarkname])
###
@@ -4111,9 +4111,9 @@ module _BASE_PY_PROGRAM: _BASE_PROGRAM {
.CMD=PY_PROGRAM_LINK_EXE
_ARCADIA_PYTHON_ADDINCL()
- when ($SANITIZER_TYPE && $SANITIZER_TYPE != "no") {
- NO_CHECK_IMPORTS_FOR_VALUE=
- }
+ when ($SANITIZER_TYPE && $SANITIZER_TYPE != "no") {
+ NO_CHECK_IMPORTS_FOR_VALUE=
+ }
ADD_CHECK_PY_IMPORTS()
when ($NO_PYTHON_INCLS != "yes") {
@@ -4176,9 +4176,9 @@ module _BASE_PY3_PROGRAM: _BASE_PROGRAM {
PEERDIR += contrib/tools/python3/src/Modules/_sqlite
}
- when ($SANITIZER_TYPE && $SANITIZER_TYPE != "no") {
- NO_CHECK_IMPORTS_FOR_VALUE=
- }
+ when ($SANITIZER_TYPE && $SANITIZER_TYPE != "no") {
+ NO_CHECK_IMPORTS_FOR_VALUE=
+ }
ADD_CHECK_PY_IMPORTS()
when ($ARCH_PPC64LE == "yes") {
@@ -5555,10 +5555,10 @@ FORK_TEST_FILES_MODE=
### Documentation about the system test: https://wiki.yandex-team.ru/yatool/test/
macro FORK_TEST_FILES() {
SET(FORK_TEST_FILES_MODE on)
-}
-
+}
+
# tag:test
-TEST_SIZE_NAME=SMALL
+TEST_SIZE_NAME=SMALL
### @usage: SIZE(SMALL/MEDIUM/LARGE)
###
### Set the 'size' for the test. Each 'size' has its own set of restrictions, SMALL being the most restricted and LARGE being the least.
@@ -7262,8 +7262,8 @@ TOUCH_PACKAGE=$YMAKE_PYTHON ${input:"build/scripts/touch.py"} ${kv;hide:"pc ligh
_P_PK=${kv;hide:"p PK"}
TOUCH_PACKAGE_MF=$GENERATE_MF && $TOUCH_PACKAGE $_P_PK
TOUCH_JAVA_UNIT=$YMAKE_PYTHON ${input:"build/scripts/touch.py"} ${kv;hide:"java $CURDIR"} $TARGET
-
-NO_CHECK_IMPORTS_FOR_VALUE=None
+
+NO_CHECK_IMPORTS_FOR_VALUE=None
### @usage: NO_CHECK_IMPORTS([patterns])
###
### Do not run checks on imports of Python modules.
diff --git a/contrib/python/PyHamcrest/ya.make b/contrib/python/PyHamcrest/ya.make
index a1abdd22bf..c24f0fbef8 100644
--- a/contrib/python/PyHamcrest/ya.make
+++ b/contrib/python/PyHamcrest/ya.make
@@ -75,8 +75,8 @@ PY_SRCS(
hamcrest/__init__.py
)
-NO_LINT()
-
+NO_LINT()
+
END()
RECURSE_FOR_TESTS(
diff --git a/contrib/python/PyYAML/py2/ya.make b/contrib/python/PyYAML/py2/ya.make
index f7226c09ef..1ec5c0c301 100644
--- a/contrib/python/PyYAML/py2/ya.make
+++ b/contrib/python/PyYAML/py2/ya.make
@@ -44,8 +44,8 @@ RESOURCE_FILES(
.dist-info/top_level.txt
)
-NO_LINT()
-
+NO_LINT()
+
NO_COMPILER_WARNINGS()
END()
diff --git a/contrib/python/PyYAML/py3/ya.make b/contrib/python/PyYAML/py3/ya.make
index 733ba14fd1..0401c04651 100644
--- a/contrib/python/PyYAML/py3/ya.make
+++ b/contrib/python/PyYAML/py3/ya.make
@@ -44,8 +44,8 @@ RESOURCE_FILES(
.dist-info/top_level.txt
)
-NO_LINT()
-
+NO_LINT()
+
NO_COMPILER_WARNINGS()
END()
diff --git a/contrib/python/PyYAML/ya.make b/contrib/python/PyYAML/ya.make
index 2f6ad8f377..a604fce51f 100644
--- a/contrib/python/PyYAML/ya.make
+++ b/contrib/python/PyYAML/ya.make
@@ -14,8 +14,8 @@ ELSE()
)
ENDIF()
-NO_LINT()
-
+NO_LINT()
+
END()
RECURSE(
diff --git a/contrib/python/ipython/py2/bin/ya.make b/contrib/python/ipython/py2/bin/ya.make
index 91c2577e6d..0ff960df6f 100644
--- a/contrib/python/ipython/py2/bin/ya.make
+++ b/contrib/python/ipython/py2/bin/ya.make
@@ -7,5 +7,5 @@ PEERDIR(
)
PY_MAIN(IPython:start_ipython)
-
+
END()
diff --git a/contrib/python/jedi/ya.make b/contrib/python/jedi/ya.make
index a43ef8c772..eff2fef2a7 100644
--- a/contrib/python/jedi/ya.make
+++ b/contrib/python/jedi/ya.make
@@ -92,6 +92,6 @@ RESOURCE_FILES(
jedi/evaluate/compiled/fake/posix.pym
)
-NO_LINT()
-
+NO_LINT()
+
END()
diff --git a/contrib/python/pexpect/ya.make b/contrib/python/pexpect/ya.make
index 323c5d7c43..a5bb92fcac 100644
--- a/contrib/python/pexpect/ya.make
+++ b/contrib/python/pexpect/ya.make
@@ -36,7 +36,7 @@ IF (PYTHON3)
pexpect/_async.py
)
ENDIF()
-
+
RESOURCE_FILES(
PREFIX contrib/python/pexpect/
.dist-info/METADATA
diff --git a/contrib/python/prompt-toolkit/ya.make b/contrib/python/prompt-toolkit/ya.make
index a790421800..f1f936eb3f 100644
--- a/contrib/python/prompt-toolkit/ya.make
+++ b/contrib/python/prompt-toolkit/ya.make
@@ -10,8 +10,8 @@ ELSE()
PEERDIR(contrib/python/prompt-toolkit/py3)
ENDIF()
-NO_LINT()
-
+NO_LINT()
+
END()
RECURSE(
diff --git a/contrib/python/py/LICENSE b/contrib/python/py/LICENSE
index 2e8709a73b..31ecdfb1db 100644
--- a/contrib/python/py/LICENSE
+++ b/contrib/python/py/LICENSE
@@ -1,19 +1,19 @@
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
- Permission is hereby granted, free of charge, to any person obtaining a copy
- of this software and associated documentation files (the "Software"), to deal
- in the Software without restriction, including without limitation the rights
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- copies of the Software, and to permit persons to whom the Software is
- furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in all
- copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- SOFTWARE.
-
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+
diff --git a/contrib/python/py/py/__init__.py b/contrib/python/py/py/__init__.py
index 3cbf23355a..b892ce1a2a 100644
--- a/contrib/python/py/py/__init__.py
+++ b/contrib/python/py/py/__init__.py
@@ -1,15 +1,15 @@
-"""
+"""
pylib: rapid testing and development utils
-
-this module uses apipkg.py for lazy-loading sub modules
-and classes. The initpkg-dictionary below specifies
-name->value mappings where value can be another namespace
-dictionary or an import path.
-
-(c) Holger Krekel and others, 2004-2014
-"""
+
+this module uses apipkg.py for lazy-loading sub modules
+and classes. The initpkg-dictionary below specifies
+name->value mappings where value can be another namespace
+dictionary or an import path.
+
+(c) Holger Krekel and others, 2004-2014
+"""
from py._error import error
-
+
try:
from py._vendored_packages import apipkg
lib_not_mangled_by_packagers = True
@@ -18,139 +18,139 @@ except ImportError:
import apipkg
lib_not_mangled_by_packagers = False
vendor_prefix = ''
-
+
try:
from ._version import version as __version__
except ImportError:
# broken installation, we don't even try
__version__ = "unknown"
-
+
apipkg.initpkg(__name__, attr={'_apipkg': apipkg, 'error': error}, exportdefs={
- # access to all standard lib modules
- 'std': '._std:std',
-
- '_pydir' : '.__metainfo:pydir',
- 'version': 'py:__version__', # backward compatibility
-
- # pytest-2.0 has a flat namespace, we use alias modules
- # to keep old references compatible
- 'test' : 'pytest',
-
- # hook into the top-level standard library
- 'process' : {
- '__doc__' : '._process:__doc__',
- 'cmdexec' : '._process.cmdexec:cmdexec',
- 'kill' : '._process.killproc:kill',
- 'ForkedFunc' : '._process.forkedfunc:ForkedFunc',
- },
-
- 'apipkg' : {
+ # access to all standard lib modules
+ 'std': '._std:std',
+
+ '_pydir' : '.__metainfo:pydir',
+ 'version': 'py:__version__', # backward compatibility
+
+ # pytest-2.0 has a flat namespace, we use alias modules
+ # to keep old references compatible
+ 'test' : 'pytest',
+
+ # hook into the top-level standard library
+ 'process' : {
+ '__doc__' : '._process:__doc__',
+ 'cmdexec' : '._process.cmdexec:cmdexec',
+ 'kill' : '._process.killproc:kill',
+ 'ForkedFunc' : '._process.forkedfunc:ForkedFunc',
+ },
+
+ 'apipkg' : {
'initpkg' : vendor_prefix + 'apipkg:initpkg',
'ApiModule' : vendor_prefix + 'apipkg:ApiModule',
- },
-
- 'iniconfig' : {
+ },
+
+ 'iniconfig' : {
'IniConfig' : vendor_prefix + 'iniconfig:IniConfig',
'ParseError' : vendor_prefix + 'iniconfig:ParseError',
- },
-
- 'path' : {
- '__doc__' : '._path:__doc__',
- 'svnwc' : '._path.svnwc:SvnWCCommandPath',
- 'svnurl' : '._path.svnurl:SvnCommandPath',
- 'local' : '._path.local:LocalPath',
- 'SvnAuth' : '._path.svnwc:SvnAuth',
- },
-
- # python inspection/code-generation API
- 'code' : {
- '__doc__' : '._code:__doc__',
- 'compile' : '._code.source:compile_',
- 'Source' : '._code.source:Source',
- 'Code' : '._code.code:Code',
- 'Frame' : '._code.code:Frame',
- 'ExceptionInfo' : '._code.code:ExceptionInfo',
- 'Traceback' : '._code.code:Traceback',
- 'getfslineno' : '._code.source:getfslineno',
- 'getrawcode' : '._code.code:getrawcode',
- 'patch_builtins' : '._code.code:patch_builtins',
- 'unpatch_builtins' : '._code.code:unpatch_builtins',
- '_AssertionError' : '._code.assertion:AssertionError',
- '_reinterpret_old' : '._code.assertion:reinterpret_old',
- '_reinterpret' : '._code.assertion:reinterpret',
- '_reprcompare' : '._code.assertion:_reprcompare',
- '_format_explanation' : '._code.assertion:_format_explanation',
- },
-
- # backports and additions of builtins
- 'builtin' : {
- '__doc__' : '._builtin:__doc__',
- 'enumerate' : '._builtin:enumerate',
- 'reversed' : '._builtin:reversed',
- 'sorted' : '._builtin:sorted',
- 'any' : '._builtin:any',
- 'all' : '._builtin:all',
- 'set' : '._builtin:set',
- 'frozenset' : '._builtin:frozenset',
- 'BaseException' : '._builtin:BaseException',
- 'GeneratorExit' : '._builtin:GeneratorExit',
- '_sysex' : '._builtin:_sysex',
- 'print_' : '._builtin:print_',
- '_reraise' : '._builtin:_reraise',
- '_tryimport' : '._builtin:_tryimport',
- 'exec_' : '._builtin:exec_',
- '_basestring' : '._builtin:_basestring',
- '_totext' : '._builtin:_totext',
- '_isbytes' : '._builtin:_isbytes',
- '_istext' : '._builtin:_istext',
- '_getimself' : '._builtin:_getimself',
- '_getfuncdict' : '._builtin:_getfuncdict',
- '_getcode' : '._builtin:_getcode',
- 'builtins' : '._builtin:builtins',
- 'execfile' : '._builtin:execfile',
- 'callable' : '._builtin:callable',
- 'bytes' : '._builtin:bytes',
- 'text' : '._builtin:text',
- },
-
- # input-output helping
- 'io' : {
- '__doc__' : '._io:__doc__',
- 'dupfile' : '._io.capture:dupfile',
- 'TextIO' : '._io.capture:TextIO',
- 'BytesIO' : '._io.capture:BytesIO',
- 'FDCapture' : '._io.capture:FDCapture',
- 'StdCapture' : '._io.capture:StdCapture',
- 'StdCaptureFD' : '._io.capture:StdCaptureFD',
- 'TerminalWriter' : '._io.terminalwriter:TerminalWriter',
- 'ansi_print' : '._io.terminalwriter:ansi_print',
- 'get_terminal_width' : '._io.terminalwriter:get_terminal_width',
- 'saferepr' : '._io.saferepr:saferepr',
- },
-
- # small and mean xml/html generation
- 'xml' : {
- '__doc__' : '._xmlgen:__doc__',
- 'html' : '._xmlgen:html',
- 'Tag' : '._xmlgen:Tag',
- 'raw' : '._xmlgen:raw',
- 'Namespace' : '._xmlgen:Namespace',
- 'escape' : '._xmlgen:escape',
- },
-
- 'log' : {
- # logging API ('producers' and 'consumers' connected via keywords)
- '__doc__' : '._log:__doc__',
- '_apiwarn' : '._log.warning:_apiwarn',
- 'Producer' : '._log.log:Producer',
- 'setconsumer' : '._log.log:setconsumer',
- '_setstate' : '._log.log:setstate',
- '_getstate' : '._log.log:getstate',
- 'Path' : '._log.log:Path',
- 'STDOUT' : '._log.log:STDOUT',
- 'STDERR' : '._log.log:STDERR',
- 'Syslog' : '._log.log:Syslog',
- },
-
-})
+ },
+
+ 'path' : {
+ '__doc__' : '._path:__doc__',
+ 'svnwc' : '._path.svnwc:SvnWCCommandPath',
+ 'svnurl' : '._path.svnurl:SvnCommandPath',
+ 'local' : '._path.local:LocalPath',
+ 'SvnAuth' : '._path.svnwc:SvnAuth',
+ },
+
+ # python inspection/code-generation API
+ 'code' : {
+ '__doc__' : '._code:__doc__',
+ 'compile' : '._code.source:compile_',
+ 'Source' : '._code.source:Source',
+ 'Code' : '._code.code:Code',
+ 'Frame' : '._code.code:Frame',
+ 'ExceptionInfo' : '._code.code:ExceptionInfo',
+ 'Traceback' : '._code.code:Traceback',
+ 'getfslineno' : '._code.source:getfslineno',
+ 'getrawcode' : '._code.code:getrawcode',
+ 'patch_builtins' : '._code.code:patch_builtins',
+ 'unpatch_builtins' : '._code.code:unpatch_builtins',
+ '_AssertionError' : '._code.assertion:AssertionError',
+ '_reinterpret_old' : '._code.assertion:reinterpret_old',
+ '_reinterpret' : '._code.assertion:reinterpret',
+ '_reprcompare' : '._code.assertion:_reprcompare',
+ '_format_explanation' : '._code.assertion:_format_explanation',
+ },
+
+ # backports and additions of builtins
+ 'builtin' : {
+ '__doc__' : '._builtin:__doc__',
+ 'enumerate' : '._builtin:enumerate',
+ 'reversed' : '._builtin:reversed',
+ 'sorted' : '._builtin:sorted',
+ 'any' : '._builtin:any',
+ 'all' : '._builtin:all',
+ 'set' : '._builtin:set',
+ 'frozenset' : '._builtin:frozenset',
+ 'BaseException' : '._builtin:BaseException',
+ 'GeneratorExit' : '._builtin:GeneratorExit',
+ '_sysex' : '._builtin:_sysex',
+ 'print_' : '._builtin:print_',
+ '_reraise' : '._builtin:_reraise',
+ '_tryimport' : '._builtin:_tryimport',
+ 'exec_' : '._builtin:exec_',
+ '_basestring' : '._builtin:_basestring',
+ '_totext' : '._builtin:_totext',
+ '_isbytes' : '._builtin:_isbytes',
+ '_istext' : '._builtin:_istext',
+ '_getimself' : '._builtin:_getimself',
+ '_getfuncdict' : '._builtin:_getfuncdict',
+ '_getcode' : '._builtin:_getcode',
+ 'builtins' : '._builtin:builtins',
+ 'execfile' : '._builtin:execfile',
+ 'callable' : '._builtin:callable',
+ 'bytes' : '._builtin:bytes',
+ 'text' : '._builtin:text',
+ },
+
+ # input-output helping
+ 'io' : {
+ '__doc__' : '._io:__doc__',
+ 'dupfile' : '._io.capture:dupfile',
+ 'TextIO' : '._io.capture:TextIO',
+ 'BytesIO' : '._io.capture:BytesIO',
+ 'FDCapture' : '._io.capture:FDCapture',
+ 'StdCapture' : '._io.capture:StdCapture',
+ 'StdCaptureFD' : '._io.capture:StdCaptureFD',
+ 'TerminalWriter' : '._io.terminalwriter:TerminalWriter',
+ 'ansi_print' : '._io.terminalwriter:ansi_print',
+ 'get_terminal_width' : '._io.terminalwriter:get_terminal_width',
+ 'saferepr' : '._io.saferepr:saferepr',
+ },
+
+ # small and mean xml/html generation
+ 'xml' : {
+ '__doc__' : '._xmlgen:__doc__',
+ 'html' : '._xmlgen:html',
+ 'Tag' : '._xmlgen:Tag',
+ 'raw' : '._xmlgen:raw',
+ 'Namespace' : '._xmlgen:Namespace',
+ 'escape' : '._xmlgen:escape',
+ },
+
+ 'log' : {
+ # logging API ('producers' and 'consumers' connected via keywords)
+ '__doc__' : '._log:__doc__',
+ '_apiwarn' : '._log.warning:_apiwarn',
+ 'Producer' : '._log.log:Producer',
+ 'setconsumer' : '._log.log:setconsumer',
+ '_setstate' : '._log.log:setstate',
+ '_getstate' : '._log.log:getstate',
+ 'Path' : '._log.log:Path',
+ 'STDOUT' : '._log.log:STDOUT',
+ 'STDERR' : '._log.log:STDERR',
+ 'Syslog' : '._log.log:Syslog',
+ },
+
+})
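
The exportdefs mapping above is handed to apipkg.initpkg(), which swaps the 'py' module for a lazy namespace: each dotted value string is imported only on first attribute access, keeping 'import py' cheap. A usage sketch, assuming the py package is importable:

    import py

    # Nothing from py._io is loaded yet; the first attribute access
    # triggers the lazy import declared in exportdefs above.
    print(py.io.saferepr({'key': 'x' * 100}))   # size-capped, safe repr
    print(py.path.local('.'))                   # -> py._path.local:LocalPath
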
diff --git a/contrib/python/py/py/__metainfo.py b/contrib/python/py/py/__metainfo.py
index 783d908baa..12581eb7af 100644
--- a/contrib/python/py/py/__metainfo.py
+++ b/contrib/python/py/py/__metainfo.py
@@ -1,2 +1,2 @@
-import py
-pydir = py.path.local(py.__file__).dirpath()
+import py
+pydir = py.path.local(py.__file__).dirpath()
diff --git a/contrib/python/py/py/_builtin.py b/contrib/python/py/py/_builtin.py
index 1345506c91..ddc89fc7be 100644
--- a/contrib/python/py/py/_builtin.py
+++ b/contrib/python/py/py/_builtin.py
@@ -1,6 +1,6 @@
-import sys
-
-
+import sys
+
+
# Passthrough for builtins supported with py27.
BaseException = BaseException
GeneratorExit = GeneratorExit
@@ -8,142 +8,142 @@ _sysex = (KeyboardInterrupt, SystemExit, MemoryError, GeneratorExit)
all = all
any = any
callable = callable
-enumerate = enumerate
+enumerate = enumerate
reversed = reversed
set, frozenset = set, frozenset
sorted = sorted
-
-
-if sys.version_info >= (3, 0):
+
+
+if sys.version_info >= (3, 0):
exec("print_ = print ; exec_=exec")
- import builtins
-
- # some backward compatibility helpers
- _basestring = str
- def _totext(obj, encoding=None, errors=None):
- if isinstance(obj, bytes):
- if errors is None:
- obj = obj.decode(encoding)
- else:
- obj = obj.decode(encoding, errors)
- elif not isinstance(obj, str):
- obj = str(obj)
- return obj
-
- def _isbytes(x):
- return isinstance(x, bytes)
-
- def _istext(x):
- return isinstance(x, str)
-
- text = str
- bytes = bytes
-
- def _getimself(function):
- return getattr(function, '__self__', None)
-
- def _getfuncdict(function):
- return getattr(function, "__dict__", None)
-
- def _getcode(function):
- return getattr(function, "__code__", None)
-
- def execfile(fn, globs=None, locs=None):
- if globs is None:
- back = sys._getframe(1)
- globs = back.f_globals
- locs = back.f_locals
- del back
- elif locs is None:
- locs = globs
- fp = open(fn, "r")
- try:
- source = fp.read()
- finally:
- fp.close()
- co = compile(source, fn, "exec", dont_inherit=True)
- exec_(co, globs, locs)
-
-else:
- import __builtin__ as builtins
- _totext = unicode
- _basestring = basestring
- text = unicode
- bytes = str
- execfile = execfile
- callable = callable
- def _isbytes(x):
- return isinstance(x, str)
- def _istext(x):
- return isinstance(x, unicode)
-
- def _getimself(function):
- return getattr(function, 'im_self', None)
-
- def _getfuncdict(function):
- return getattr(function, "__dict__", None)
-
- def _getcode(function):
- try:
- return getattr(function, "__code__")
- except AttributeError:
- return getattr(function, "func_code", None)
-
- def print_(*args, **kwargs):
- """ minimal backport of py3k print statement. """
- sep = ' '
- if 'sep' in kwargs:
- sep = kwargs.pop('sep')
- end = '\n'
- if 'end' in kwargs:
- end = kwargs.pop('end')
- file = 'file' in kwargs and kwargs.pop('file') or sys.stdout
- if kwargs:
- args = ", ".join([str(x) for x in kwargs])
- raise TypeError("invalid keyword arguments: %s" % args)
- at_start = True
- for x in args:
- if not at_start:
- file.write(sep)
- file.write(str(x))
- at_start = False
- file.write(end)
-
- def exec_(obj, globals=None, locals=None):
- """ minimal backport of py3k exec statement. """
- __tracebackhide__ = True
- if globals is None:
- frame = sys._getframe(1)
- globals = frame.f_globals
- if locals is None:
- locals = frame.f_locals
- elif locals is None:
- locals = globals
- exec2(obj, globals, locals)
-
-if sys.version_info >= (3, 0):
- def _reraise(cls, val, tb):
- __tracebackhide__ = True
- assert hasattr(val, '__traceback__')
- raise cls.with_traceback(val, tb)
-else:
- exec ("""
-def _reraise(cls, val, tb):
- __tracebackhide__ = True
- raise cls, val, tb
-def exec2(obj, globals, locals):
- __tracebackhide__ = True
- exec obj in globals, locals
-""")
-
-def _tryimport(*names):
- """ return the first successfully imported module. """
- assert names
- for name in names:
- try:
- __import__(name)
- except ImportError:
- excinfo = sys.exc_info()
- else:
- return sys.modules[name]
- _reraise(*excinfo)
+ import builtins
+
+ # some backward compatibility helpers
+ _basestring = str
+ def _totext(obj, encoding=None, errors=None):
+ if isinstance(obj, bytes):
+ if errors is None:
+ obj = obj.decode(encoding)
+ else:
+ obj = obj.decode(encoding, errors)
+ elif not isinstance(obj, str):
+ obj = str(obj)
+ return obj
+
+ def _isbytes(x):
+ return isinstance(x, bytes)
+
+ def _istext(x):
+ return isinstance(x, str)
+
+ text = str
+ bytes = bytes
+
+ def _getimself(function):
+ return getattr(function, '__self__', None)
+
+ def _getfuncdict(function):
+ return getattr(function, "__dict__", None)
+
+ def _getcode(function):
+ return getattr(function, "__code__", None)
+
+ def execfile(fn, globs=None, locs=None):
+ if globs is None:
+ back = sys._getframe(1)
+ globs = back.f_globals
+ locs = back.f_locals
+ del back
+ elif locs is None:
+ locs = globs
+ fp = open(fn, "r")
+ try:
+ source = fp.read()
+ finally:
+ fp.close()
+ co = compile(source, fn, "exec", dont_inherit=True)
+ exec_(co, globs, locs)
+
+else:
+ import __builtin__ as builtins
+ _totext = unicode
+ _basestring = basestring
+ text = unicode
+ bytes = str
+ execfile = execfile
+ callable = callable
+ def _isbytes(x):
+ return isinstance(x, str)
+ def _istext(x):
+ return isinstance(x, unicode)
+
+ def _getimself(function):
+ return getattr(function, 'im_self', None)
+
+ def _getfuncdict(function):
+ return getattr(function, "__dict__", None)
+
+ def _getcode(function):
+ try:
+ return getattr(function, "__code__")
+ except AttributeError:
+ return getattr(function, "func_code", None)
+
+ def print_(*args, **kwargs):
+ """ minimal backport of py3k print statement. """
+ sep = ' '
+ if 'sep' in kwargs:
+ sep = kwargs.pop('sep')
+ end = '\n'
+ if 'end' in kwargs:
+ end = kwargs.pop('end')
+ file = 'file' in kwargs and kwargs.pop('file') or sys.stdout
+ if kwargs:
+ args = ", ".join([str(x) for x in kwargs])
+ raise TypeError("invalid keyword arguments: %s" % args)
+ at_start = True
+ for x in args:
+ if not at_start:
+ file.write(sep)
+ file.write(str(x))
+ at_start = False
+ file.write(end)
+
+ def exec_(obj, globals=None, locals=None):
+ """ minimal backport of py3k exec statement. """
+ __tracebackhide__ = True
+ if globals is None:
+ frame = sys._getframe(1)
+ globals = frame.f_globals
+ if locals is None:
+ locals = frame.f_locals
+ elif locals is None:
+ locals = globals
+ exec2(obj, globals, locals)
+
+if sys.version_info >= (3, 0):
+ def _reraise(cls, val, tb):
+ __tracebackhide__ = True
+ assert hasattr(val, '__traceback__')
+ raise cls.with_traceback(val, tb)
+else:
+ exec ("""
+def _reraise(cls, val, tb):
+ __tracebackhide__ = True
+ raise cls, val, tb
+def exec2(obj, globals, locals):
+ __tracebackhide__ = True
+ exec obj in globals, locals
+""")
+
+def _tryimport(*names):
+ """ return the first successfully imported module. """
+ assert names
+ for name in names:
+ try:
+ __import__(name)
+ except ImportError:
+ excinfo = sys.exc_info()
+ else:
+ return sys.modules[name]
+ _reraise(*excinfo)
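
_builtin.py funnels the py2/py3 split into one import point: on either line, _totext() yields text from bytes, _isbytes()/_istext() classify values without shadowing the builtins, and print_/exec_ are callable functions even where print and exec are statements. A hedged usage sketch:

    from py._builtin import _totext, _isbytes, print_

    s = _totext(b'caf\xc3\xa9', 'utf-8')   # bytes -> text on py2 and py3
    assert not _isbytes(s)
    print_('decoded:', s, sep=' ')         # keyword-style print everywhere
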
diff --git a/contrib/python/py/py/_code/__init__.py b/contrib/python/py/py/_code/__init__.py
index 9cf934f614..f15acf8513 100644
--- a/contrib/python/py/py/_code/__init__.py
+++ b/contrib/python/py/py/_code/__init__.py
@@ -1 +1 @@
-""" python inspection/code generation API """
+""" python inspection/code generation API """
diff --git a/contrib/python/py/py/_code/_assertionnew.py b/contrib/python/py/py/_code/_assertionnew.py
index 5e136e1f6f..d03f29d870 100644
--- a/contrib/python/py/py/_code/_assertionnew.py
+++ b/contrib/python/py/py/_code/_assertionnew.py
@@ -1,322 +1,322 @@
-"""
-Find intermediate evalutation results in assert statements through builtin AST.
-This should replace _assertionold.py eventually.
-"""
-
-import sys
-import ast
-
-import py
-from py._code.assertion import _format_explanation, BuiltinAssertionError
-
-
+"""
+Find intermediate evaluation results in assert statements through builtin AST.
+This should replace _assertionold.py eventually.
+"""
+
+import sys
+import ast
+
+import py
+from py._code.assertion import _format_explanation, BuiltinAssertionError
+
+
def _is_ast_expr(node):
return isinstance(node, ast.expr)
def _is_ast_stmt(node):
return isinstance(node, ast.stmt)
-
-
-class Failure(Exception):
- """Error found while interpreting AST."""
-
- def __init__(self, explanation=""):
- self.cause = sys.exc_info()
- self.explanation = explanation
-
-
-def interpret(source, frame, should_fail=False):
- mod = ast.parse(source)
- visitor = DebugInterpreter(frame)
- try:
- visitor.visit(mod)
- except Failure:
- failure = sys.exc_info()[1]
- return getfailure(failure)
- if should_fail:
- return ("(assertion failed, but when it was re-run for "
- "printing intermediate values, it did not fail. Suggestions: "
- "compute assert expression before the assert or use --no-assert)")
-
-def run(offending_line, frame=None):
- if frame is None:
- frame = py.code.Frame(sys._getframe(1))
- return interpret(offending_line, frame)
-
-def getfailure(failure):
- explanation = _format_explanation(failure.explanation)
- value = failure.cause[1]
- if str(value):
- lines = explanation.splitlines()
- if not lines:
- lines.append("")
- lines[0] += " << %s" % (value,)
- explanation = "\n".join(lines)
- text = "%s: %s" % (failure.cause[0].__name__, explanation)
- if text.startswith("AssertionError: assert "):
- text = text[16:]
- return text
-
-
-operator_map = {
- ast.BitOr : "|",
- ast.BitXor : "^",
- ast.BitAnd : "&",
- ast.LShift : "<<",
- ast.RShift : ">>",
- ast.Add : "+",
- ast.Sub : "-",
- ast.Mult : "*",
- ast.Div : "/",
- ast.FloorDiv : "//",
- ast.Mod : "%",
- ast.Eq : "==",
- ast.NotEq : "!=",
- ast.Lt : "<",
- ast.LtE : "<=",
- ast.Gt : ">",
- ast.GtE : ">=",
- ast.Pow : "**",
- ast.Is : "is",
- ast.IsNot : "is not",
- ast.In : "in",
- ast.NotIn : "not in"
-}
-
-unary_map = {
- ast.Not : "not %s",
- ast.Invert : "~%s",
- ast.USub : "-%s",
- ast.UAdd : "+%s"
-}
-
-
-class DebugInterpreter(ast.NodeVisitor):
- """Interpret AST nodes to gleam useful debugging information. """
-
- def __init__(self, frame):
- self.frame = frame
-
- def generic_visit(self, node):
- # Fallback when we don't have a special implementation.
- if _is_ast_expr(node):
- mod = ast.Expression(node)
- co = self._compile(mod)
- try:
- result = self.frame.eval(co)
- except Exception:
- raise Failure()
- explanation = self.frame.repr(result)
- return explanation, result
- elif _is_ast_stmt(node):
- mod = ast.Module([node])
- co = self._compile(mod, "exec")
- try:
- self.frame.exec_(co)
- except Exception:
- raise Failure()
- return None, None
- else:
- raise AssertionError("can't handle %s" %(node,))
-
- def _compile(self, source, mode="eval"):
- return compile(source, "<assertion interpretation>", mode)
-
- def visit_Expr(self, expr):
- return self.visit(expr.value)
-
- def visit_Module(self, mod):
- for stmt in mod.body:
- self.visit(stmt)
-
- def visit_Name(self, name):
- explanation, result = self.generic_visit(name)
- # See if the name is local.
- source = "%r in locals() is not globals()" % (name.id,)
- co = self._compile(source)
- try:
- local = self.frame.eval(co)
- except Exception:
- # have to assume it isn't
- local = False
- if not local:
- return name.id, result
- return explanation, result
-
- def visit_Compare(self, comp):
- left = comp.left
- left_explanation, left_result = self.visit(left)
- for op, next_op in zip(comp.ops, comp.comparators):
- next_explanation, next_result = self.visit(next_op)
- op_symbol = operator_map[op.__class__]
- explanation = "%s %s %s" % (left_explanation, op_symbol,
- next_explanation)
- source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
- co = self._compile(source)
- try:
- result = self.frame.eval(co, __exprinfo_left=left_result,
- __exprinfo_right=next_result)
- except Exception:
- raise Failure(explanation)
- try:
- if not result:
- break
- except KeyboardInterrupt:
- raise
- except:
- break
- left_explanation, left_result = next_explanation, next_result
-
- rcomp = py.code._reprcompare
- if rcomp:
- res = rcomp(op_symbol, left_result, next_result)
- if res:
- explanation = res
- return explanation, result
-
- def visit_BoolOp(self, boolop):
- is_or = isinstance(boolop.op, ast.Or)
- explanations = []
- for operand in boolop.values:
- explanation, result = self.visit(operand)
- explanations.append(explanation)
- if result == is_or:
- break
- name = is_or and " or " or " and "
- explanation = "(" + name.join(explanations) + ")"
- return explanation, result
-
- def visit_UnaryOp(self, unary):
- pattern = unary_map[unary.op.__class__]
- operand_explanation, operand_result = self.visit(unary.operand)
- explanation = pattern % (operand_explanation,)
- co = self._compile(pattern % ("__exprinfo_expr",))
- try:
- result = self.frame.eval(co, __exprinfo_expr=operand_result)
- except Exception:
- raise Failure(explanation)
- return explanation, result
-
- def visit_BinOp(self, binop):
- left_explanation, left_result = self.visit(binop.left)
- right_explanation, right_result = self.visit(binop.right)
- symbol = operator_map[binop.op.__class__]
- explanation = "(%s %s %s)" % (left_explanation, symbol,
- right_explanation)
- source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
- co = self._compile(source)
- try:
- result = self.frame.eval(co, __exprinfo_left=left_result,
- __exprinfo_right=right_result)
- except Exception:
- raise Failure(explanation)
- return explanation, result
-
- def visit_Call(self, call):
- func_explanation, func = self.visit(call.func)
- arg_explanations = []
- ns = {"__exprinfo_func" : func}
- arguments = []
- for arg in call.args:
- arg_explanation, arg_result = self.visit(arg)
- arg_name = "__exprinfo_%s" % (len(ns),)
- ns[arg_name] = arg_result
- arguments.append(arg_name)
- arg_explanations.append(arg_explanation)
- for keyword in call.keywords:
- arg_explanation, arg_result = self.visit(keyword.value)
- arg_name = "__exprinfo_%s" % (len(ns),)
- ns[arg_name] = arg_result
- keyword_source = "%s=%%s" % (keyword.arg)
- arguments.append(keyword_source % (arg_name,))
- arg_explanations.append(keyword_source % (arg_explanation,))
- if call.starargs:
- arg_explanation, arg_result = self.visit(call.starargs)
- arg_name = "__exprinfo_star"
- ns[arg_name] = arg_result
- arguments.append("*%s" % (arg_name,))
- arg_explanations.append("*%s" % (arg_explanation,))
- if call.kwargs:
- arg_explanation, arg_result = self.visit(call.kwargs)
- arg_name = "__exprinfo_kwds"
- ns[arg_name] = arg_result
- arguments.append("**%s" % (arg_name,))
- arg_explanations.append("**%s" % (arg_explanation,))
- args_explained = ", ".join(arg_explanations)
- explanation = "%s(%s)" % (func_explanation, args_explained)
- args = ", ".join(arguments)
- source = "__exprinfo_func(%s)" % (args,)
- co = self._compile(source)
- try:
- result = self.frame.eval(co, **ns)
- except Exception:
- raise Failure(explanation)
- pattern = "%s\n{%s = %s\n}"
- rep = self.frame.repr(result)
- explanation = pattern % (rep, rep, explanation)
- return explanation, result
-
- def _is_builtin_name(self, name):
- pattern = "%r not in globals() and %r not in locals()"
- source = pattern % (name.id, name.id)
- co = self._compile(source)
- try:
- return self.frame.eval(co)
- except Exception:
- return False
-
- def visit_Attribute(self, attr):
- if not isinstance(attr.ctx, ast.Load):
- return self.generic_visit(attr)
- source_explanation, source_result = self.visit(attr.value)
- explanation = "%s.%s" % (source_explanation, attr.attr)
- source = "__exprinfo_expr.%s" % (attr.attr,)
- co = self._compile(source)
- try:
- result = self.frame.eval(co, __exprinfo_expr=source_result)
- except Exception:
- raise Failure(explanation)
- explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
- self.frame.repr(result),
- source_explanation, attr.attr)
- # Check if the attr is from an instance.
- source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
- source = source % (attr.attr,)
- co = self._compile(source)
- try:
- from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
- except Exception:
- from_instance = True
- if from_instance:
- rep = self.frame.repr(result)
- pattern = "%s\n{%s = %s\n}"
- explanation = pattern % (rep, rep, explanation)
- return explanation, result
-
- def visit_Assert(self, assrt):
- test_explanation, test_result = self.visit(assrt.test)
- if test_explanation.startswith("False\n{False =") and \
- test_explanation.endswith("\n"):
- test_explanation = test_explanation[15:-2]
- explanation = "assert %s" % (test_explanation,)
- if not test_result:
- try:
- raise BuiltinAssertionError
- except Exception:
- raise Failure(explanation)
- return explanation, test_result
-
- def visit_Assign(self, assign):
- value_explanation, value_result = self.visit(assign.value)
- explanation = "... = %s" % (value_explanation,)
- name = ast.Name("__exprinfo_expr", ast.Load(),
- lineno=assign.value.lineno,
- col_offset=assign.value.col_offset)
- new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
- col_offset=assign.col_offset)
- mod = ast.Module([new_assign])
- co = self._compile(mod, "exec")
- try:
- self.frame.exec_(co, __exprinfo_expr=value_result)
- except Exception:
- raise Failure(explanation)
- return explanation, value_result
+
+
+class Failure(Exception):
+ """Error found while interpreting AST."""
+
+ def __init__(self, explanation=""):
+ self.cause = sys.exc_info()
+ self.explanation = explanation
+
+
+def interpret(source, frame, should_fail=False):
+ mod = ast.parse(source)
+ visitor = DebugInterpreter(frame)
+ try:
+ visitor.visit(mod)
+ except Failure:
+ failure = sys.exc_info()[1]
+ return getfailure(failure)
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --no-assert)")
+
+def run(offending_line, frame=None):
+ if frame is None:
+ frame = py.code.Frame(sys._getframe(1))
+ return interpret(offending_line, frame)
+
+def getfailure(failure):
+ explanation = _format_explanation(failure.explanation)
+ value = failure.cause[1]
+ if str(value):
+ lines = explanation.splitlines()
+ if not lines:
+ lines.append("")
+ lines[0] += " << %s" % (value,)
+ explanation = "\n".join(lines)
+ text = "%s: %s" % (failure.cause[0].__name__, explanation)
+ if text.startswith("AssertionError: assert "):
+ text = text[16:]
+ return text
+
+
+operator_map = {
+ ast.BitOr : "|",
+ ast.BitXor : "^",
+ ast.BitAnd : "&",
+ ast.LShift : "<<",
+ ast.RShift : ">>",
+ ast.Add : "+",
+ ast.Sub : "-",
+ ast.Mult : "*",
+ ast.Div : "/",
+ ast.FloorDiv : "//",
+ ast.Mod : "%",
+ ast.Eq : "==",
+ ast.NotEq : "!=",
+ ast.Lt : "<",
+ ast.LtE : "<=",
+ ast.Gt : ">",
+ ast.GtE : ">=",
+ ast.Pow : "**",
+ ast.Is : "is",
+ ast.IsNot : "is not",
+ ast.In : "in",
+ ast.NotIn : "not in"
+}
+
+unary_map = {
+ ast.Not : "not %s",
+ ast.Invert : "~%s",
+ ast.USub : "-%s",
+ ast.UAdd : "+%s"
+}
+
+
+class DebugInterpreter(ast.NodeVisitor):
+    """Interpret AST nodes to glean useful debugging information. """
+
+ def __init__(self, frame):
+ self.frame = frame
+
+ def generic_visit(self, node):
+ # Fallback when we don't have a special implementation.
+ if _is_ast_expr(node):
+ mod = ast.Expression(node)
+ co = self._compile(mod)
+ try:
+ result = self.frame.eval(co)
+ except Exception:
+ raise Failure()
+ explanation = self.frame.repr(result)
+ return explanation, result
+ elif _is_ast_stmt(node):
+ mod = ast.Module([node])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co)
+ except Exception:
+ raise Failure()
+ return None, None
+ else:
+ raise AssertionError("can't handle %s" %(node,))
+
+ def _compile(self, source, mode="eval"):
+ return compile(source, "<assertion interpretation>", mode)
+
+ def visit_Expr(self, expr):
+ return self.visit(expr.value)
+
+ def visit_Module(self, mod):
+ for stmt in mod.body:
+ self.visit(stmt)
+
+ def visit_Name(self, name):
+ explanation, result = self.generic_visit(name)
+ # See if the name is local.
+ source = "%r in locals() is not globals()" % (name.id,)
+ co = self._compile(source)
+ try:
+ local = self.frame.eval(co)
+ except Exception:
+ # have to assume it isn't
+ local = False
+ if not local:
+ return name.id, result
+ return explanation, result
+
+ def visit_Compare(self, comp):
+ left = comp.left
+ left_explanation, left_result = self.visit(left)
+ for op, next_op in zip(comp.ops, comp.comparators):
+ next_explanation, next_result = self.visit(next_op)
+ op_symbol = operator_map[op.__class__]
+ explanation = "%s %s %s" % (left_explanation, op_symbol,
+ next_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=next_result)
+ except Exception:
+ raise Failure(explanation)
+ try:
+ if not result:
+ break
+ except KeyboardInterrupt:
+ raise
+ except:
+ break
+ left_explanation, left_result = next_explanation, next_result
+
+ rcomp = py.code._reprcompare
+ if rcomp:
+ res = rcomp(op_symbol, left_result, next_result)
+ if res:
+ explanation = res
+ return explanation, result
+
+ def visit_BoolOp(self, boolop):
+ is_or = isinstance(boolop.op, ast.Or)
+ explanations = []
+ for operand in boolop.values:
+ explanation, result = self.visit(operand)
+ explanations.append(explanation)
+ if result == is_or:
+ break
+ name = is_or and " or " or " and "
+ explanation = "(" + name.join(explanations) + ")"
+ return explanation, result
+
+ def visit_UnaryOp(self, unary):
+ pattern = unary_map[unary.op.__class__]
+ operand_explanation, operand_result = self.visit(unary.operand)
+ explanation = pattern % (operand_explanation,)
+ co = self._compile(pattern % ("__exprinfo_expr",))
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=operand_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_BinOp(self, binop):
+ left_explanation, left_result = self.visit(binop.left)
+ right_explanation, right_result = self.visit(binop.right)
+ symbol = operator_map[binop.op.__class__]
+ explanation = "(%s %s %s)" % (left_explanation, symbol,
+ right_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=right_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_Call(self, call):
+ func_explanation, func = self.visit(call.func)
+ arg_explanations = []
+ ns = {"__exprinfo_func" : func}
+ arguments = []
+ for arg in call.args:
+ arg_explanation, arg_result = self.visit(arg)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ arguments.append(arg_name)
+ arg_explanations.append(arg_explanation)
+ for keyword in call.keywords:
+ arg_explanation, arg_result = self.visit(keyword.value)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ keyword_source = "%s=%%s" % (keyword.arg)
+ arguments.append(keyword_source % (arg_name,))
+ arg_explanations.append(keyword_source % (arg_explanation,))
+ if call.starargs:
+ arg_explanation, arg_result = self.visit(call.starargs)
+ arg_name = "__exprinfo_star"
+ ns[arg_name] = arg_result
+ arguments.append("*%s" % (arg_name,))
+ arg_explanations.append("*%s" % (arg_explanation,))
+ if call.kwargs:
+ arg_explanation, arg_result = self.visit(call.kwargs)
+ arg_name = "__exprinfo_kwds"
+ ns[arg_name] = arg_result
+ arguments.append("**%s" % (arg_name,))
+ arg_explanations.append("**%s" % (arg_explanation,))
+ args_explained = ", ".join(arg_explanations)
+ explanation = "%s(%s)" % (func_explanation, args_explained)
+ args = ", ".join(arguments)
+ source = "__exprinfo_func(%s)" % (args,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, **ns)
+ except Exception:
+ raise Failure(explanation)
+ pattern = "%s\n{%s = %s\n}"
+ rep = self.frame.repr(result)
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def _is_builtin_name(self, name):
+ pattern = "%r not in globals() and %r not in locals()"
+ source = pattern % (name.id, name.id)
+ co = self._compile(source)
+ try:
+ return self.frame.eval(co)
+ except Exception:
+ return False
+
+ def visit_Attribute(self, attr):
+ if not isinstance(attr.ctx, ast.Load):
+ return self.generic_visit(attr)
+ source_explanation, source_result = self.visit(attr.value)
+ explanation = "%s.%s" % (source_explanation, attr.attr)
+ source = "__exprinfo_expr.%s" % (attr.attr,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ raise Failure(explanation)
+ explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
+ self.frame.repr(result),
+ source_explanation, attr.attr)
+ # Check if the attr is from an instance.
+ source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
+ source = source % (attr.attr,)
+ co = self._compile(source)
+ try:
+ from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ from_instance = True
+ if from_instance:
+ rep = self.frame.repr(result)
+ pattern = "%s\n{%s = %s\n}"
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def visit_Assert(self, assrt):
+ test_explanation, test_result = self.visit(assrt.test)
+ if test_explanation.startswith("False\n{False =") and \
+ test_explanation.endswith("\n"):
+ test_explanation = test_explanation[15:-2]
+ explanation = "assert %s" % (test_explanation,)
+ if not test_result:
+ try:
+ raise BuiltinAssertionError
+ except Exception:
+ raise Failure(explanation)
+ return explanation, test_result
+
+ def visit_Assign(self, assign):
+ value_explanation, value_result = self.visit(assign.value)
+ explanation = "... = %s" % (value_explanation,)
+ name = ast.Name("__exprinfo_expr", ast.Load(),
+ lineno=assign.value.lineno,
+ col_offset=assign.value.col_offset)
+ new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
+ col_offset=assign.col_offset)
+ mod = ast.Module([new_assign])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co, __exprinfo_expr=value_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, value_result
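The visit_* methods above all share one idiom: each sub-expression value is bound to a synthetic __exprinfo_* name, a small source string is compiled in "eval" mode, and the string is evaluated inside the failing frame, so the textual explanation is assembled right next to the real value. A minimal, self-contained sketch of that idiom (eval_in_frame is a hypothetical stand-in for py.code.Frame.eval and is not part of the code above):

def eval_in_frame(source, f_globals, f_locals, **extra):
    # 'extra' carries the temporary __exprinfo_* bindings
    ns = dict(f_locals)
    ns.update(extra)
    co = compile(source, "<assertion interpretation>", "eval")
    return eval(co, f_globals, ns)

def explain_binop(left_value, op_symbol, right_value):
    # mirrors visit_BinOp: build the explanation and the result together
    explanation = "(%s %s %s)" % (left_value, op_symbol, right_value)
    source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
    result = eval_in_frame(source, {}, {},
                           __exprinfo_left=left_value,
                           __exprinfo_right=right_value)
    return explanation, result

explain_binop(2, "+", 3) returns ("(2 + 3)", 5); the real visitors raise Failure(explanation) when the evaluation itself blows up.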
diff --git a/contrib/python/py/py/_code/_assertionold.py b/contrib/python/py/py/_code/_assertionold.py
index f9d8965538..1bb70a875d 100644
--- a/contrib/python/py/py/_code/_assertionold.py
+++ b/contrib/python/py/py/_code/_assertionold.py
@@ -1,556 +1,556 @@
-import py
-import sys, inspect
-from compiler import parse, ast, pycodegen
-from py._code.assertion import BuiltinAssertionError, _format_explanation
+import py
+import sys, inspect
+from compiler import parse, ast, pycodegen
+from py._code.assertion import BuiltinAssertionError, _format_explanation
import types
-
-passthroughex = py.builtin._sysex
-
-class Failure:
- def __init__(self, node):
- self.exc, self.value, self.tb = sys.exc_info()
- self.node = node
-
-class View(object):
- """View base class.
-
- If C is a subclass of View, then C(x) creates a proxy object around
- the object x. The actual class of the proxy is not C in general,
- but a *subclass* of C determined by the rules below. To avoid confusion
- we call view class the class of the proxy (a subclass of C, so of View)
- and object class the class of x.
-
- Attributes and methods not found in the proxy are automatically read on x.
- Other operations like setting attributes are performed on the proxy, as
- determined by its view class. The object x is available from the proxy
- as its __obj__ attribute.
-
- The view class selection is determined by the __view__ tuples and the
- optional __viewkey__ method. By default, the selected view class is the
- most specific subclass of C whose __view__ mentions the class of x.
- If no such subclass is found, the search proceeds with the parent
- object classes. For example, C(True) will first look for a subclass
- of C with __view__ = (..., bool, ...) and only if it doesn't find any
- look for one with __view__ = (..., int, ...), and then ..., object,...
- If everything fails the class C itself is considered to be the default.
-
- Alternatively, the view class selection can be driven by another aspect
- of the object x, instead of the class of x, by overriding __viewkey__.
- See last example at the end of this module.
- """
-
- _viewcache = {}
- __view__ = ()
-
- def __new__(rootclass, obj, *args, **kwds):
- self = object.__new__(rootclass)
- self.__obj__ = obj
- self.__rootclass__ = rootclass
- key = self.__viewkey__()
- try:
- self.__class__ = self._viewcache[key]
- except KeyError:
- self.__class__ = self._selectsubclass(key)
- return self
-
- def __getattr__(self, attr):
- # attributes not found in the normal hierarchy rooted on View
- # are looked up in the object's real class
- return getattr(self.__obj__, attr)
-
- def __viewkey__(self):
- return self.__obj__.__class__
-
- def __matchkey__(self, key, subclasses):
- if inspect.isclass(key):
- keys = inspect.getmro(key)
- else:
- keys = [key]
- for key in keys:
- result = [C for C in subclasses if key in C.__view__]
- if result:
- return result
- return []
-
- def _selectsubclass(self, key):
- subclasses = list(enumsubclasses(self.__rootclass__))
- for C in subclasses:
- if not isinstance(C.__view__, tuple):
- C.__view__ = (C.__view__,)
- choices = self.__matchkey__(key, subclasses)
- if not choices:
- return self.__rootclass__
- elif len(choices) == 1:
- return choices[0]
- else:
- # combine the multiple choices
- return type('?', tuple(choices), {})
-
- def __repr__(self):
- return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
-
-
-def enumsubclasses(cls):
- for subcls in cls.__subclasses__():
- for subsubclass in enumsubclasses(subcls):
- yield subsubclass
- yield cls
-
-
-class Interpretable(View):
- """A parse tree node with a few extra methods."""
- explanation = None
-
- def is_builtin(self, frame):
- return False
-
- def eval(self, frame):
- # fall-back for unknown expression nodes
- try:
- expr = ast.Expression(self.__obj__)
- expr.filename = '<eval>'
- self.__obj__.filename = '<eval>'
- co = pycodegen.ExpressionCodeGenerator(expr).getCode()
- result = frame.eval(co)
- except passthroughex:
- raise
- except:
- raise Failure(self)
- self.result = result
- self.explanation = self.explanation or frame.repr(self.result)
-
- def run(self, frame):
- # fall-back for unknown statement nodes
- try:
- expr = ast.Module(None, ast.Stmt([self.__obj__]))
- expr.filename = '<run>'
- co = pycodegen.ModuleCodeGenerator(expr).getCode()
- frame.exec_(co)
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
- def nice_explanation(self):
- return _format_explanation(self.explanation)
-
-
-class Name(Interpretable):
- __view__ = ast.Name
-
- def is_local(self, frame):
- source = '%r in locals() is not globals()' % self.name
- try:
- return frame.is_true(frame.eval(source))
- except passthroughex:
- raise
- except:
- return False
-
- def is_global(self, frame):
- source = '%r in globals()' % self.name
- try:
- return frame.is_true(frame.eval(source))
- except passthroughex:
- raise
- except:
- return False
-
- def is_builtin(self, frame):
- source = '%r not in locals() and %r not in globals()' % (
- self.name, self.name)
- try:
- return frame.is_true(frame.eval(source))
- except passthroughex:
- raise
- except:
- return False
-
- def eval(self, frame):
- super(Name, self).eval(frame)
- if not self.is_local(frame):
- self.explanation = self.name
-
-class Compare(Interpretable):
- __view__ = ast.Compare
-
- def eval(self, frame):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- for operation, expr2 in self.ops:
- if hasattr(self, 'result'):
- # shortcutting in chained expressions
- if not frame.is_true(self.result):
- break
- expr2 = Interpretable(expr2)
- expr2.eval(frame)
- self.explanation = "%s %s %s" % (
- expr.explanation, operation, expr2.explanation)
- source = "__exprinfo_left %s __exprinfo_right" % operation
- try:
- self.result = frame.eval(source,
- __exprinfo_left=expr.result,
- __exprinfo_right=expr2.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
- expr = expr2
-
-class And(Interpretable):
- __view__ = ast.And
-
- def eval(self, frame):
- explanations = []
- for expr in self.nodes:
- expr = Interpretable(expr)
- expr.eval(frame)
- explanations.append(expr.explanation)
- self.result = expr.result
- if not frame.is_true(expr.result):
- break
- self.explanation = '(' + ' and '.join(explanations) + ')'
-
-class Or(Interpretable):
- __view__ = ast.Or
-
- def eval(self, frame):
- explanations = []
- for expr in self.nodes:
- expr = Interpretable(expr)
- expr.eval(frame)
- explanations.append(expr.explanation)
- self.result = expr.result
- if frame.is_true(expr.result):
- break
- self.explanation = '(' + ' or '.join(explanations) + ')'
-
-
-# == Unary operations ==
-keepalive = []
-for astclass, astpattern in {
- ast.Not : 'not __exprinfo_expr',
- ast.Invert : '(~__exprinfo_expr)',
- }.items():
-
- class UnaryArith(Interpretable):
- __view__ = astclass
-
- def eval(self, frame, astpattern=astpattern):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- self.explanation = astpattern.replace('__exprinfo_expr',
- expr.explanation)
- try:
- self.result = frame.eval(astpattern,
- __exprinfo_expr=expr.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
- keepalive.append(UnaryArith)
-
-# == Binary operations ==
-for astclass, astpattern in {
- ast.Add : '(__exprinfo_left + __exprinfo_right)',
- ast.Sub : '(__exprinfo_left - __exprinfo_right)',
- ast.Mul : '(__exprinfo_left * __exprinfo_right)',
- ast.Div : '(__exprinfo_left / __exprinfo_right)',
- ast.Mod : '(__exprinfo_left % __exprinfo_right)',
- ast.Power : '(__exprinfo_left ** __exprinfo_right)',
- }.items():
-
- class BinaryArith(Interpretable):
- __view__ = astclass
-
- def eval(self, frame, astpattern=astpattern):
- left = Interpretable(self.left)
- left.eval(frame)
- right = Interpretable(self.right)
- right.eval(frame)
- self.explanation = (astpattern
- .replace('__exprinfo_left', left .explanation)
- .replace('__exprinfo_right', right.explanation))
- try:
- self.result = frame.eval(astpattern,
- __exprinfo_left=left.result,
- __exprinfo_right=right.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
- keepalive.append(BinaryArith)
-
-
-class CallFunc(Interpretable):
- __view__ = ast.CallFunc
-
- def is_bool(self, frame):
- source = 'isinstance(__exprinfo_value, bool)'
- try:
- return frame.is_true(frame.eval(source,
- __exprinfo_value=self.result))
- except passthroughex:
- raise
- except:
- return False
-
- def eval(self, frame):
- node = Interpretable(self.node)
- node.eval(frame)
- explanations = []
- vars = {'__exprinfo_fn': node.result}
- source = '__exprinfo_fn('
- for a in self.args:
- if isinstance(a, ast.Keyword):
- keyword = a.name
- a = a.expr
- else:
- keyword = None
- a = Interpretable(a)
- a.eval(frame)
- argname = '__exprinfo_%d' % len(vars)
- vars[argname] = a.result
- if keyword is None:
- source += argname + ','
- explanations.append(a.explanation)
- else:
- source += '%s=%s,' % (keyword, argname)
- explanations.append('%s=%s' % (keyword, a.explanation))
- if self.star_args:
- star_args = Interpretable(self.star_args)
- star_args.eval(frame)
- argname = '__exprinfo_star'
- vars[argname] = star_args.result
- source += '*' + argname + ','
- explanations.append('*' + star_args.explanation)
- if self.dstar_args:
- dstar_args = Interpretable(self.dstar_args)
- dstar_args.eval(frame)
- argname = '__exprinfo_kwds'
- vars[argname] = dstar_args.result
- source += '**' + argname + ','
- explanations.append('**' + dstar_args.explanation)
- self.explanation = "%s(%s)" % (
- node.explanation, ', '.join(explanations))
- if source.endswith(','):
- source = source[:-1]
- source += ')'
- try:
- self.result = frame.eval(source, **vars)
- except passthroughex:
- raise
- except:
- raise Failure(self)
- if not node.is_builtin(frame) or not self.is_bool(frame):
- r = frame.repr(self.result)
- self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
-
-class Getattr(Interpretable):
- __view__ = ast.Getattr
-
- def eval(self, frame):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- source = '__exprinfo_expr.%s' % self.attrname
- try:
- self.result = frame.eval(source, __exprinfo_expr=expr.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
- self.explanation = '%s.%s' % (expr.explanation, self.attrname)
- # if the attribute comes from the instance, its value is interesting
- source = ('hasattr(__exprinfo_expr, "__dict__") and '
- '%r in __exprinfo_expr.__dict__' % self.attrname)
- try:
- from_instance = frame.is_true(
- frame.eval(source, __exprinfo_expr=expr.result))
- except passthroughex:
- raise
- except:
- from_instance = True
- if from_instance:
- r = frame.repr(self.result)
- self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
-
-# == Re-interpretation of full statements ==
-
-class Assert(Interpretable):
- __view__ = ast.Assert
-
- def run(self, frame):
- test = Interpretable(self.test)
- test.eval(frame)
- # simplify 'assert False where False = ...'
- if (test.explanation.startswith('False\n{False = ') and
- test.explanation.endswith('\n}')):
- test.explanation = test.explanation[15:-2]
- # print the result as 'assert <explanation>'
- self.result = test.result
- self.explanation = 'assert ' + test.explanation
- if not frame.is_true(test.result):
- try:
- raise BuiltinAssertionError
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
-class Assign(Interpretable):
- __view__ = ast.Assign
-
- def run(self, frame):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- self.result = expr.result
- self.explanation = '... = ' + expr.explanation
- # fall-back-run the rest of the assignment
- ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
- mod = ast.Module(None, ast.Stmt([ass]))
- mod.filename = '<run>'
- co = pycodegen.ModuleCodeGenerator(mod).getCode()
- try:
- frame.exec_(co, __exprinfo_expr=expr.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
-class Discard(Interpretable):
- __view__ = ast.Discard
-
- def run(self, frame):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- self.result = expr.result
- self.explanation = expr.explanation
-
-class Stmt(Interpretable):
- __view__ = ast.Stmt
-
- def run(self, frame):
- for stmt in self.nodes:
- stmt = Interpretable(stmt)
- stmt.run(frame)
-
-
-def report_failure(e):
- explanation = e.node.nice_explanation()
- if explanation:
- explanation = ", in: " + explanation
- else:
- explanation = ""
- sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
-
-def check(s, frame=None):
- if frame is None:
- frame = sys._getframe(1)
- frame = py.code.Frame(frame)
- expr = parse(s, 'eval')
- assert isinstance(expr, ast.Expression)
- node = Interpretable(expr.node)
- try:
- node.eval(frame)
- except passthroughex:
- raise
- except Failure:
- e = sys.exc_info()[1]
- report_failure(e)
- else:
- if not frame.is_true(node.result):
- sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
-
-
-###########################################################
-# API / Entry points
-# #########################################################
-
-def interpret(source, frame, should_fail=False):
- module = Interpretable(parse(source, 'exec').node)
- #print "got module", module
+
+passthroughex = py.builtin._sysex
+
+class Failure:
+ def __init__(self, node):
+ self.exc, self.value, self.tb = sys.exc_info()
+ self.node = node
+
+class View(object):
+ """View base class.
+
+ If C is a subclass of View, then C(x) creates a proxy object around
+ the object x. The actual class of the proxy is not C in general,
+ but a *subclass* of C determined by the rules below. To avoid confusion
+ we call view class the class of the proxy (a subclass of C, so of View)
+ and object class the class of x.
+
+ Attributes and methods not found in the proxy are automatically read on x.
+ Other operations like setting attributes are performed on the proxy, as
+ determined by its view class. The object x is available from the proxy
+ as its __obj__ attribute.
+
+ The view class selection is determined by the __view__ tuples and the
+ optional __viewkey__ method. By default, the selected view class is the
+ most specific subclass of C whose __view__ mentions the class of x.
+ If no such subclass is found, the search proceeds with the parent
+ object classes. For example, C(True) will first look for a subclass
+ of C with __view__ = (..., bool, ...) and only if it doesn't find any
+ look for one with __view__ = (..., int, ...), and then ..., object,...
+ If everything fails the class C itself is considered to be the default.
+
+ Alternatively, the view class selection can be driven by another aspect
+ of the object x, instead of the class of x, by overriding __viewkey__.
+ See last example at the end of this module.
+ """
+
+ _viewcache = {}
+ __view__ = ()
+
+ def __new__(rootclass, obj, *args, **kwds):
+ self = object.__new__(rootclass)
+ self.__obj__ = obj
+ self.__rootclass__ = rootclass
+ key = self.__viewkey__()
+ try:
+ self.__class__ = self._viewcache[key]
+ except KeyError:
+ self.__class__ = self._selectsubclass(key)
+ return self
+
+ def __getattr__(self, attr):
+ # attributes not found in the normal hierarchy rooted on View
+ # are looked up in the object's real class
+ return getattr(self.__obj__, attr)
+
+ def __viewkey__(self):
+ return self.__obj__.__class__
+
+ def __matchkey__(self, key, subclasses):
+ if inspect.isclass(key):
+ keys = inspect.getmro(key)
+ else:
+ keys = [key]
+ for key in keys:
+ result = [C for C in subclasses if key in C.__view__]
+ if result:
+ return result
+ return []
+
+ def _selectsubclass(self, key):
+ subclasses = list(enumsubclasses(self.__rootclass__))
+ for C in subclasses:
+ if not isinstance(C.__view__, tuple):
+ C.__view__ = (C.__view__,)
+ choices = self.__matchkey__(key, subclasses)
+ if not choices:
+ return self.__rootclass__
+ elif len(choices) == 1:
+ return choices[0]
+ else:
+ # combine the multiple choices
+ return type('?', tuple(choices), {})
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
+
+
+def enumsubclasses(cls):
+ for subcls in cls.__subclasses__():
+ for subsubclass in enumsubclasses(subcls):
+ yield subsubclass
+ yield cls
+
+
+class Interpretable(View):
+ """A parse tree node with a few extra methods."""
+ explanation = None
+
+ def is_builtin(self, frame):
+ return False
+
+ def eval(self, frame):
+ # fall-back for unknown expression nodes
+ try:
+ expr = ast.Expression(self.__obj__)
+ expr.filename = '<eval>'
+ self.__obj__.filename = '<eval>'
+ co = pycodegen.ExpressionCodeGenerator(expr).getCode()
+ result = frame.eval(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.result = result
+ self.explanation = self.explanation or frame.repr(self.result)
+
+ def run(self, frame):
+ # fall-back for unknown statement nodes
+ try:
+ expr = ast.Module(None, ast.Stmt([self.__obj__]))
+ expr.filename = '<run>'
+ co = pycodegen.ModuleCodeGenerator(expr).getCode()
+ frame.exec_(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ def nice_explanation(self):
+ return _format_explanation(self.explanation)
+
+
+class Name(Interpretable):
+ __view__ = ast.Name
+
+ def is_local(self, frame):
+ source = '%r in locals() is not globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_global(self, frame):
+ source = '%r in globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_builtin(self, frame):
+ source = '%r not in locals() and %r not in globals()' % (
+ self.name, self.name)
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ super(Name, self).eval(frame)
+ if not self.is_local(frame):
+ self.explanation = self.name
+
+class Compare(Interpretable):
+ __view__ = ast.Compare
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ for operation, expr2 in self.ops:
+ if hasattr(self, 'result'):
+ # shortcutting in chained expressions
+ if not frame.is_true(self.result):
+ break
+ expr2 = Interpretable(expr2)
+ expr2.eval(frame)
+ self.explanation = "%s %s %s" % (
+ expr.explanation, operation, expr2.explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % operation
+ try:
+ self.result = frame.eval(source,
+ __exprinfo_left=expr.result,
+ __exprinfo_right=expr2.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ expr = expr2
+
+class And(Interpretable):
+ __view__ = ast.And
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if not frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' and '.join(explanations) + ')'
+
+class Or(Interpretable):
+ __view__ = ast.Or
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' or '.join(explanations) + ')'
+
+
+# == Unary operations ==
+keepalive = []
+for astclass, astpattern in {
+ ast.Not : 'not __exprinfo_expr',
+ ast.Invert : '(~__exprinfo_expr)',
+ }.items():
+
+ class UnaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.explanation = astpattern.replace('__exprinfo_expr',
+ expr.explanation)
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(UnaryArith)
+
+# == Binary operations ==
+for astclass, astpattern in {
+ ast.Add : '(__exprinfo_left + __exprinfo_right)',
+ ast.Sub : '(__exprinfo_left - __exprinfo_right)',
+ ast.Mul : '(__exprinfo_left * __exprinfo_right)',
+ ast.Div : '(__exprinfo_left / __exprinfo_right)',
+ ast.Mod : '(__exprinfo_left % __exprinfo_right)',
+ ast.Power : '(__exprinfo_left ** __exprinfo_right)',
+ }.items():
+
+ class BinaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ left = Interpretable(self.left)
+ left.eval(frame)
+ right = Interpretable(self.right)
+ right.eval(frame)
+ self.explanation = (astpattern
+ .replace('__exprinfo_left', left .explanation)
+ .replace('__exprinfo_right', right.explanation))
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_left=left.result,
+ __exprinfo_right=right.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(BinaryArith)
+
+
+class CallFunc(Interpretable):
+ __view__ = ast.CallFunc
+
+ def is_bool(self, frame):
+ source = 'isinstance(__exprinfo_value, bool)'
+ try:
+ return frame.is_true(frame.eval(source,
+ __exprinfo_value=self.result))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ node = Interpretable(self.node)
+ node.eval(frame)
+ explanations = []
+ vars = {'__exprinfo_fn': node.result}
+ source = '__exprinfo_fn('
+ for a in self.args:
+ if isinstance(a, ast.Keyword):
+ keyword = a.name
+ a = a.expr
+ else:
+ keyword = None
+ a = Interpretable(a)
+ a.eval(frame)
+ argname = '__exprinfo_%d' % len(vars)
+ vars[argname] = a.result
+ if keyword is None:
+ source += argname + ','
+ explanations.append(a.explanation)
+ else:
+ source += '%s=%s,' % (keyword, argname)
+ explanations.append('%s=%s' % (keyword, a.explanation))
+ if self.star_args:
+ star_args = Interpretable(self.star_args)
+ star_args.eval(frame)
+ argname = '__exprinfo_star'
+ vars[argname] = star_args.result
+ source += '*' + argname + ','
+ explanations.append('*' + star_args.explanation)
+ if self.dstar_args:
+ dstar_args = Interpretable(self.dstar_args)
+ dstar_args.eval(frame)
+ argname = '__exprinfo_kwds'
+ vars[argname] = dstar_args.result
+ source += '**' + argname + ','
+ explanations.append('**' + dstar_args.explanation)
+ self.explanation = "%s(%s)" % (
+ node.explanation, ', '.join(explanations))
+ if source.endswith(','):
+ source = source[:-1]
+ source += ')'
+ try:
+ self.result = frame.eval(source, **vars)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ if not node.is_builtin(frame) or not self.is_bool(frame):
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+class Getattr(Interpretable):
+ __view__ = ast.Getattr
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ source = '__exprinfo_expr.%s' % self.attrname
+ try:
+ self.result = frame.eval(source, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.explanation = '%s.%s' % (expr.explanation, self.attrname)
+ # if the attribute comes from the instance, its value is interesting
+ source = ('hasattr(__exprinfo_expr, "__dict__") and '
+ '%r in __exprinfo_expr.__dict__' % self.attrname)
+ try:
+ from_instance = frame.is_true(
+ frame.eval(source, __exprinfo_expr=expr.result))
+ except passthroughex:
+ raise
+ except:
+ from_instance = True
+ if from_instance:
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+# == Re-interpretation of full statements ==
+
+class Assert(Interpretable):
+ __view__ = ast.Assert
+
+ def run(self, frame):
+ test = Interpretable(self.test)
+ test.eval(frame)
+ # simplify 'assert False where False = ...'
+ if (test.explanation.startswith('False\n{False = ') and
+ test.explanation.endswith('\n}')):
+ test.explanation = test.explanation[15:-2]
+ # print the result as 'assert <explanation>'
+ self.result = test.result
+ self.explanation = 'assert ' + test.explanation
+ if not frame.is_true(test.result):
+ try:
+ raise BuiltinAssertionError
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Assign(Interpretable):
+ __view__ = ast.Assign
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = '... = ' + expr.explanation
+ # fall-back-run the rest of the assignment
+ ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
+ mod = ast.Module(None, ast.Stmt([ass]))
+ mod.filename = '<run>'
+ co = pycodegen.ModuleCodeGenerator(mod).getCode()
+ try:
+ frame.exec_(co, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Discard(Interpretable):
+ __view__ = ast.Discard
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = expr.explanation
+
+class Stmt(Interpretable):
+ __view__ = ast.Stmt
+
+ def run(self, frame):
+ for stmt in self.nodes:
+ stmt = Interpretable(stmt)
+ stmt.run(frame)
+
+
+def report_failure(e):
+ explanation = e.node.nice_explanation()
+ if explanation:
+ explanation = ", in: " + explanation
+ else:
+ explanation = ""
+ sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
+
+def check(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ expr = parse(s, 'eval')
+ assert isinstance(expr, ast.Expression)
+ node = Interpretable(expr.node)
+ try:
+ node.eval(frame)
+ except passthroughex:
+ raise
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+ else:
+ if not frame.is_true(node.result):
+ sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
+
+
+###########################################################
+# API / Entry points
+# #########################################################
+
+def interpret(source, frame, should_fail=False):
+ module = Interpretable(parse(source, 'exec').node)
+ #print "got module", module
if isinstance(frame, types.FrameType):
- frame = py.code.Frame(frame)
- try:
- module.run(frame)
- except Failure:
- e = sys.exc_info()[1]
- return getfailure(e)
- except passthroughex:
- raise
- except:
- import traceback
- traceback.print_exc()
- if should_fail:
- return ("(assertion failed, but when it was re-run for "
- "printing intermediate values, it did not fail. Suggestions: "
- "compute assert expression before the assert or use --nomagic)")
- else:
- return None
-
-def getmsg(excinfo):
- if isinstance(excinfo, tuple):
- excinfo = py.code.ExceptionInfo(excinfo)
- #frame, line = gettbline(tb)
- #frame = py.code.Frame(frame)
- #return interpret(line, frame)
-
- tb = excinfo.traceback[-1]
- source = str(tb.statement).strip()
- x = interpret(source, tb.frame, should_fail=True)
- if not isinstance(x, str):
- raise TypeError("interpret returned non-string %r" % (x,))
- return x
-
-def getfailure(e):
- explanation = e.node.nice_explanation()
- if str(e.value):
- lines = explanation.split('\n')
- lines[0] += " << %s" % (e.value,)
- explanation = '\n'.join(lines)
- text = "%s: %s" % (e.exc.__name__, explanation)
- if text.startswith('AssertionError: assert '):
- text = text[16:]
- return text
-
-def run(s, frame=None):
- if frame is None:
- frame = sys._getframe(1)
- frame = py.code.Frame(frame)
- module = Interpretable(parse(s, 'exec').node)
- try:
- module.run(frame)
- except Failure:
- e = sys.exc_info()[1]
- report_failure(e)
-
-
-if __name__ == '__main__':
- # example:
- def f():
- return 5
- def g():
- return 3
- def h(x):
- return 'never'
- check("f() * g() == 5")
- check("not f()")
- check("not (f() and g() or 0)")
- check("f() == g()")
- i = 4
- check("i == f()")
- check("len(f()) == 0")
- check("isinstance(2+3+4, float)")
-
- run("x = i")
- check("x == 5")
-
- run("assert not f(), 'oops'")
- run("a, b, c = 1, 2")
- run("a, b, c = f()")
-
- check("max([f(),g()]) == 4")
- check("'hello'[g()] == 'h'")
- run("'guk%d' % h(f())")
+ frame = py.code.Frame(frame)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ return getfailure(e)
+ except passthroughex:
+ raise
+ except:
+ import traceback
+ traceback.print_exc()
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --nomagic)")
+ else:
+ return None
+
+def getmsg(excinfo):
+ if isinstance(excinfo, tuple):
+ excinfo = py.code.ExceptionInfo(excinfo)
+ #frame, line = gettbline(tb)
+ #frame = py.code.Frame(frame)
+ #return interpret(line, frame)
+
+ tb = excinfo.traceback[-1]
+ source = str(tb.statement).strip()
+ x = interpret(source, tb.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ return x
+
+def getfailure(e):
+ explanation = e.node.nice_explanation()
+ if str(e.value):
+ lines = explanation.split('\n')
+ lines[0] += " << %s" % (e.value,)
+ explanation = '\n'.join(lines)
+ text = "%s: %s" % (e.exc.__name__, explanation)
+ if text.startswith('AssertionError: assert '):
+ text = text[16:]
+ return text
+
+def run(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ module = Interpretable(parse(s, 'exec').node)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+
+
+if __name__ == '__main__':
+ # example:
+ def f():
+ return 5
+ def g():
+ return 3
+ def h(x):
+ return 'never'
+ check("f() * g() == 5")
+ check("not f()")
+ check("not (f() and g() or 0)")
+ check("f() == g()")
+ i = 4
+ check("i == f()")
+ check("len(f()) == 0")
+ check("isinstance(2+3+4, float)")
+
+ run("x = i")
+ check("x == 5")
+
+ run("assert not f(), 'oops'")
+ run("a, b, c = 1, 2")
+ run("a, b, c = f()")
+
+ check("max([f(),g()]) == 4")
+ check("'hello'[g()] == 'h'")
+ run("'guk%d' % h(f())")
diff --git a/contrib/python/py/py/_code/_py2traceback.py b/contrib/python/py/py/_code/_py2traceback.py
index 8dd9541b45..d65e27cb73 100644
--- a/contrib/python/py/py/_code/_py2traceback.py
+++ b/contrib/python/py/py/_code/_py2traceback.py
@@ -1,79 +1,79 @@
-# copied from python-2.7.3's traceback.py
-# CHANGES:
-# - some_str is replaced, trying to create unicode strings
-#
-import types
-
-def format_exception_only(etype, value):
- """Format the exception part of a traceback.
-
- The arguments are the exception type and value such as given by
- sys.last_type and sys.last_value. The return value is a list of
- strings, each ending in a newline.
-
- Normally, the list contains a single string; however, for
- SyntaxError exceptions, it contains several lines that (when
- printed) display detailed information about where the syntax
- error occurred.
-
- The message indicating which exception occurred is always the last
- string in the list.
-
- """
-
- # An instance should not have a meaningful value parameter, but
- # sometimes does, particularly for string exceptions, such as
- # >>> raise string1, string2 # deprecated
- #
- # Clear these out first because issubclass(string1, SyntaxError)
- # would throw another exception and mask the original problem.
- if (isinstance(etype, BaseException) or
- isinstance(etype, types.InstanceType) or
- etype is None or type(etype) is str):
- return [_format_final_exc_line(etype, value)]
-
- stype = etype.__name__
-
- if not issubclass(etype, SyntaxError):
- return [_format_final_exc_line(stype, value)]
-
- # It was a syntax error; show exactly where the problem was found.
- lines = []
- try:
- msg, (filename, lineno, offset, badline) = value.args
- except Exception:
- pass
- else:
- filename = filename or "<string>"
- lines.append(' File "%s", line %d\n' % (filename, lineno))
- if badline is not None:
- lines.append(' %s\n' % badline.strip())
- if offset is not None:
- caretspace = badline.rstrip('\n')[:offset].lstrip()
- # non-space whitespace (like tabs) must be kept for alignment
- caretspace = ((c.isspace() and c or ' ') for c in caretspace)
- # only three spaces to account for offset1 == pos 0
- lines.append(' %s^\n' % ''.join(caretspace))
- value = msg
-
- lines.append(_format_final_exc_line(stype, value))
- return lines
-
-def _format_final_exc_line(etype, value):
- """Return a list of a single line -- normal case for format_exception_only"""
- valuestr = _some_str(value)
- if value is None or not valuestr:
- line = "%s\n" % etype
- else:
- line = "%s: %s\n" % (etype, valuestr)
- return line
-
-def _some_str(value):
- try:
- return unicode(value)
- except Exception:
- try:
- return str(value)
- except Exception:
- pass
- return '<unprintable %s object>' % type(value).__name__
+# copied from python-2.7.3's traceback.py
+# CHANGES:
+# - some_str is replaced, trying to create unicode strings
+#
+import types
+
+def format_exception_only(etype, value):
+ """Format the exception part of a traceback.
+
+ The arguments are the exception type and value such as given by
+ sys.last_type and sys.last_value. The return value is a list of
+ strings, each ending in a newline.
+
+ Normally, the list contains a single string; however, for
+ SyntaxError exceptions, it contains several lines that (when
+ printed) display detailed information about where the syntax
+ error occurred.
+
+ The message indicating which exception occurred is always the last
+ string in the list.
+
+ """
+
+ # An instance should not have a meaningful value parameter, but
+ # sometimes does, particularly for string exceptions, such as
+ # >>> raise string1, string2 # deprecated
+ #
+ # Clear these out first because issubclass(string1, SyntaxError)
+ # would throw another exception and mask the original problem.
+ if (isinstance(etype, BaseException) or
+ isinstance(etype, types.InstanceType) or
+ etype is None or type(etype) is str):
+ return [_format_final_exc_line(etype, value)]
+
+ stype = etype.__name__
+
+ if not issubclass(etype, SyntaxError):
+ return [_format_final_exc_line(stype, value)]
+
+ # It was a syntax error; show exactly where the problem was found.
+ lines = []
+ try:
+ msg, (filename, lineno, offset, badline) = value.args
+ except Exception:
+ pass
+ else:
+ filename = filename or "<string>"
+ lines.append(' File "%s", line %d\n' % (filename, lineno))
+ if badline is not None:
+ lines.append(' %s\n' % badline.strip())
+ if offset is not None:
+ caretspace = badline.rstrip('\n')[:offset].lstrip()
+ # non-space whitespace (like tabs) must be kept for alignment
+ caretspace = ((c.isspace() and c or ' ') for c in caretspace)
+ # only three spaces to account for offset1 == pos 0
+ lines.append(' %s^\n' % ''.join(caretspace))
+ value = msg
+
+ lines.append(_format_final_exc_line(stype, value))
+ return lines
+
+def _format_final_exc_line(etype, value):
+ """Return a list of a single line -- normal case for format_exception_only"""
+ valuestr = _some_str(value)
+ if value is None or not valuestr:
+ line = "%s\n" % etype
+ else:
+ line = "%s: %s\n" % (etype, valuestr)
+ return line
+
+def _some_str(value):
+ try:
+ return unicode(value)
+ except Exception:
+ try:
+ return str(value)
+ except Exception:
+ pass
+ return '<unprintable %s object>' % type(value).__name__
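Since this module is Python 2 only (it relies on types.InstanceType and unicode), a quick Python 2 usage sketch shows the multi-line SyntaxError path:

import sys

try:
    compile("1 +", "<demo>", "eval")
except SyntaxError:
    etype, value = sys.exc_info()[:2]
    for line in format_exception_only(etype, value):
        sys.stdout.write(line)

The result ends with the offending line, a caret under the reported offset, and the final "SyntaxError: ..." message; for any other exception type the list collapses to the single line built by _format_final_exc_line.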
diff --git a/contrib/python/py/py/_code/assertion.py b/contrib/python/py/py/_code/assertion.py
index df529c274e..ff1643799c 100644
--- a/contrib/python/py/py/_code/assertion.py
+++ b/contrib/python/py/py/_code/assertion.py
@@ -1,90 +1,90 @@
-import sys
-import py
-
-BuiltinAssertionError = py.builtin.builtins.AssertionError
-
-_reprcompare = None # if set, will be called by assert reinterp for comparison ops
-
-def _format_explanation(explanation):
- """This formats an explanation
-
- Normally all embedded newlines are escaped, however there are
- three exceptions: \n{, \n} and \n~. The first two are intended
- to cover nested explanations, see function and attribute explanations
- for examples (.visit_Call(), visit_Attribute()). The last one is
- for when one explanation needs to span multiple lines, e.g. when
- displaying diffs.
- """
- raw_lines = (explanation or '').split('\n')
- # escape newlines not followed by {, } and ~
- lines = [raw_lines[0]]
- for l in raw_lines[1:]:
- if l.startswith('{') or l.startswith('}') or l.startswith('~'):
- lines.append(l)
- else:
- lines[-1] += '\\n' + l
-
- result = lines[:1]
- stack = [0]
- stackcnt = [0]
- for line in lines[1:]:
- if line.startswith('{'):
- if stackcnt[-1]:
- s = 'and '
- else:
- s = 'where '
- stack.append(len(result))
- stackcnt[-1] += 1
- stackcnt.append(0)
- result.append(' +' + ' '*(len(stack)-1) + s + line[1:])
- elif line.startswith('}'):
- assert line.startswith('}')
- stack.pop()
- stackcnt.pop()
- result[stack[-1]] += line[1:]
- else:
- assert line.startswith('~')
- result.append(' '*len(stack) + line[1:])
- assert len(stack) == 1
- return '\n'.join(result)
-
-
-class AssertionError(BuiltinAssertionError):
- def __init__(self, *args):
- BuiltinAssertionError.__init__(self, *args)
- if args:
- try:
- self.msg = str(args[0])
- except py.builtin._sysex:
- raise
- except:
- self.msg = "<[broken __repr__] %s at %0xd>" %(
- args[0].__class__, id(args[0]))
- else:
- f = py.code.Frame(sys._getframe(1))
- try:
- source = f.code.fullsource
- if source is not None:
- try:
- source = source.getstatement(f.lineno, assertion=True)
- except IndexError:
- source = None
- else:
- source = str(source.deindent()).strip()
- except py.error.ENOENT:
- source = None
- # this can also occur during reinterpretation, when the
- # co_filename is set to "<run>".
- if source:
- self.msg = reinterpret(source, f, should_fail=True)
- else:
- self.msg = "<could not determine information>"
- if not self.args:
- self.args = (self.msg,)
-
-if sys.version_info > (3, 0):
- AssertionError.__module__ = "builtins"
- reinterpret_old = "old reinterpretation not available for py3"
-else:
- from py._code._assertionold import interpret as reinterpret_old
+import sys
+import py
+
+BuiltinAssertionError = py.builtin.builtins.AssertionError
+
+_reprcompare = None # if set, will be called by assert reinterp for comparison ops
+
+def _format_explanation(explanation):
+ """This formats an explanation
+
+ Normally all embedded newlines are escaped, however there are
+ three exceptions: \n{, \n} and \n~. The first two are intended
+ to cover nested explanations, see function and attribute explanations
+ for examples (.visit_Call(), visit_Attribute()). The last one is
+ for when one explanation needs to span multiple lines, e.g. when
+ displaying diffs.
+ """
+ raw_lines = (explanation or '').split('\n')
+ # escape newlines not followed by {, } and ~
+ lines = [raw_lines[0]]
+ for l in raw_lines[1:]:
+ if l.startswith('{') or l.startswith('}') or l.startswith('~'):
+ lines.append(l)
+ else:
+ lines[-1] += '\\n' + l
+
+ result = lines[:1]
+ stack = [0]
+ stackcnt = [0]
+ for line in lines[1:]:
+ if line.startswith('{'):
+ if stackcnt[-1]:
+ s = 'and '
+ else:
+ s = 'where '
+ stack.append(len(result))
+ stackcnt[-1] += 1
+ stackcnt.append(0)
+ result.append(' +' + ' '*(len(stack)-1) + s + line[1:])
+ elif line.startswith('}'):
+ assert line.startswith('}')
+ stack.pop()
+ stackcnt.pop()
+ result[stack[-1]] += line[1:]
+ else:
+ assert line.startswith('~')
+ result.append(' '*len(stack) + line[1:])
+ assert len(stack) == 1
+ return '\n'.join(result)
+
+
+class AssertionError(BuiltinAssertionError):
+ def __init__(self, *args):
+ BuiltinAssertionError.__init__(self, *args)
+ if args:
+ try:
+ self.msg = str(args[0])
+ except py.builtin._sysex:
+ raise
+ except:
+ self.msg = "<[broken __repr__] %s at %0xd>" %(
+ args[0].__class__, id(args[0]))
+ else:
+ f = py.code.Frame(sys._getframe(1))
+ try:
+ source = f.code.fullsource
+ if source is not None:
+ try:
+ source = source.getstatement(f.lineno, assertion=True)
+ except IndexError:
+ source = None
+ else:
+ source = str(source.deindent()).strip()
+ except py.error.ENOENT:
+ source = None
+ # this can also occur during reinterpretation, when the
+ # co_filename is set to "<run>".
+ if source:
+ self.msg = reinterpret(source, f, should_fail=True)
+ else:
+ self.msg = "<could not determine information>"
+ if not self.args:
+ self.args = (self.msg,)
+
+if sys.version_info > (3, 0):
+ AssertionError.__module__ = "builtins"
+ reinterpret_old = "old reinterpretation not available for py3"
+else:
+ from py._code._assertionold import interpret as reinterpret_old
from py._code._assertionnew import interpret as reinterpret
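The \n{ ... \n} nesting convention that _format_explanation handles is easiest to see on a tiny input. A minimal sketch (the nested string is hand-made, shaped like what the reinterpreters emit):

nested = "False\n{False = f()\n}"
print(_format_explanation(nested))

which prints:

False
 + where False = f()

A second nested block opened at the same level would be introduced with "and" instead of "where"; that is what the stackcnt bookkeeping tracks.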
diff --git a/contrib/python/py/py/_code/code.py b/contrib/python/py/py/_code/code.py
index a3170c4376..dad796283f 100644
--- a/contrib/python/py/py/_code/code.py
+++ b/contrib/python/py/py/_code/code.py
@@ -1,796 +1,796 @@
-import py
-import sys
+import py
+import sys
from inspect import CO_VARARGS, CO_VARKEYWORDS, isclass
-
-builtin_repr = repr
-
-reprlib = py.builtin._tryimport('repr', 'reprlib')
-
-if sys.version_info[0] >= 3:
- from traceback import format_exception_only
-else:
- from py._code._py2traceback import format_exception_only
-
+
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+if sys.version_info[0] >= 3:
+ from traceback import format_exception_only
+else:
+ from py._code._py2traceback import format_exception_only
+
import traceback
-class Code(object):
- """ wrapper around Python code objects """
- def __init__(self, rawcode):
- if not hasattr(rawcode, "co_filename"):
- rawcode = py.code.getrawcode(rawcode)
- try:
- self.filename = rawcode.co_filename
- self.firstlineno = rawcode.co_firstlineno - 1
- self.name = rawcode.co_name
- except AttributeError:
+class Code(object):
+ """ wrapper around Python code objects """
+ def __init__(self, rawcode):
+ if not hasattr(rawcode, "co_filename"):
+ rawcode = py.code.getrawcode(rawcode)
+ try:
+ self.filename = rawcode.co_filename
+ self.firstlineno = rawcode.co_firstlineno - 1
+ self.name = rawcode.co_name
+ except AttributeError:
raise TypeError("not a code object: %r" % (rawcode,))
- self.raw = rawcode
-
- def __eq__(self, other):
- return self.raw == other.raw
-
- def __ne__(self, other):
- return not self == other
-
- @property
- def path(self):
- """ return a path object pointing to source code (note that it
- might not point to an actually existing file). """
- p = py.path.local(self.raw.co_filename)
- # maybe don't try this checking
- if not p.check():
- # XXX maybe try harder like the weird logic
- # in the standard lib [linecache.updatecache] does?
- p = self.raw.co_filename
- return p
-
- @property
- def fullsource(self):
- """ return a py.code.Source object for the full source file of the code
- """
- from py._code import source
- full, _ = source.findsource(self.raw)
- return full
-
- def source(self):
- """ return a py.code.Source object for the code object's source only
- """
- # return source only for that part of code
- return py.code.Source(self.raw)
-
- def getargs(self, var=False):
- """ return a tuple with the argument names for the code object
-
- if 'var' is set True also return the names of the variable and
- keyword arguments when present
- """
- # handy shortcut for getting args
- raw = self.raw
- argcount = raw.co_argcount
- if var:
- argcount += raw.co_flags & CO_VARARGS
- argcount += raw.co_flags & CO_VARKEYWORDS
- return raw.co_varnames[:argcount]
-
-class Frame(object):
- """Wrapper around a Python frame holding f_locals and f_globals
- in which expressions can be evaluated."""
-
- def __init__(self, frame):
- self.lineno = frame.f_lineno - 1
- self.f_globals = frame.f_globals
- self.f_locals = frame.f_locals
- self.raw = frame
- self.code = py.code.Code(frame.f_code)
-
- @property
- def statement(self):
- """ statement this frame is at """
- if self.code.fullsource is None:
- return py.code.Source("")
- return self.code.fullsource.getstatement(self.lineno)
-
- def eval(self, code, **vars):
- """ evaluate 'code' in the frame
-
- 'vars' are optional additional local variables
-
- returns the result of the evaluation
- """
- f_locals = self.f_locals.copy()
- f_locals.update(vars)
- return eval(code, self.f_globals, f_locals)
-
- def exec_(self, code, **vars):
- """ exec 'code' in the frame
-
- 'vars' are optional additional local variables
- """
- f_locals = self.f_locals.copy()
- f_locals.update(vars)
+ self.raw = rawcode
+
+ def __eq__(self, other):
+ return self.raw == other.raw
+
+ def __ne__(self, other):
+ return not self == other
+
+ @property
+ def path(self):
+ """ return a path object pointing to source code (note that it
+ might not point to an actually existing file). """
+ p = py.path.local(self.raw.co_filename)
+ # maybe don't try this checking
+ if not p.check():
+ # XXX maybe try harder like the weird logic
+ # in the standard lib [linecache.updatecache] does?
+ p = self.raw.co_filename
+ return p
+
+ @property
+ def fullsource(self):
+ """ return a py.code.Source object for the full source file of the code
+ """
+ from py._code import source
+ full, _ = source.findsource(self.raw)
+ return full
+
+ def source(self):
+ """ return a py.code.Source object for the code object's source only
+ """
+ # return source only for that part of code
+ return py.code.Source(self.raw)
+
+ def getargs(self, var=False):
+ """ return a tuple with the argument names for the code object
+
+ if 'var' is set True also return the names of the variable and
+ keyword arguments when present
+ """
+ # handy shortcut for getting args
+ raw = self.raw
+ argcount = raw.co_argcount
+ if var:
+ argcount += raw.co_flags & CO_VARARGS
+ argcount += raw.co_flags & CO_VARKEYWORDS
+ return raw.co_varnames[:argcount]
+
+class Frame(object):
+ """Wrapper around a Python frame holding f_locals and f_globals
+ in which expressions can be evaluated."""
+
+ def __init__(self, frame):
+ self.lineno = frame.f_lineno - 1
+ self.f_globals = frame.f_globals
+ self.f_locals = frame.f_locals
+ self.raw = frame
+ self.code = py.code.Code(frame.f_code)
+
+ @property
+ def statement(self):
+ """ statement this frame is at """
+ if self.code.fullsource is None:
+ return py.code.Source("")
+ return self.code.fullsource.getstatement(self.lineno)
+
+ def eval(self, code, **vars):
+ """ evaluate 'code' in the frame
+
+ 'vars' are optional additional local variables
+
+ returns the result of the evaluation
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ return eval(code, self.f_globals, f_locals)
+
+ def exec_(self, code, **vars):
+ """ exec 'code' in the frame
+
+ 'vars' are optional additional local variables
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
py.builtin.exec_(code, self.f_globals, f_locals)
-
- def repr(self, object):
- """ return a 'safe' (non-recursive, one-line) string repr for 'object'
- """
- return py.io.saferepr(object)
-
- def is_true(self, object):
- return object
-
- def getargs(self, var=False):
- """ return a list of tuples (name, value) for all arguments
-
- if 'var' is set True also include the variable and keyword
- arguments when present
- """
- retval = []
- for arg in self.code.getargs(var):
- try:
- retval.append((arg, self.f_locals[arg]))
- except KeyError:
- pass # this can occur when using Psyco
- return retval
-
-
-class TracebackEntry(object):
- """ a single entry in a traceback """
-
- _repr_style = None
- exprinfo = None
-
- def __init__(self, rawentry):
- self._rawentry = rawentry
- self.lineno = rawentry.tb_lineno - 1
-
- def set_repr_style(self, mode):
- assert mode in ("short", "long")
- self._repr_style = mode
-
- @property
- def frame(self):
- return py.code.Frame(self._rawentry.tb_frame)
-
- @property
- def relline(self):
- return self.lineno - self.frame.code.firstlineno
-
- def __repr__(self):
+
+ def repr(self, object):
+ """ return a 'safe' (non-recursive, one-line) string repr for 'object'
+ """
+ return py.io.saferepr(object)
+
+ def is_true(self, object):
+ return object
+
+ def getargs(self, var=False):
+ """ return a list of tuples (name, value) for all arguments
+
+ if 'var' is set True also include the variable and keyword
+ arguments when present
+ """
+ retval = []
+ for arg in self.code.getargs(var):
+ try:
+ retval.append((arg, self.f_locals[arg]))
+ except KeyError:
+ pass # this can occur when using Psyco
+ return retval
+
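# The Frame.eval/exec_ pair above is the primitive the assertion
# reinterpreters build on: extra keyword arguments become temporary locals
# of the evaluated expression. A hypothetical stand-alone sketch (assumes
# the py package is importable; nothing here belongs to the diff itself):
import sys
import py

x = 21                                   # an ordinary local in this frame
f = py.code.Frame(sys._getframe(0))      # wrap the current frame
print(f.eval("x * __exprinfo_factor", __exprinfo_factor=2))   # prints 42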
+
+class TracebackEntry(object):
+ """ a single entry in a traceback """
+
+ _repr_style = None
+ exprinfo = None
+
+ def __init__(self, rawentry):
+ self._rawentry = rawentry
+ self.lineno = rawentry.tb_lineno - 1
+
+ def set_repr_style(self, mode):
+ assert mode in ("short", "long")
+ self._repr_style = mode
+
+ @property
+ def frame(self):
+ return py.code.Frame(self._rawentry.tb_frame)
+
+ @property
+ def relline(self):
+ return self.lineno - self.frame.code.firstlineno
+
+ def __repr__(self):
return "<TracebackEntry %s:%d>" % (self.frame.code.path, self.lineno+1)
-
- @property
- def statement(self):
- """ py.code.Source object for the current statement """
- source = self.frame.code.fullsource
- return source.getstatement(self.lineno)
-
- @property
- def path(self):
- """ path to the source code """
- return self.frame.code.path
-
- def getlocals(self):
- return self.frame.f_locals
- locals = property(getlocals, None, None, "locals of underlying frame")
-
- def reinterpret(self):
- """Reinterpret the failing statement and returns a detailed information
- about what operations are performed."""
- if self.exprinfo is None:
- source = str(self.statement).strip()
- x = py.code._reinterpret(source, self.frame, should_fail=True)
- if not isinstance(x, str):
- raise TypeError("interpret returned non-string %r" % (x,))
- self.exprinfo = x
- return self.exprinfo
-
- def getfirstlinesource(self):
- # on Jython this firstlineno can be -1 apparently
- return max(self.frame.code.firstlineno, 0)
-
- def getsource(self, astcache=None):
- """ return failing source code. """
- # we use the passed in astcache to not reparse asttrees
- # within exception info printing
- from py._code.source import getstatementrange_ast
- source = self.frame.code.fullsource
- if source is None:
- return None
- key = astnode = None
- if astcache is not None:
- key = self.frame.code.path
- if key is not None:
- astnode = astcache.get(key, None)
- start = self.getfirstlinesource()
- try:
- astnode, _, end = getstatementrange_ast(self.lineno, source,
- astnode=astnode)
- except SyntaxError:
- end = self.lineno + 1
- else:
- if key is not None:
- astcache[key] = astnode
- return source[start:end]
-
- source = property(getsource)
-
- def ishidden(self):
- """ return True if the current frame has a var __tracebackhide__
- resolving to True
-
- mostly for internal use
- """
- try:
- return self.frame.f_locals['__tracebackhide__']
- except KeyError:
- try:
- return self.frame.f_globals['__tracebackhide__']
- except KeyError:
- return False
-
- def __str__(self):
- try:
- fn = str(self.path)
- except py.error.Error:
- fn = '???'
- name = self.frame.code.name
- try:
- line = str(self.statement).lstrip()
- except KeyboardInterrupt:
- raise
- except:
- line = "???"
+
+ @property
+ def statement(self):
+ """ py.code.Source object for the current statement """
+ source = self.frame.code.fullsource
+ return source.getstatement(self.lineno)
+
+ @property
+ def path(self):
+ """ path to the source code """
+ return self.frame.code.path
+
+ def getlocals(self):
+ return self.frame.f_locals
+ locals = property(getlocals, None, None, "locals of underlying frame")
+
+ def reinterpret(self):
+ """Reinterpret the failing statement and returns a detailed information
+ about what operations are performed."""
+ if self.exprinfo is None:
+ source = str(self.statement).strip()
+ x = py.code._reinterpret(source, self.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ self.exprinfo = x
+ return self.exprinfo
+
+ def getfirstlinesource(self):
+ # on Jython this firstlineno can be -1 apparently
+ return max(self.frame.code.firstlineno, 0)
+
+ def getsource(self, astcache=None):
+ """ return failing source code. """
+ # we use the passed in astcache to not reparse asttrees
+ # within exception info printing
+ from py._code.source import getstatementrange_ast
+ source = self.frame.code.fullsource
+ if source is None:
+ return None
+ key = astnode = None
+ if astcache is not None:
+ key = self.frame.code.path
+ if key is not None:
+ astnode = astcache.get(key, None)
+ start = self.getfirstlinesource()
+ try:
+ astnode, _, end = getstatementrange_ast(self.lineno, source,
+ astnode=astnode)
+ except SyntaxError:
+ end = self.lineno + 1
+ else:
+ if key is not None:
+ astcache[key] = astnode
+ return source[start:end]
+
+ source = property(getsource)
+
+ def ishidden(self):
+ """ return True if the current frame has a var __tracebackhide__
+ resolving to True
+
+ mostly for internal use
+ """
+ try:
+ return self.frame.f_locals['__tracebackhide__']
+ except KeyError:
+ try:
+ return self.frame.f_globals['__tracebackhide__']
+ except KeyError:
+ return False
+
+ def __str__(self):
+ try:
+ fn = str(self.path)
+ except py.error.Error:
+ fn = '???'
+ name = self.frame.code.name
+ try:
+ line = str(self.statement).lstrip()
+ except KeyboardInterrupt:
+ raise
+ except:
+ line = "???"
return " File %r:%d in %s\n %s\n" % (fn, self.lineno+1, name, line)
-
- def name(self):
- return self.frame.code.raw.co_name
- name = property(name, None, None, "co_name of underlying code")
-
-
-class Traceback(list):
- """ Traceback objects encapsulate and offer higher level
- access to Traceback entries.
- """
- Entry = TracebackEntry
-
- def __init__(self, tb):
- """ initialize from given python traceback object. """
- if hasattr(tb, 'tb_next'):
- def f(cur):
- while cur is not None:
- yield self.Entry(cur)
- cur = cur.tb_next
- list.__init__(self, f(tb))
- else:
- list.__init__(self, tb)
-
- def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
- """ return a Traceback instance wrapping part of this Traceback
-
- by providing any combination of path, lineno and firstlineno, the
- first frame to start the to-be-returned traceback is determined
-
- this allows cutting the first part of a Traceback instance e.g.
- for formatting reasons (removing some uninteresting bits that deal
- with handling of the exception/traceback)
- """
- for x in self:
- code = x.frame.code
- codepath = code.path
- if ((path is None or codepath == path) and
- (excludepath is None or not hasattr(codepath, 'relto') or
- not codepath.relto(excludepath)) and
- (lineno is None or x.lineno == lineno) and
- (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
- return Traceback(x._rawentry)
- return self
-
- def __getitem__(self, key):
- val = super(Traceback, self).__getitem__(key)
- if isinstance(key, type(slice(0))):
- val = self.__class__(val)
- return val
-
- def filter(self, fn=lambda x: not x.ishidden()):
- """ return a Traceback instance with certain items removed
-
- fn is a function that gets a single argument, a TracebackItem
- instance, and should return True when the item should be added
- to the Traceback, False when not
-
- by default this removes all the TracebackItems which are hidden
- (see ishidden() above)
- """
- return Traceback(filter(fn, self))
-
- def getcrashentry(self):
- """ return last non-hidden traceback entry that lead
- to the exception of a traceback.
- """
- for i in range(-1, -len(self)-1, -1):
- entry = self[i]
- if not entry.ishidden():
- return entry
- return self[-1]
-
- def recursionindex(self):
- """ return the index of the frame/TracebackItem where recursion
- originates if appropriate, None if no recursion occurred
- """
- cache = {}
- for i, entry in enumerate(self):
- # id for the code.raw is needed to work around
- # the strange metaprogramming in the decorator lib from pypi
- # which generates code objects that have hash/value equality
- #XXX needs a test
- key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
- #print "checking for recursion at", key
- l = cache.setdefault(key, [])
- if l:
- f = entry.frame
- loc = f.f_locals
- for otherloc in l:
- if f.is_true(f.eval(co_equal,
- __recursioncache_locals_1=loc,
- __recursioncache_locals_2=otherloc)):
- return i
- l.append(entry.frame.f_locals)
- return None
-
-co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
- '?', 'eval')
-
-class ExceptionInfo(object):
- """ wraps sys.exc_info() objects and offers
- help for navigating the traceback.
- """
- _striptext = ''
- def __init__(self, tup=None, exprinfo=None):
- if tup is None:
- tup = sys.exc_info()
- if exprinfo is None and isinstance(tup[1], AssertionError):
- exprinfo = getattr(tup[1], 'msg', None)
- if exprinfo is None:
- exprinfo = str(tup[1])
- if exprinfo and exprinfo.startswith('assert '):
- self._striptext = 'AssertionError: '
- self._excinfo = tup
- #: the exception class
- self.type = tup[0]
- #: the exception instance
- self.value = tup[1]
- #: the exception raw traceback
- self.tb = tup[2]
- #: the exception type name
- self.typename = self.type.__name__
- #: the exception traceback (py.code.Traceback instance)
- self.traceback = py.code.Traceback(self.tb)
-
- def __repr__(self):
+
+ def name(self):
+ return self.frame.code.raw.co_name
+    name = property(name, None, None, "co_name of underlying code")
+
+
+class Traceback(list):
+ """ Traceback objects encapsulate and offer higher level
+ access to Traceback entries.
+ """
+ Entry = TracebackEntry
+
+ def __init__(self, tb):
+ """ initialize from given python traceback object. """
+ if hasattr(tb, 'tb_next'):
+ def f(cur):
+ while cur is not None:
+ yield self.Entry(cur)
+ cur = cur.tb_next
+ list.__init__(self, f(tb))
+ else:
+ list.__init__(self, tb)
+
+ def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
+ """ return a Traceback instance wrapping part of this Traceback
+
+        by providing any combination of path, lineno and firstlineno, the
+ first frame to start the to-be-returned traceback is determined
+
+ this allows cutting the first part of a Traceback instance e.g.
+ for formatting reasons (removing some uninteresting bits that deal
+ with handling of the exception/traceback)
+ """
+ for x in self:
+ code = x.frame.code
+ codepath = code.path
+ if ((path is None or codepath == path) and
+ (excludepath is None or not hasattr(codepath, 'relto') or
+ not codepath.relto(excludepath)) and
+ (lineno is None or x.lineno == lineno) and
+ (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
+ return Traceback(x._rawentry)
+ return self
+
+ def __getitem__(self, key):
+ val = super(Traceback, self).__getitem__(key)
+ if isinstance(key, type(slice(0))):
+ val = self.__class__(val)
+ return val
+
+ def filter(self, fn=lambda x: not x.ishidden()):
+ """ return a Traceback instance with certain items removed
+
+        fn is a function that gets a single argument, a TracebackEntry
+ instance, and should return True when the item should be added
+ to the Traceback, False when not
+
+        by default this removes all the TracebackEntry items which are hidden
+ (see ishidden() above)
+ """
+ return Traceback(filter(fn, self))
+
+ def getcrashentry(self):
+ """ return last non-hidden traceback entry that lead
+ to the exception of a traceback.
+ """
+ for i in range(-1, -len(self)-1, -1):
+ entry = self[i]
+ if not entry.ishidden():
+ return entry
+ return self[-1]
+
+ def recursionindex(self):
+ """ return the index of the frame/TracebackItem where recursion
+ originates if appropriate, None if no recursion occurred
+ """
+ cache = {}
+ for i, entry in enumerate(self):
+ # id for the code.raw is needed to work around
+ # the strange metaprogramming in the decorator lib from pypi
+ # which generates code objects that have hash/value equality
+ #XXX needs a test
+ key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
+ #print "checking for recursion at", key
+ l = cache.setdefault(key, [])
+ if l:
+ f = entry.frame
+ loc = f.f_locals
+ for otherloc in l:
+ if f.is_true(f.eval(co_equal,
+ __recursioncache_locals_1=loc,
+ __recursioncache_locals_2=otherloc)):
+ return i
+ l.append(entry.frame.f_locals)
+ return None
+
+co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
+ '?', 'eval')
+
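A short usage sketch for the Traceback wrapper above (assuming the py package; the excluded path is only an example):

import sys
import py

try:
    1 / 0
except ZeroDivisionError:
    tb = py.code.Traceback(sys.exc_info()[2])
    tail = tb[-1:]                      # slicing returns a Traceback, not a list
    assert isinstance(tail, py.code.Traceback)
    # cut() keeps the traceback from the first frame matching the filters,
    # e.g. the first frame not living under an excluded path
    trimmed = tb.cut(excludepath=py.path.local(sys.prefix))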
+class ExceptionInfo(object):
+ """ wraps sys.exc_info() objects and offers
+ help for navigating the traceback.
+ """
+ _striptext = ''
+ def __init__(self, tup=None, exprinfo=None):
+ if tup is None:
+ tup = sys.exc_info()
+ if exprinfo is None and isinstance(tup[1], AssertionError):
+ exprinfo = getattr(tup[1], 'msg', None)
+ if exprinfo is None:
+ exprinfo = str(tup[1])
+ if exprinfo and exprinfo.startswith('assert '):
+ self._striptext = 'AssertionError: '
+ self._excinfo = tup
+ #: the exception class
+ self.type = tup[0]
+ #: the exception instance
+ self.value = tup[1]
+ #: the exception raw traceback
+ self.tb = tup[2]
+ #: the exception type name
+ self.typename = self.type.__name__
+ #: the exception traceback (py.code.Traceback instance)
+ self.traceback = py.code.Traceback(self.tb)
+
+ def __repr__(self):
return "<ExceptionInfo %s tblen=%d>" % (
self.typename, len(self.traceback))
-
- def exconly(self, tryshort=False):
- """ return the exception as a string
-
- when 'tryshort' resolves to True, and the exception is a
- py.code._AssertionError, only the actual exception part of
- the exception representation is returned (so 'AssertionError: ' is
- removed from the beginning)
- """
- lines = format_exception_only(self.type, self.value)
- text = ''.join(lines)
- text = text.rstrip()
- if tryshort:
- if text.startswith(self._striptext):
- text = text[len(self._striptext):]
- return text
-
- def errisinstance(self, exc):
- """ return True if the exception is an instance of exc """
- return isinstance(self.value, exc)
-
- def _getreprcrash(self):
- exconly = self.exconly(tryshort=True)
- entry = self.traceback.getcrashentry()
- path, lineno = entry.frame.code.raw.co_filename, entry.lineno
- return ReprFileLocation(path, lineno+1, exconly)
-
- def getrepr(self, showlocals=False, style="long",
+
+ def exconly(self, tryshort=False):
+ """ return the exception as a string
+
+ when 'tryshort' resolves to True, and the exception is a
+ py.code._AssertionError, only the actual exception part of
+ the exception representation is returned (so 'AssertionError: ' is
+ removed from the beginning)
+ """
+ lines = format_exception_only(self.type, self.value)
+ text = ''.join(lines)
+ text = text.rstrip()
+ if tryshort:
+ if text.startswith(self._striptext):
+ text = text[len(self._striptext):]
+ return text
+
+ def errisinstance(self, exc):
+ """ return True if the exception is an instance of exc """
+ return isinstance(self.value, exc)
+
+ def _getreprcrash(self):
+ exconly = self.exconly(tryshort=True)
+ entry = self.traceback.getcrashentry()
+ path, lineno = entry.frame.code.raw.co_filename, entry.lineno
+ return ReprFileLocation(path, lineno+1, exconly)
+
+ def getrepr(self, showlocals=False, style="long",
abspath=False, tbfilter=True, funcargs=False):
- """ return str()able representation of this exception info.
- showlocals: show locals per traceback entry
- style: long|short|no|native traceback style
- tbfilter: hide entries (where __tracebackhide__ is true)
-
-        in case of style==native, tbfilter and showlocals are ignored.
- """
- if style == 'native':
- return ReprExceptionInfo(ReprTracebackNative(
+ """ return str()able representation of this exception info.
+ showlocals: show locals per traceback entry
+ style: long|short|no|native traceback style
+ tbfilter: hide entries (where __tracebackhide__ is true)
+
+        in case of style==native, tbfilter and showlocals are ignored.
+ """
+ if style == 'native':
+ return ReprExceptionInfo(ReprTracebackNative(
traceback.format_exception(
- self.type,
- self.value,
- self.traceback[0]._rawentry,
- )), self._getreprcrash())
-
+ self.type,
+ self.value,
+ self.traceback[0]._rawentry,
+ )), self._getreprcrash())
+
fmt = FormattedExcinfo(
showlocals=showlocals, style=style,
- abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
- return fmt.repr_excinfo(self)
-
- def __str__(self):
- entry = self.traceback[-1]
- loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
- return str(loc)
-
- def __unicode__(self):
- entry = self.traceback[-1]
- loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
+ return fmt.repr_excinfo(self)
+
+ def __str__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return str(loc)
+
+ def __unicode__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
return loc.__unicode__()
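A hedged example of the typical ExceptionInfo flow (assuming only that py is importable):

import py

try:
    {}["missing"]
except KeyError:
    excinfo = py.code.ExceptionInfo()      # wraps sys.exc_info()
    print(excinfo.typename)                # 'KeyError'
    print(excinfo.exconly())               # "KeyError: 'missing'"
    rep = excinfo.getrepr(style="short", showlocals=False)
    print(rep)                             # str()able, see TerminalRepr below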
-
-
-class FormattedExcinfo(object):
- """ presenting information about failing Functions and Generators. """
- # for traceback entries
- flow_marker = ">"
- fail_marker = "E"
-
+
+
+class FormattedExcinfo(object):
+ """ presenting information about failing Functions and Generators. """
+ # for traceback entries
+ flow_marker = ">"
+ fail_marker = "E"
+
def __init__(self, showlocals=False, style="long",
abspath=True, tbfilter=True, funcargs=False):
- self.showlocals = showlocals
- self.style = style
- self.tbfilter = tbfilter
- self.funcargs = funcargs
- self.abspath = abspath
- self.astcache = {}
-
- def _getindent(self, source):
- # figure out indent for given source
- try:
- s = str(source.getstatement(len(source)-1))
- except KeyboardInterrupt:
- raise
- except:
- try:
- s = str(source[-1])
- except KeyboardInterrupt:
- raise
- except:
- return 0
- return 4 + (len(s) - len(s.lstrip()))
-
- def _getentrysource(self, entry):
- source = entry.getsource(self.astcache)
- if source is not None:
- source = source.deindent()
- return source
-
- def _saferepr(self, obj):
- return py.io.saferepr(obj)
-
- def repr_args(self, entry):
- if self.funcargs:
- args = []
- for argname, argvalue in entry.frame.getargs(var=True):
- args.append((argname, self._saferepr(argvalue)))
- return ReprFuncArgs(args)
-
- def get_source(self, source, line_index=-1, excinfo=None, short=False):
- """ return formatted and marked up source lines. """
- lines = []
- if source is None or line_index >= len(source.lines):
- source = py.code.Source("???")
- line_index = 0
- if line_index < 0:
- line_index += len(source)
- space_prefix = " "
- if short:
- lines.append(space_prefix + source.lines[line_index].strip())
- else:
- for line in source.lines[:line_index]:
- lines.append(space_prefix + line)
- lines.append(self.flow_marker + " " + source.lines[line_index])
- for line in source.lines[line_index+1:]:
- lines.append(space_prefix + line)
- if excinfo is not None:
- indent = 4 if short else self._getindent(source)
- lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
- return lines
-
- def get_exconly(self, excinfo, indent=4, markall=False):
- lines = []
- indent = " " * indent
- # get the real exception information out
- exlines = excinfo.exconly(tryshort=True).split('\n')
- failindent = self.fail_marker + indent[1:]
- for line in exlines:
- lines.append(failindent + line)
- if not markall:
- failindent = indent
- return lines
-
- def repr_locals(self, locals):
- if self.showlocals:
- lines = []
- keys = [loc for loc in locals if loc[0] != "@"]
- keys.sort()
- for name in keys:
- value = locals[name]
- if name == '__builtins__':
- lines.append("__builtins__ = <builtins>")
- else:
- # This formatting could all be handled by the
- # _repr() function, which is only reprlib.Repr in
- # disguise, so is very configurable.
- str_repr = self._saferepr(value)
- #if len(str_repr) < 70 or not isinstance(value,
- # (list, tuple, dict)):
- lines.append("%-10s = %s" %(name, str_repr))
- #else:
- # self._line("%-10s =\\" % (name,))
- # # XXX
+ self.showlocals = showlocals
+ self.style = style
+ self.tbfilter = tbfilter
+ self.funcargs = funcargs
+ self.abspath = abspath
+ self.astcache = {}
+
+ def _getindent(self, source):
+ # figure out indent for given source
+ try:
+ s = str(source.getstatement(len(source)-1))
+ except KeyboardInterrupt:
+ raise
+ except:
+ try:
+ s = str(source[-1])
+ except KeyboardInterrupt:
+ raise
+ except:
+ return 0
+ return 4 + (len(s) - len(s.lstrip()))
+
+ def _getentrysource(self, entry):
+ source = entry.getsource(self.astcache)
+ if source is not None:
+ source = source.deindent()
+ return source
+
+ def _saferepr(self, obj):
+ return py.io.saferepr(obj)
+
+ def repr_args(self, entry):
+ if self.funcargs:
+ args = []
+ for argname, argvalue in entry.frame.getargs(var=True):
+ args.append((argname, self._saferepr(argvalue)))
+ return ReprFuncArgs(args)
+
+ def get_source(self, source, line_index=-1, excinfo=None, short=False):
+ """ return formatted and marked up source lines. """
+ lines = []
+ if source is None or line_index >= len(source.lines):
+ source = py.code.Source("???")
+ line_index = 0
+ if line_index < 0:
+ line_index += len(source)
+ space_prefix = " "
+ if short:
+ lines.append(space_prefix + source.lines[line_index].strip())
+ else:
+ for line in source.lines[:line_index]:
+ lines.append(space_prefix + line)
+ lines.append(self.flow_marker + " " + source.lines[line_index])
+ for line in source.lines[line_index+1:]:
+ lines.append(space_prefix + line)
+ if excinfo is not None:
+ indent = 4 if short else self._getindent(source)
+ lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
+ return lines
+
+ def get_exconly(self, excinfo, indent=4, markall=False):
+ lines = []
+ indent = " " * indent
+ # get the real exception information out
+ exlines = excinfo.exconly(tryshort=True).split('\n')
+ failindent = self.fail_marker + indent[1:]
+ for line in exlines:
+ lines.append(failindent + line)
+ if not markall:
+ failindent = indent
+ return lines
+
+ def repr_locals(self, locals):
+ if self.showlocals:
+ lines = []
+ keys = [loc for loc in locals if loc[0] != "@"]
+ keys.sort()
+ for name in keys:
+ value = locals[name]
+ if name == '__builtins__':
+ lines.append("__builtins__ = <builtins>")
+ else:
+ # This formatting could all be handled by the
+ # _repr() function, which is only reprlib.Repr in
+ # disguise, so is very configurable.
+ str_repr = self._saferepr(value)
+ #if len(str_repr) < 70 or not isinstance(value,
+ # (list, tuple, dict)):
+ lines.append("%-10s = %s" %(name, str_repr))
+ #else:
+ # self._line("%-10s =\\" % (name,))
+ # # XXX
# pprint.pprint(value, stream=self.excinfowriter)
- return ReprLocals(lines)
-
- def repr_traceback_entry(self, entry, excinfo=None):
- source = self._getentrysource(entry)
- if source is None:
- source = py.code.Source("???")
- line_index = 0
- else:
- # entry.getfirstlinesource() can be -1, should be 0 on jython
- line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
-
- lines = []
- style = entry._repr_style
- if style is None:
- style = self.style
- if style in ("short", "long"):
- short = style == "short"
- reprargs = self.repr_args(entry) if not short else None
- s = self.get_source(source, line_index, excinfo, short=short)
- lines.extend(s)
- if short:
- message = "in %s" %(entry.name)
- else:
- message = excinfo and excinfo.typename or ""
- path = self._makepath(entry.path)
- filelocrepr = ReprFileLocation(path, entry.lineno+1, message)
- localsrepr = None
- if not short:
- localsrepr = self.repr_locals(entry.locals)
- return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
- if excinfo:
- lines.extend(self.get_exconly(excinfo, indent=4))
- return ReprEntry(lines, None, None, None, style)
-
- def _makepath(self, path):
- if not self.abspath:
- try:
- np = py.path.local().bestrelpath(path)
- except OSError:
- return path
- if len(np) < len(str(path)):
- path = np
- return path
-
- def repr_traceback(self, excinfo):
- traceback = excinfo.traceback
- if self.tbfilter:
- traceback = traceback.filter()
- recursionindex = None
- if excinfo.errisinstance(RuntimeError):
- if "maximum recursion depth exceeded" in str(excinfo.value):
- recursionindex = traceback.recursionindex()
- last = traceback[-1]
- entries = []
- extraline = None
- for index, entry in enumerate(traceback):
- einfo = (last == entry) and excinfo or None
- reprentry = self.repr_traceback_entry(entry, einfo)
- entries.append(reprentry)
- if index == recursionindex:
- extraline = "!!! Recursion detected (same locals & position)"
- break
- return ReprTraceback(entries, extraline, style=self.style)
-
- def repr_excinfo(self, excinfo):
- reprtraceback = self.repr_traceback(excinfo)
- reprcrash = excinfo._getreprcrash()
- return ReprExceptionInfo(reprtraceback, reprcrash)
-
-class TerminalRepr:
- def __str__(self):
- s = self.__unicode__()
- if sys.version_info[0] < 3:
- s = s.encode('utf-8')
- return s
-
- def __unicode__(self):
- # FYI this is called from pytest-xdist's serialization of exception
- # information.
- io = py.io.TextIO()
- tw = py.io.TerminalWriter(file=io)
- self.toterminal(tw)
- return io.getvalue().strip()
-
- def __repr__(self):
- return "<%s instance at %0x>" %(self.__class__, id(self))
-
-
-class ReprExceptionInfo(TerminalRepr):
- def __init__(self, reprtraceback, reprcrash):
- self.reprtraceback = reprtraceback
- self.reprcrash = reprcrash
- self.sections = []
-
- def addsection(self, name, content, sep="-"):
- self.sections.append((name, content, sep))
-
- def toterminal(self, tw):
- self.reprtraceback.toterminal(tw)
- for name, content, sep in self.sections:
- tw.sep(sep, name)
- tw.line(content)
-
-class ReprTraceback(TerminalRepr):
- entrysep = "_ "
-
- def __init__(self, reprentries, extraline, style):
- self.reprentries = reprentries
- self.extraline = extraline
- self.style = style
-
- def toterminal(self, tw):
- # the entries might have different styles
- last_style = None
- for i, entry in enumerate(self.reprentries):
- if entry.style == "long":
- tw.line("")
- entry.toterminal(tw)
- if i < len(self.reprentries) - 1:
- next_entry = self.reprentries[i+1]
- if entry.style == "long" or \
- entry.style == "short" and next_entry.style == "long":
- tw.sep(self.entrysep)
-
- if self.extraline:
- tw.line(self.extraline)
-
-class ReprTracebackNative(ReprTraceback):
- def __init__(self, tblines):
- self.style = "native"
- self.reprentries = [ReprEntryNative(tblines)]
- self.extraline = None
-
-class ReprEntryNative(TerminalRepr):
- style = "native"
-
- def __init__(self, tblines):
- self.lines = tblines
-
- def toterminal(self, tw):
- tw.write("".join(self.lines))
-
-class ReprEntry(TerminalRepr):
- localssep = "_ "
-
- def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
- self.lines = lines
- self.reprfuncargs = reprfuncargs
- self.reprlocals = reprlocals
- self.reprfileloc = filelocrepr
- self.style = style
-
- def toterminal(self, tw):
- if self.style == "short":
- self.reprfileloc.toterminal(tw)
- for line in self.lines:
- red = line.startswith("E ")
- tw.line(line, bold=True, red=red)
- #tw.line("")
- return
- if self.reprfuncargs:
- self.reprfuncargs.toterminal(tw)
- for line in self.lines:
- red = line.startswith("E ")
- tw.line(line, bold=True, red=red)
- if self.reprlocals:
- #tw.sep(self.localssep, "Locals")
- tw.line("")
- self.reprlocals.toterminal(tw)
- if self.reprfileloc:
- if self.lines:
- tw.line("")
- self.reprfileloc.toterminal(tw)
-
- def __str__(self):
- return "%s\n%s\n%s" % ("\n".join(self.lines),
- self.reprlocals,
- self.reprfileloc)
-
-class ReprFileLocation(TerminalRepr):
- def __init__(self, path, lineno, message):
- self.path = str(path)
- self.lineno = lineno
- self.message = message
-
- def toterminal(self, tw):
- # filename and lineno output for each entry,
-        # using an output format that most editors understand
- msg = self.message
- i = msg.find("\n")
- if i != -1:
- msg = msg[:i]
- tw.line("%s:%s: %s" %(self.path, self.lineno, msg))
-
-class ReprLocals(TerminalRepr):
- def __init__(self, lines):
- self.lines = lines
-
- def toterminal(self, tw):
- for line in self.lines:
- tw.line(line)
-
-class ReprFuncArgs(TerminalRepr):
- def __init__(self, args):
- self.args = args
-
- def toterminal(self, tw):
- if self.args:
- linesofar = ""
- for name, value in self.args:
- ns = "%s = %s" %(name, value)
- if len(ns) + len(linesofar) + 2 > tw.fullwidth:
- if linesofar:
- tw.line(linesofar)
- linesofar = ns
- else:
- if linesofar:
- linesofar += ", " + ns
- else:
- linesofar = ns
- if linesofar:
- tw.line(linesofar)
- tw.line("")
-
-
-
-oldbuiltins = {}
-
-def patch_builtins(assertion=True, compile=True):
- """ put compile and AssertionError builtins to Python's builtins. """
- if assertion:
- from py._code import assertion
- l = oldbuiltins.setdefault('AssertionError', [])
- l.append(py.builtin.builtins.AssertionError)
- py.builtin.builtins.AssertionError = assertion.AssertionError
- if compile:
- l = oldbuiltins.setdefault('compile', [])
- l.append(py.builtin.builtins.compile)
- py.builtin.builtins.compile = py.code.compile
-
-def unpatch_builtins(assertion=True, compile=True):
- """ remove compile and AssertionError builtins from Python builtins. """
- if assertion:
- py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
- if compile:
- py.builtin.builtins.compile = oldbuiltins['compile'].pop()
-
-def getrawcode(obj, trycall=True):
- """ return code object for given function. """
- try:
- return obj.__code__
- except AttributeError:
- obj = getattr(obj, 'im_func', obj)
- obj = getattr(obj, 'func_code', obj)
- obj = getattr(obj, 'f_code', obj)
- obj = getattr(obj, '__code__', obj)
- if trycall and not hasattr(obj, 'co_firstlineno'):
+ return ReprLocals(lines)
+
+ def repr_traceback_entry(self, entry, excinfo=None):
+ source = self._getentrysource(entry)
+ if source is None:
+ source = py.code.Source("???")
+ line_index = 0
+ else:
+ # entry.getfirstlinesource() can be -1, should be 0 on jython
+ line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
+
+ lines = []
+ style = entry._repr_style
+ if style is None:
+ style = self.style
+ if style in ("short", "long"):
+ short = style == "short"
+ reprargs = self.repr_args(entry) if not short else None
+ s = self.get_source(source, line_index, excinfo, short=short)
+ lines.extend(s)
+ if short:
+ message = "in %s" %(entry.name)
+ else:
+ message = excinfo and excinfo.typename or ""
+ path = self._makepath(entry.path)
+ filelocrepr = ReprFileLocation(path, entry.lineno+1, message)
+ localsrepr = None
+ if not short:
+ localsrepr = self.repr_locals(entry.locals)
+ return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
+ if excinfo:
+ lines.extend(self.get_exconly(excinfo, indent=4))
+ return ReprEntry(lines, None, None, None, style)
+
+ def _makepath(self, path):
+ if not self.abspath:
+ try:
+ np = py.path.local().bestrelpath(path)
+ except OSError:
+ return path
+ if len(np) < len(str(path)):
+ path = np
+ return path
+
+ def repr_traceback(self, excinfo):
+ traceback = excinfo.traceback
+ if self.tbfilter:
+ traceback = traceback.filter()
+ recursionindex = None
+ if excinfo.errisinstance(RuntimeError):
+ if "maximum recursion depth exceeded" in str(excinfo.value):
+ recursionindex = traceback.recursionindex()
+ last = traceback[-1]
+ entries = []
+ extraline = None
+ for index, entry in enumerate(traceback):
+ einfo = (last == entry) and excinfo or None
+ reprentry = self.repr_traceback_entry(entry, einfo)
+ entries.append(reprentry)
+ if index == recursionindex:
+ extraline = "!!! Recursion detected (same locals & position)"
+ break
+ return ReprTraceback(entries, extraline, style=self.style)
+
+ def repr_excinfo(self, excinfo):
+ reprtraceback = self.repr_traceback(excinfo)
+ reprcrash = excinfo._getreprcrash()
+ return ReprExceptionInfo(reprtraceback, reprcrash)
+
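FormattedExcinfo is normally driven through ExceptionInfo.getrepr(); a direct use would look roughly like the sketch below (FormattedExcinfo is an internal class, so the import path is an assumption):

import py
from py._code.code import FormattedExcinfo

try:
    assert 1 == 2
except AssertionError:
    excinfo = py.code.ExceptionInfo()
    fmt = FormattedExcinfo(style="long", showlocals=True, abspath=False)
    rep = fmt.repr_excinfo(excinfo)
    rep.toterminal(py.io.TerminalWriter())   # TerminalWriter defaults to stdout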
+class TerminalRepr:
+ def __str__(self):
+ s = self.__unicode__()
+ if sys.version_info[0] < 3:
+ s = s.encode('utf-8')
+ return s
+
+ def __unicode__(self):
+ # FYI this is called from pytest-xdist's serialization of exception
+ # information.
+ io = py.io.TextIO()
+ tw = py.io.TerminalWriter(file=io)
+ self.toterminal(tw)
+ return io.getvalue().strip()
+
+ def __repr__(self):
+ return "<%s instance at %0x>" %(self.__class__, id(self))
+
+
+class ReprExceptionInfo(TerminalRepr):
+ def __init__(self, reprtraceback, reprcrash):
+ self.reprtraceback = reprtraceback
+ self.reprcrash = reprcrash
+ self.sections = []
+
+ def addsection(self, name, content, sep="-"):
+ self.sections.append((name, content, sep))
+
+ def toterminal(self, tw):
+ self.reprtraceback.toterminal(tw)
+ for name, content, sep in self.sections:
+ tw.sep(sep, name)
+ tw.line(content)
+
+class ReprTraceback(TerminalRepr):
+ entrysep = "_ "
+
+ def __init__(self, reprentries, extraline, style):
+ self.reprentries = reprentries
+ self.extraline = extraline
+ self.style = style
+
+ def toterminal(self, tw):
+ # the entries might have different styles
+ last_style = None
+ for i, entry in enumerate(self.reprentries):
+ if entry.style == "long":
+ tw.line("")
+ entry.toterminal(tw)
+ if i < len(self.reprentries) - 1:
+ next_entry = self.reprentries[i+1]
+ if entry.style == "long" or \
+ entry.style == "short" and next_entry.style == "long":
+ tw.sep(self.entrysep)
+
+ if self.extraline:
+ tw.line(self.extraline)
+
+class ReprTracebackNative(ReprTraceback):
+ def __init__(self, tblines):
+ self.style = "native"
+ self.reprentries = [ReprEntryNative(tblines)]
+ self.extraline = None
+
+class ReprEntryNative(TerminalRepr):
+ style = "native"
+
+ def __init__(self, tblines):
+ self.lines = tblines
+
+ def toterminal(self, tw):
+ tw.write("".join(self.lines))
+
+class ReprEntry(TerminalRepr):
+ localssep = "_ "
+
+ def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
+ self.lines = lines
+ self.reprfuncargs = reprfuncargs
+ self.reprlocals = reprlocals
+ self.reprfileloc = filelocrepr
+ self.style = style
+
+ def toterminal(self, tw):
+ if self.style == "short":
+ self.reprfileloc.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ #tw.line("")
+ return
+ if self.reprfuncargs:
+ self.reprfuncargs.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ if self.reprlocals:
+ #tw.sep(self.localssep, "Locals")
+ tw.line("")
+ self.reprlocals.toterminal(tw)
+ if self.reprfileloc:
+ if self.lines:
+ tw.line("")
+ self.reprfileloc.toterminal(tw)
+
+ def __str__(self):
+ return "%s\n%s\n%s" % ("\n".join(self.lines),
+ self.reprlocals,
+ self.reprfileloc)
+
+class ReprFileLocation(TerminalRepr):
+ def __init__(self, path, lineno, message):
+ self.path = str(path)
+ self.lineno = lineno
+ self.message = message
+
+ def toterminal(self, tw):
+ # filename and lineno output for each entry,
+        # using an output format that most editors understand
+ msg = self.message
+ i = msg.find("\n")
+ if i != -1:
+ msg = msg[:i]
+ tw.line("%s:%s: %s" %(self.path, self.lineno, msg))
+
+class ReprLocals(TerminalRepr):
+ def __init__(self, lines):
+ self.lines = lines
+
+ def toterminal(self, tw):
+ for line in self.lines:
+ tw.line(line)
+
+class ReprFuncArgs(TerminalRepr):
+ def __init__(self, args):
+ self.args = args
+
+ def toterminal(self, tw):
+ if self.args:
+ linesofar = ""
+ for name, value in self.args:
+ ns = "%s = %s" %(name, value)
+ if len(ns) + len(linesofar) + 2 > tw.fullwidth:
+ if linesofar:
+ tw.line(linesofar)
+ linesofar = ns
+ else:
+ if linesofar:
+ linesofar += ", " + ns
+ else:
+ linesofar = ns
+ if linesofar:
+ tw.line(linesofar)
+ tw.line("")
+
+
+
+oldbuiltins = {}
+
+def patch_builtins(assertion=True, compile=True):
+ """ put compile and AssertionError builtins to Python's builtins. """
+ if assertion:
+ from py._code import assertion
+ l = oldbuiltins.setdefault('AssertionError', [])
+ l.append(py.builtin.builtins.AssertionError)
+ py.builtin.builtins.AssertionError = assertion.AssertionError
+ if compile:
+ l = oldbuiltins.setdefault('compile', [])
+ l.append(py.builtin.builtins.compile)
+ py.builtin.builtins.compile = py.code.compile
+
+def unpatch_builtins(assertion=True, compile=True):
+ """ remove compile and AssertionError builtins from Python builtins. """
+ if assertion:
+ py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
+ if compile:
+ py.builtin.builtins.compile = oldbuiltins['compile'].pop()
+
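A sketch of the patch/unpatch pair in use (the py.code re-exports are assumptions based on how this module is published through py.code):

import py

py.code.patch_builtins()           # swap in py's AssertionError and compile
try:
    co = compile("x = 40 + 2", '<demo>', 'exec')   # now the caching py.code.compile
    exec(co)
finally:
    py.code.unpatch_builtins()     # pop() restores the saved originals (LIFO)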
+def getrawcode(obj, trycall=True):
+ """ return code object for given function. """
+ try:
+ return obj.__code__
+ except AttributeError:
+ obj = getattr(obj, 'im_func', obj)
+ obj = getattr(obj, 'func_code', obj)
+ obj = getattr(obj, 'f_code', obj)
+ obj = getattr(obj, '__code__', obj)
+ if trycall and not hasattr(obj, 'co_firstlineno'):
if hasattr(obj, '__call__') and not isclass(obj):
- x = getrawcode(obj.__call__, trycall=False)
- if hasattr(x, 'co_firstlineno'):
- return x
- return obj
-
+ x = getrawcode(obj.__call__, trycall=False)
+ if hasattr(x, 'co_firstlineno'):
+ return x
+ return obj
+
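A small sketch of what getrawcode() normalizes (assuming py is importable):

import py

class Greeter(object):
    def __call__(self):
        return "hi"

def plain():
    pass

assert py.code.getrawcode(plain).co_name == 'plain'
# a callable instance has no code object of its own; trycall=True falls
# back to its __call__ method
assert py.code.getrawcode(Greeter()).co_name == '__call__'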
diff --git a/contrib/python/py/py/_code/source.py b/contrib/python/py/py/_code/source.py
index 63004c814e..7fc7b23a96 100644
--- a/contrib/python/py/py/_code/source.py
+++ b/contrib/python/py/py/_code/source.py
@@ -1,410 +1,410 @@
-from __future__ import generators
-
-from bisect import bisect_right
-import sys
-import inspect, tokenize
-import py
-from types import ModuleType
-cpy_compile = compile
-
-try:
- import _ast
- from _ast import PyCF_ONLY_AST as _AST_FLAG
-except ImportError:
- _AST_FLAG = 0
- _ast = None
-
-
-class Source(object):
- """ a immutable object holding a source code fragment,
- possibly deindenting it.
- """
- _compilecounter = 0
- def __init__(self, *parts, **kwargs):
- self.lines = lines = []
- de = kwargs.get('deindent', True)
- rstrip = kwargs.get('rstrip', True)
- for part in parts:
- if not part:
- partlines = []
- if isinstance(part, Source):
- partlines = part.lines
- elif isinstance(part, (tuple, list)):
- partlines = [x.rstrip("\n") for x in part]
- elif isinstance(part, py.builtin._basestring):
- partlines = part.split('\n')
- if rstrip:
- while partlines:
- if partlines[-1].strip():
- break
- partlines.pop()
- else:
- partlines = getsource(part, deindent=de).lines
- if de:
- partlines = deindent(partlines)
- lines.extend(partlines)
-
- def __eq__(self, other):
- try:
- return self.lines == other.lines
- except AttributeError:
- if isinstance(other, str):
- return str(self) == other
- return False
-
- def __getitem__(self, key):
- if isinstance(key, int):
- return self.lines[key]
- else:
- if key.step not in (None, 1):
- raise IndexError("cannot slice a Source with a step")
- return self.__getslice__(key.start, key.stop)
-
- def __len__(self):
- return len(self.lines)
-
- def __getslice__(self, start, end):
- newsource = Source()
- newsource.lines = self.lines[start:end]
- return newsource
-
- def strip(self):
- """ return new source object with trailing
- and leading blank lines removed.
- """
- start, end = 0, len(self)
- while start < end and not self.lines[start].strip():
- start += 1
- while end > start and not self.lines[end-1].strip():
- end -= 1
- source = Source()
- source.lines[:] = self.lines[start:end]
- return source
-
- def putaround(self, before='', after='', indent=' ' * 4):
- """ return a copy of the source object with
- 'before' and 'after' wrapped around it.
- """
- before = Source(before)
- after = Source(after)
- newsource = Source()
- lines = [ (indent + line) for line in self.lines]
- newsource.lines = before.lines + lines + after.lines
- return newsource
-
- def indent(self, indent=' ' * 4):
- """ return a copy of the source object with
- all lines indented by the given indent-string.
- """
- newsource = Source()
- newsource.lines = [(indent+line) for line in self.lines]
- return newsource
-
- def getstatement(self, lineno, assertion=False):
- """ return Source statement which contains the
- given linenumber (counted from 0).
- """
- start, end = self.getstatementrange(lineno, assertion)
- return self[start:end]
-
- def getstatementrange(self, lineno, assertion=False):
- """ return (start, end) tuple which spans the minimal
-        statement region containing the given lineno.
- """
- if not (0 <= lineno < len(self)):
- raise IndexError("lineno out of range")
- ast, start, end = getstatementrange_ast(lineno, self)
- return start, end
-
- def deindent(self, offset=None):
- """ return a new source object deindented by offset.
- If offset is None then guess an indentation offset from
- the first non-blank line. Subsequent lines which have a
- lower indentation offset will be copied verbatim as
- they are assumed to be part of multilines.
- """
- # XXX maybe use the tokenizer to properly handle multiline
-        # strings etc.?
- newsource = Source()
- newsource.lines[:] = deindent(self.lines, offset)
- return newsource
-
- def isparseable(self, deindent=True):
- """ return True if source is parseable, heuristically
- deindenting it by default.
- """
- try:
- import parser
- except ImportError:
- syntax_checker = lambda x: compile(x, 'asd', 'exec')
- else:
- syntax_checker = parser.suite
-
- if deindent:
- source = str(self.deindent())
- else:
- source = str(self)
- try:
- #compile(source+'\n', "x", "exec")
- syntax_checker(source+'\n')
- except KeyboardInterrupt:
- raise
- except Exception:
- return False
- else:
- return True
-
- def __str__(self):
- return "\n".join(self.lines)
-
- def compile(self, filename=None, mode='exec',
- flag=generators.compiler_flag,
- dont_inherit=0, _genframe=None):
- """ return compiled code object. if filename is None
- invent an artificial filename which displays
- the source/line position of the caller frame.
- """
- if not filename or py.path.local(filename).check(file=0):
- if _genframe is None:
- _genframe = sys._getframe(1) # the caller
- fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
- base = "<%d-codegen " % self._compilecounter
- self.__class__._compilecounter += 1
- if not filename:
- filename = base + '%s:%d>' % (fn, lineno)
- else:
- filename = base + '%r %s:%d>' % (filename, fn, lineno)
- source = "\n".join(self.lines) + '\n'
- try:
- co = cpy_compile(source, filename, mode, flag)
- except SyntaxError:
- ex = sys.exc_info()[1]
- # re-represent syntax errors from parsing python strings
- msglines = self.lines[:ex.lineno]
- if ex.offset:
- msglines.append(" "*ex.offset + '^')
- msglines.append("(code was compiled probably from here: %s)" % filename)
- newex = SyntaxError('\n'.join(msglines))
- newex.offset = ex.offset
- newex.lineno = ex.lineno
- newex.text = ex.text
- raise newex
- else:
- if flag & _AST_FLAG:
- return co
- lines = [(x + "\n") for x in self.lines]
+from __future__ import generators
+
+from bisect import bisect_right
+import sys
+import inspect, tokenize
+import py
+from types import ModuleType
+cpy_compile = compile
+
+try:
+ import _ast
+ from _ast import PyCF_ONLY_AST as _AST_FLAG
+except ImportError:
+ _AST_FLAG = 0
+ _ast = None
+
+
+class Source(object):
+ """ a immutable object holding a source code fragment,
+ possibly deindenting it.
+ """
+ _compilecounter = 0
+ def __init__(self, *parts, **kwargs):
+ self.lines = lines = []
+ de = kwargs.get('deindent', True)
+ rstrip = kwargs.get('rstrip', True)
+ for part in parts:
+ if not part:
+ partlines = []
+ if isinstance(part, Source):
+ partlines = part.lines
+ elif isinstance(part, (tuple, list)):
+ partlines = [x.rstrip("\n") for x in part]
+ elif isinstance(part, py.builtin._basestring):
+ partlines = part.split('\n')
+ if rstrip:
+ while partlines:
+ if partlines[-1].strip():
+ break
+ partlines.pop()
+ else:
+ partlines = getsource(part, deindent=de).lines
+ if de:
+ partlines = deindent(partlines)
+ lines.extend(partlines)
+
+ def __eq__(self, other):
+ try:
+ return self.lines == other.lines
+ except AttributeError:
+ if isinstance(other, str):
+ return str(self) == other
+ return False
+
+ def __getitem__(self, key):
+ if isinstance(key, int):
+ return self.lines[key]
+ else:
+ if key.step not in (None, 1):
+ raise IndexError("cannot slice a Source with a step")
+ return self.__getslice__(key.start, key.stop)
+
+ def __len__(self):
+ return len(self.lines)
+
+ def __getslice__(self, start, end):
+ newsource = Source()
+ newsource.lines = self.lines[start:end]
+ return newsource
+
+ def strip(self):
+ """ return new source object with trailing
+ and leading blank lines removed.
+ """
+ start, end = 0, len(self)
+ while start < end and not self.lines[start].strip():
+ start += 1
+ while end > start and not self.lines[end-1].strip():
+ end -= 1
+ source = Source()
+ source.lines[:] = self.lines[start:end]
+ return source
+
+ def putaround(self, before='', after='', indent=' ' * 4):
+ """ return a copy of the source object with
+ 'before' and 'after' wrapped around it.
+ """
+ before = Source(before)
+ after = Source(after)
+ newsource = Source()
+ lines = [ (indent + line) for line in self.lines]
+ newsource.lines = before.lines + lines + after.lines
+ return newsource
+
+ def indent(self, indent=' ' * 4):
+ """ return a copy of the source object with
+ all lines indented by the given indent-string.
+ """
+ newsource = Source()
+ newsource.lines = [(indent+line) for line in self.lines]
+ return newsource
+
+ def getstatement(self, lineno, assertion=False):
+ """ return Source statement which contains the
+ given linenumber (counted from 0).
+ """
+ start, end = self.getstatementrange(lineno, assertion)
+ return self[start:end]
+
+ def getstatementrange(self, lineno, assertion=False):
+ """ return (start, end) tuple which spans the minimal
+        statement region containing the given lineno.
+ """
+ if not (0 <= lineno < len(self)):
+ raise IndexError("lineno out of range")
+ ast, start, end = getstatementrange_ast(lineno, self)
+ return start, end
+
+ def deindent(self, offset=None):
+ """ return a new source object deindented by offset.
+ If offset is None then guess an indentation offset from
+ the first non-blank line. Subsequent lines which have a
+ lower indentation offset will be copied verbatim as
+ they are assumed to be part of multilines.
+ """
+ # XXX maybe use the tokenizer to properly handle multiline
+        # strings etc.?
+ newsource = Source()
+ newsource.lines[:] = deindent(self.lines, offset)
+ return newsource
+
+ def isparseable(self, deindent=True):
+ """ return True if source is parseable, heuristically
+ deindenting it by default.
+ """
+ try:
+ import parser
+ except ImportError:
+ syntax_checker = lambda x: compile(x, 'asd', 'exec')
+ else:
+ syntax_checker = parser.suite
+
+ if deindent:
+ source = str(self.deindent())
+ else:
+ source = str(self)
+ try:
+ #compile(source+'\n', "x", "exec")
+ syntax_checker(source+'\n')
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ return False
+ else:
+ return True
+
+ def __str__(self):
+ return "\n".join(self.lines)
+
+ def compile(self, filename=None, mode='exec',
+ flag=generators.compiler_flag,
+ dont_inherit=0, _genframe=None):
+ """ return compiled code object. if filename is None
+ invent an artificial filename which displays
+ the source/line position of the caller frame.
+ """
+ if not filename or py.path.local(filename).check(file=0):
+ if _genframe is None:
+ _genframe = sys._getframe(1) # the caller
+ fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
+ base = "<%d-codegen " % self._compilecounter
+ self.__class__._compilecounter += 1
+ if not filename:
+ filename = base + '%s:%d>' % (fn, lineno)
+ else:
+ filename = base + '%r %s:%d>' % (filename, fn, lineno)
+ source = "\n".join(self.lines) + '\n'
+ try:
+ co = cpy_compile(source, filename, mode, flag)
+ except SyntaxError:
+ ex = sys.exc_info()[1]
+ # re-represent syntax errors from parsing python strings
+ msglines = self.lines[:ex.lineno]
+ if ex.offset:
+ msglines.append(" "*ex.offset + '^')
+ msglines.append("(code was compiled probably from here: %s)" % filename)
+ newex = SyntaxError('\n'.join(msglines))
+ newex.offset = ex.offset
+ newex.lineno = ex.lineno
+ newex.text = ex.text
+ raise newex
+ else:
+ if flag & _AST_FLAG:
+ return co
+ lines = [(x + "\n") for x in self.lines]
import linecache
linecache.cache[filename] = (1, None, lines, filename)
- return co
-
-#
-# public API shortcut functions
-#
-
-def compile_(source, filename=None, mode='exec', flags=
- generators.compiler_flag, dont_inherit=0):
- """ compile the given source to a raw code object,
- and maintain an internal cache which allows later
- retrieval of the source code for the code object
- and any recursively created code objects.
- """
- if _ast is not None and isinstance(source, _ast.AST):
- # XXX should Source support having AST?
- return cpy_compile(source, filename, mode, flags, dont_inherit)
- _genframe = sys._getframe(1) # the caller
- s = Source(source)
- co = s.compile(filename, mode, flags, _genframe=_genframe)
- return co
-
-
-def getfslineno(obj):
- """ Return source location (path, lineno) for the given object.
- If the source cannot be determined return ("", -1)
- """
- try:
- code = py.code.Code(obj)
- except TypeError:
- try:
+ return co
+
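A short sketch of Source in action (assuming py is importable; line numbers count from 0, as the getstatement docstring says):

import py

src = py.code.Source("    def f(x):\n        return x + 1")
assert src.lines == ['def f(x):', '    return x + 1']   # deindented on init
stmt = src.getstatement(1)                # the statement containing line 1
assert str(stmt).strip() == 'return x + 1'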
+#
+# public API shortcut functions
+#
+
+def compile_(source, filename=None, mode='exec', flags=
+ generators.compiler_flag, dont_inherit=0):
+ """ compile the given source to a raw code object,
+ and maintain an internal cache which allows later
+ retrieval of the source code for the code object
+ and any recursively created code objects.
+ """
+ if _ast is not None and isinstance(source, _ast.AST):
+ # XXX should Source support having AST?
+ return cpy_compile(source, filename, mode, flags, dont_inherit)
+ _genframe = sys._getframe(1) # the caller
+ s = Source(source)
+ co = s.compile(filename, mode, flags, _genframe=_genframe)
+ return co
+
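A sketch of the caching behaviour the docstring above describes (assuming py is importable):

import py

co = py.code.compile("raise ValueError('kaboom')")
try:
    exec(co)
except ValueError:
    excinfo = py.code.ExceptionInfo()
    # the invented '<N-codegen ...>' filename was registered in linecache
    # above, so the failing statement can still be recovered
    print(excinfo.traceback[-1].statement)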
+
+def getfslineno(obj):
+ """ Return source location (path, lineno) for the given object.
+ If the source cannot be determined return ("", -1)
+ """
+ try:
+ code = py.code.Code(obj)
+ except TypeError:
+ try:
fn = (inspect.getsourcefile(obj) or
inspect.getfile(obj))
- except TypeError:
- return "", -1
-
- fspath = fn and py.path.local(fn) or None
- lineno = -1
- if fspath:
- try:
- _, lineno = findsource(obj)
- except IOError:
- pass
- else:
- fspath = code.path
- lineno = code.firstlineno
- assert isinstance(lineno, int)
- return fspath, lineno
-
-#
-# helper functions
-#
-
-def findsource(obj):
- try:
+ except TypeError:
+ return "", -1
+
+ fspath = fn and py.path.local(fn) or None
+ lineno = -1
+ if fspath:
+ try:
+ _, lineno = findsource(obj)
+ except IOError:
+ pass
+ else:
+ fspath = code.path
+ lineno = code.firstlineno
+ assert isinstance(lineno, int)
+ return fspath, lineno
+
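A hedged example of getfslineno() on both branches (py.code.getfslineno is assumed to be the public re-export of this helper):

import py

def f():
    pass

path, lineno = py.code.getfslineno(f)
print(path, lineno)                          # source file, 0-based first line
assert py.code.getfslineno(42) == ("", -1)   # no source for an int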
+#
+# helper functions
+#
+
+def findsource(obj):
+ try:
sourcelines, lineno = inspect.findsource(obj)
- except py.builtin._sysex:
- raise
- except:
- return None, -1
- source = Source()
- source.lines = [line.rstrip() for line in sourcelines]
- return source, lineno
-
-def getsource(obj, **kwargs):
- obj = py.code.getrawcode(obj)
- try:
- strsrc = inspect.getsource(obj)
- except IndentationError:
- strsrc = "\"Buggy python version consider upgrading, cannot get source\""
- assert isinstance(strsrc, str)
- return Source(strsrc, **kwargs)
-
-def deindent(lines, offset=None):
- if offset is None:
- for line in lines:
- line = line.expandtabs()
- s = line.lstrip()
- if s:
- offset = len(line)-len(s)
- break
- else:
- offset = 0
- if offset == 0:
- return list(lines)
- newlines = []
- def readline_generator(lines):
- for line in lines:
- yield line + '\n'
- while True:
- yield ''
-
- it = readline_generator(lines)
-
- try:
- for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
- if sline > len(lines):
- break # End of input reached
- if sline > len(newlines):
- line = lines[sline - 1].expandtabs()
- if line.lstrip() and line[:offset].isspace():
- line = line[offset:] # Deindent
- newlines.append(line)
-
- for i in range(sline, eline):
- # Don't deindent continuing lines of
- # multiline tokens (i.e. multiline strings)
- newlines.append(lines[i])
- except (IndentationError, tokenize.TokenError):
- pass
- # Add any lines we didn't see. E.g. if an exception was raised.
- newlines.extend(lines[len(newlines):])
- return newlines
-
-
-def get_statement_startend2(lineno, node):
- import ast
- # flatten all statements and except handlers into one lineno-list
- # AST's line numbers start indexing at 1
- l = []
- for x in ast.walk(node):
- if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
- l.append(x.lineno - 1)
- for name in "finalbody", "orelse":
- val = getattr(x, name, None)
- if val:
- # treat the finally/orelse part as its own statement
- l.append(val[0].lineno - 1 - 1)
- l.sort()
- insert_index = bisect_right(l, lineno)
- start = l[insert_index - 1]
- if insert_index >= len(l):
- end = None
- else:
- end = l[insert_index]
- return start, end
-
-
-def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
- if astnode is None:
- content = str(source)
- try:
- astnode = compile(content, "source", "exec", 1024) # 1024 for AST
- except ValueError:
- start, end = getstatementrange_old(lineno, source, assertion)
- return None, start, end
- start, end = get_statement_startend2(lineno, astnode)
- # we need to correct the end:
- # - ast-parsing strips comments
- # - there might be empty lines
- # - we might have lesser indented code blocks at the end
- if end is None:
- end = len(source.lines)
-
- if end > start + 1:
- # make sure we don't span differently indented code blocks
-        # by using the BlockFinder helper which inspect.getsource() itself uses
- block_finder = inspect.BlockFinder()
- # if we start with an indented line, put blockfinder to "started" mode
- block_finder.started = source.lines[start][0].isspace()
- it = ((x + "\n") for x in source.lines[start:end])
- try:
- for tok in tokenize.generate_tokens(lambda: next(it)):
- block_finder.tokeneater(*tok)
- except (inspect.EndOfBlock, IndentationError):
- end = block_finder.last + start
- except Exception:
- pass
-
- # the end might still point to a comment or empty line, correct it
- while end:
- line = source.lines[end - 1].lstrip()
- if line.startswith("#") or not line:
- end -= 1
- else:
- break
- return astnode, start, end
-
-
-def getstatementrange_old(lineno, source, assertion=False):
- """ return (start, end) tuple which spans the minimal
-    statement region containing the given lineno.
- raise an IndexError if no such statementrange can be found.
- """
- # XXX this logic is only used on python2.4 and below
- # 1. find the start of the statement
- from codeop import compile_command
- for start in range(lineno, -1, -1):
- if assertion:
- line = source.lines[start]
- # the following lines are not fully tested, change with care
- if 'super' in line and 'self' in line and '__init__' in line:
- raise IndexError("likely a subclass")
- if "assert" not in line and "raise" not in line:
- continue
- trylines = source.lines[start:lineno+1]
- # quick hack to prepare parsing an indented line with
- # compile_command() (which errors on "return" outside defs)
- trylines.insert(0, 'def xxx():')
- trysource = '\n '.join(trylines)
- # ^ space here
- try:
- compile_command(trysource)
- except (SyntaxError, OverflowError, ValueError):
- continue
-
- # 2. find the end of the statement
- for end in range(lineno+1, len(source)+1):
- trysource = source[start:end]
- if trysource.isparseable():
- return start, end
- raise SyntaxError("no valid source range around line %d " % (lineno,))
-
-
+ except py.builtin._sysex:
+ raise
+ except:
+ return None, -1
+ source = Source()
+ source.lines = [line.rstrip() for line in sourcelines]
+ return source, lineno
+
+def getsource(obj, **kwargs):
+ obj = py.code.getrawcode(obj)
+ try:
+ strsrc = inspect.getsource(obj)
+ except IndentationError:
+ strsrc = "\"Buggy python version consider upgrading, cannot get source\""
+ assert isinstance(strsrc, str)
+ return Source(strsrc, **kwargs)
+
+def deindent(lines, offset=None):
+ if offset is None:
+ for line in lines:
+ line = line.expandtabs()
+ s = line.lstrip()
+ if s:
+ offset = len(line)-len(s)
+ break
+ else:
+ offset = 0
+ if offset == 0:
+ return list(lines)
+ newlines = []
+ def readline_generator(lines):
+ for line in lines:
+ yield line + '\n'
+ while True:
+ yield ''
+
+ it = readline_generator(lines)
+
+ try:
+ for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
+ if sline > len(lines):
+ break # End of input reached
+ if sline > len(newlines):
+ line = lines[sline - 1].expandtabs()
+ if line.lstrip() and line[:offset].isspace():
+ line = line[offset:] # Deindent
+ newlines.append(line)
+
+ for i in range(sline, eline):
+ # Don't deindent continuing lines of
+ # multiline tokens (i.e. multiline strings)
+ newlines.append(lines[i])
+ except (IndentationError, tokenize.TokenError):
+ pass
+ # Add any lines we didn't see. E.g. if an exception was raised.
+ newlines.extend(lines[len(newlines):])
+ return newlines
+
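A sketch of the continuation-line behaviour of this helper (deindent is internal, so the import path is an assumption):

from py._code.source import deindent

lines = [
    "    x = '''",
    "        kept as-is",
    "    '''",
]
# only the first line is shifted left by the guessed offset; lines that
# continue the multiline string token are appended verbatim
assert deindent(lines) == ["x = '''", "        kept as-is", "    '''"]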
+
+def get_statement_startend2(lineno, node):
+ import ast
+ # flatten all statements and except handlers into one lineno-list
+ # AST's line numbers start indexing at 1
+ l = []
+ for x in ast.walk(node):
+ if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
+ l.append(x.lineno - 1)
+ for name in "finalbody", "orelse":
+ val = getattr(x, name, None)
+ if val:
+ # treat the finally/orelse part as its own statement
+ l.append(val[0].lineno - 1 - 1)
+ l.sort()
+ insert_index = bisect_right(l, lineno)
+ start = l[insert_index - 1]
+ if insert_index >= len(l):
+ end = None
+ else:
+ end = l[insert_index]
+ return start, end
+
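A worked example of the lineno flattening (internal import path assumed):

import ast
from py._code.source import get_statement_startend2

tree = ast.parse("a = 1\nif a:\n    a += 1\n")
# statements sit on 0-based lines [0, 1, 2]; asking for line 2 returns the
# last region, whose end is None, meaning "until end of source"
assert get_statement_startend2(2, tree) == (2, None)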
+
+def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
+ if astnode is None:
+ content = str(source)
+ try:
+ astnode = compile(content, "source", "exec", 1024) # 1024 for AST
+ except ValueError:
+ start, end = getstatementrange_old(lineno, source, assertion)
+ return None, start, end
+ start, end = get_statement_startend2(lineno, astnode)
+ # we need to correct the end:
+ # - ast-parsing strips comments
+ # - there might be empty lines
+ # - we might have lesser indented code blocks at the end
+ if end is None:
+ end = len(source.lines)
+
+ if end > start + 1:
+ # make sure we don't span differently indented code blocks
+        # by using the BlockFinder helper which inspect.getsource() itself uses
+ block_finder = inspect.BlockFinder()
+ # if we start with an indented line, put blockfinder to "started" mode
+ block_finder.started = source.lines[start][0].isspace()
+ it = ((x + "\n") for x in source.lines[start:end])
+ try:
+ for tok in tokenize.generate_tokens(lambda: next(it)):
+ block_finder.tokeneater(*tok)
+ except (inspect.EndOfBlock, IndentationError):
+ end = block_finder.last + start
+ except Exception:
+ pass
+
+ # the end might still point to a comment or empty line, correct it
+ while end:
+ line = source.lines[end - 1].lstrip()
+ if line.startswith("#") or not line:
+ end -= 1
+ else:
+ break
+ return astnode, start, end
+
+
+def getstatementrange_old(lineno, source, assertion=False):
+ """ return (start, end) tuple which spans the minimal
+    statement region containing the given lineno.
+ raise an IndexError if no such statementrange can be found.
+ """
+ # XXX this logic is only used on python2.4 and below
+ # 1. find the start of the statement
+ from codeop import compile_command
+ for start in range(lineno, -1, -1):
+ if assertion:
+ line = source.lines[start]
+ # the following lines are not fully tested, change with care
+ if 'super' in line and 'self' in line and '__init__' in line:
+ raise IndexError("likely a subclass")
+ if "assert" not in line and "raise" not in line:
+ continue
+ trylines = source.lines[start:lineno+1]
+ # quick hack to prepare parsing an indented line with
+ # compile_command() (which errors on "return" outside defs)
+ trylines.insert(0, 'def xxx():')
+ trysource = '\n '.join(trylines)
+ # ^ space here
+ try:
+ compile_command(trysource)
+ except (SyntaxError, OverflowError, ValueError):
+ continue
+
+ # 2. find the end of the statement
+ for end in range(lineno+1, len(source)+1):
+ trysource = source[start:end]
+ if trysource.isparseable():
+ return start, end
+ raise SyntaxError("no valid source range around line %d " % (lineno,))
+
+
diff --git a/contrib/python/py/py/_error.py b/contrib/python/py/py/_error.py
index 3c17f44516..a6375de9fa 100644
--- a/contrib/python/py/py/_error.py
+++ b/contrib/python/py/py/_error.py
@@ -1,91 +1,91 @@
-"""
-create errno-specific classes for IO or os calls.
-
-"""
+"""
+create errno-specific classes for IO or os calls.
+
+"""
from types import ModuleType
-import sys, os, errno
-
-class Error(EnvironmentError):
- def __repr__(self):
- return "%s.%s %r: %s " %(self.__class__.__module__,
- self.__class__.__name__,
- self.__class__.__doc__,
- " ".join(map(str, self.args)),
- #repr(self.args)
- )
-
- def __str__(self):
- s = "[%s]: %s" %(self.__class__.__doc__,
- " ".join(map(str, self.args)),
- )
- return s
-
-_winerrnomap = {
- 2: errno.ENOENT,
- 3: errno.ENOENT,
- 17: errno.EEXIST,
+import sys, os, errno
+
+class Error(EnvironmentError):
+ def __repr__(self):
+ return "%s.%s %r: %s " %(self.__class__.__module__,
+ self.__class__.__name__,
+ self.__class__.__doc__,
+ " ".join(map(str, self.args)),
+ #repr(self.args)
+ )
+
+ def __str__(self):
+ s = "[%s]: %s" %(self.__class__.__doc__,
+ " ".join(map(str, self.args)),
+ )
+ return s
+
+_winerrnomap = {
+ 2: errno.ENOENT,
+ 3: errno.ENOENT,
+ 17: errno.EEXIST,
18: errno.EXDEV,
-    13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailable
- 22: errno.ENOTDIR,
- 20: errno.ENOTDIR,
- 267: errno.ENOTDIR,
- 5: errno.EACCES, # anything better?
-}
-
+    13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailable
+ 22: errno.ENOTDIR,
+ 20: errno.ENOTDIR,
+ 267: errno.ENOTDIR,
+ 5: errno.EACCES, # anything better?
+}
+
class ErrorMaker(ModuleType):
- """ lazily provides Exception classes for each possible POSIX errno
- (as defined per the 'errno' module). All such instances
- subclass EnvironmentError.
- """
- Error = Error
- _errno2class = {}
-
- def __getattr__(self, name):
- if name[0] == "_":
- raise AttributeError(name)
- eno = getattr(errno, name)
- cls = self._geterrnoclass(eno)
- setattr(self, name, cls)
- return cls
-
- def _geterrnoclass(self, eno):
- try:
- return self._errno2class[eno]
- except KeyError:
- clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
- errorcls = type(Error)(clsname, (Error,),
- {'__module__':'py.error',
- '__doc__': os.strerror(eno)})
- self._errno2class[eno] = errorcls
- return errorcls
-
- def checked_call(self, func, *args, **kwargs):
- """ call a function and raise an errno-exception if applicable. """
- __tracebackhide__ = True
- try:
- return func(*args, **kwargs)
- except self.Error:
- raise
- except (OSError, EnvironmentError):
- cls, value, tb = sys.exc_info()
- if not hasattr(value, 'errno'):
- raise
- __tracebackhide__ = False
- errno = value.errno
- try:
- if not isinstance(value, WindowsError):
- raise NameError
- except NameError:
- # we are not on Windows, or we got a proper OSError
- cls = self._geterrnoclass(errno)
- else:
- try:
- cls = self._geterrnoclass(_winerrnomap[errno])
- except KeyError:
- raise value
- raise cls("%s%r" % (func.__name__, args))
- __tracebackhide__ = True
-
-
+ """ lazily provides Exception classes for each possible POSIX errno
+ (as defined per the 'errno' module). All such instances
+ subclass EnvironmentError.
+ """
+ Error = Error
+ _errno2class = {}
+
+ def __getattr__(self, name):
+ if name[0] == "_":
+ raise AttributeError(name)
+ eno = getattr(errno, name)
+ cls = self._geterrnoclass(eno)
+ setattr(self, name, cls)
+ return cls
+
+ def _geterrnoclass(self, eno):
+ try:
+ return self._errno2class[eno]
+ except KeyError:
+ clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
+ errorcls = type(Error)(clsname, (Error,),
+ {'__module__':'py.error',
+ '__doc__': os.strerror(eno)})
+ self._errno2class[eno] = errorcls
+ return errorcls
+
+ def checked_call(self, func, *args, **kwargs):
+ """ call a function and raise an errno-exception if applicable. """
+ __tracebackhide__ = True
+ try:
+ return func(*args, **kwargs)
+ except self.Error:
+ raise
+ except (OSError, EnvironmentError):
+ cls, value, tb = sys.exc_info()
+ if not hasattr(value, 'errno'):
+ raise
+ __tracebackhide__ = False
+ errno = value.errno
+ try:
+ if not isinstance(value, WindowsError):
+ raise NameError
+ except NameError:
+ # we are not on Windows, or we got a proper OSError
+ cls = self._geterrnoclass(errno)
+ else:
+ try:
+ cls = self._geterrnoclass(_winerrnomap[errno])
+ except KeyError:
+ raise value
+ raise cls("%s%r" % (func.__name__, args))
+ __tracebackhide__ = True
+
+
error = ErrorMaker('py.error')
 sys.modules[error.__name__] = error
\ No newline at end of file
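A brief usage sketch for the resulting py.error module (assuming py is importable):

import os
import py

try:
    py.error.checked_call(os.stat, "/no/such/path")
except py.error.ENOENT as exc:
    # errno-specific classes are created lazily and subclass EnvironmentError
    print("missing:", exc)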
diff --git a/contrib/python/py/py/_io/__init__.py b/contrib/python/py/py/_io/__init__.py
index c59b5e8ba6..835f01f3ab 100644
--- a/contrib/python/py/py/_io/__init__.py
+++ b/contrib/python/py/py/_io/__init__.py
@@ -1 +1 @@
-""" input/output helping """
+""" input/output helping """
diff --git a/contrib/python/py/py/_io/capture.py b/contrib/python/py/py/_io/capture.py
index 4f547e29ae..cacf2fa71a 100644
--- a/contrib/python/py/py/_io/capture.py
+++ b/contrib/python/py/py/_io/capture.py
@@ -1,371 +1,371 @@
-import os
-import sys
-import py
-import tempfile
-
-try:
- from io import StringIO
-except ImportError:
- from StringIO import StringIO
-
-if sys.version_info < (3,0):
- class TextIO(StringIO):
- def write(self, data):
- if not isinstance(data, unicode):
- data = unicode(data, getattr(self, '_encoding', 'UTF-8'), 'replace')
+import os
+import sys
+import py
+import tempfile
+
+try:
+ from io import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+if sys.version_info < (3,0):
+ class TextIO(StringIO):
+ def write(self, data):
+ if not isinstance(data, unicode):
+ data = unicode(data, getattr(self, '_encoding', 'UTF-8'), 'replace')
return StringIO.write(self, data)
-else:
- TextIO = StringIO
-
-try:
- from io import BytesIO
-except ImportError:
- class BytesIO(StringIO):
- def write(self, data):
- if isinstance(data, unicode):
- raise TypeError("not a byte value: %r" %(data,))
+else:
+ TextIO = StringIO
+
+try:
+ from io import BytesIO
+except ImportError:
+ class BytesIO(StringIO):
+ def write(self, data):
+ if isinstance(data, unicode):
+ raise TypeError("not a byte value: %r" %(data,))
return StringIO.write(self, data)
-
-patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
-
-class FDCapture:
- """ Capture IO to/from a given os-level filedescriptor. """
-
- def __init__(self, targetfd, tmpfile=None, now=True, patchsys=False):
- """ save targetfd descriptor, and open a new
- temporary file there. If no tmpfile is
- specified a tempfile.Tempfile() will be opened
- in text mode.
- """
- self.targetfd = targetfd
- if tmpfile is None and targetfd != 0:
- f = tempfile.TemporaryFile('wb+')
- tmpfile = dupfile(f, encoding="UTF-8")
- f.close()
- self.tmpfile = tmpfile
- self._savefd = os.dup(self.targetfd)
- if patchsys:
- self._oldsys = getattr(sys, patchsysdict[targetfd])
- if now:
- self.start()
-
- def start(self):
- try:
- os.fstat(self._savefd)
- except OSError:
- raise ValueError("saved filedescriptor not valid, "
- "did you call start() twice?")
- if self.targetfd == 0 and not self.tmpfile:
- fd = os.open(devnullpath, os.O_RDONLY)
- os.dup2(fd, 0)
- os.close(fd)
- if hasattr(self, '_oldsys'):
- setattr(sys, patchsysdict[self.targetfd], DontReadFromInput())
- else:
- os.dup2(self.tmpfile.fileno(), self.targetfd)
- if hasattr(self, '_oldsys'):
- setattr(sys, patchsysdict[self.targetfd], self.tmpfile)
-
- def done(self):
- """ unpatch and clean up, returns the self.tmpfile (file object)
- """
- os.dup2(self._savefd, self.targetfd)
- os.close(self._savefd)
- if self.targetfd != 0:
- self.tmpfile.seek(0)
- if hasattr(self, '_oldsys'):
- setattr(sys, patchsysdict[self.targetfd], self._oldsys)
- return self.tmpfile
-
- def writeorg(self, data):
- """ write a string to the original file descriptor
- """
- tempfp = tempfile.TemporaryFile()
- try:
- os.dup2(self._savefd, tempfp.fileno())
- tempfp.write(data)
- finally:
- tempfp.close()
-
-
-def dupfile(f, mode=None, buffering=0, raising=False, encoding=None):
- """ return a new open file object that's a duplicate of f
-
- mode is duplicated if not given, 'buffering' controls
- buffer size (defaulting to no buffering) and 'raising'
- defines whether an exception is raised when an incompatible
- file object is passed in (if raising is False, the file
- object itself will be returned)
- """
- try:
- fd = f.fileno()
- mode = mode or f.mode
- except AttributeError:
- if raising:
- raise
- return f
- newfd = os.dup(fd)
- if sys.version_info >= (3,0):
- if encoding is not None:
- mode = mode.replace("b", "")
- buffering = True
- return os.fdopen(newfd, mode, buffering, encoding, closefd=True)
- else:
- f = os.fdopen(newfd, mode, buffering)
- if encoding is not None:
- return EncodedFile(f, encoding)
- return f
-
-class EncodedFile(object):
- def __init__(self, _stream, encoding):
- self._stream = _stream
- self.encoding = encoding
-
- def write(self, obj):
- if isinstance(obj, unicode):
- obj = obj.encode(self.encoding)
- elif isinstance(obj, str):
- pass
- else:
- obj = str(obj)
- self._stream.write(obj)
-
- def writelines(self, linelist):
- data = ''.join(linelist)
- self.write(data)
-
- def __getattr__(self, name):
- return getattr(self._stream, name)
-
-class Capture(object):
- def call(cls, func, *args, **kwargs):
- """ return a (res, out, err) tuple where
- out and err represent the output/error output
- during function execution.
- call the given function with args/kwargs
- and capture output/error during its execution.
- """
- so = cls()
- try:
- res = func(*args, **kwargs)
- finally:
- out, err = so.reset()
- return res, out, err
- call = classmethod(call)
-
- def reset(self):
- """ reset sys.stdout/stderr and return captured output as strings. """
- if hasattr(self, '_reset'):
- raise ValueError("was already reset")
- self._reset = True
- outfile, errfile = self.done(save=False)
- out, err = "", ""
- if outfile and not outfile.closed:
- out = outfile.read()
- outfile.close()
- if errfile and errfile != outfile and not errfile.closed:
- err = errfile.read()
- errfile.close()
- return out, err
-
- def suspend(self):
- """ return current snapshot captures, memorize tempfiles. """
- outerr = self.readouterr()
- outfile, errfile = self.done()
- return outerr
-
-
-class StdCaptureFD(Capture):
- """ This class allows to capture writes to FD1 and FD2
- and may connect a NULL file to FD0 (and prevent
- reads from sys.stdin). If any of the 0,1,2 file descriptors
- is invalid it will not be captured.
- """
- def __init__(self, out=True, err=True, mixed=False,
- in_=True, patchsys=True, now=True):
- self._options = {
- "out": out,
- "err": err,
- "mixed": mixed,
- "in_": in_,
- "patchsys": patchsys,
- "now": now,
- }
- self._save()
- if now:
- self.startall()
-
- def _save(self):
- in_ = self._options['in_']
- out = self._options['out']
- err = self._options['err']
- mixed = self._options['mixed']
- patchsys = self._options['patchsys']
- if in_:
- try:
- self.in_ = FDCapture(0, tmpfile=None, now=False,
- patchsys=patchsys)
- except OSError:
- pass
- if out:
- tmpfile = None
- if hasattr(out, 'write'):
- tmpfile = out
- try:
- self.out = FDCapture(1, tmpfile=tmpfile,
- now=False, patchsys=patchsys)
- self._options['out'] = self.out.tmpfile
- except OSError:
- pass
- if err:
- if out and mixed:
- tmpfile = self.out.tmpfile
- elif hasattr(err, 'write'):
- tmpfile = err
- else:
- tmpfile = None
- try:
- self.err = FDCapture(2, tmpfile=tmpfile,
- now=False, patchsys=patchsys)
- self._options['err'] = self.err.tmpfile
- except OSError:
- pass
-
- def startall(self):
- if hasattr(self, 'in_'):
- self.in_.start()
- if hasattr(self, 'out'):
- self.out.start()
- if hasattr(self, 'err'):
- self.err.start()
-
- def resume(self):
- """ resume capturing with original temp files. """
- self.startall()
-
- def done(self, save=True):
- """ return (outfile, errfile) and stop capturing. """
- outfile = errfile = None
- if hasattr(self, 'out') and not self.out.tmpfile.closed:
- outfile = self.out.done()
- if hasattr(self, 'err') and not self.err.tmpfile.closed:
- errfile = self.err.done()
- if hasattr(self, 'in_'):
- tmpfile = self.in_.done()
- if save:
- self._save()
- return outfile, errfile
-
- def readouterr(self):
- """ return snapshot value of stdout/stderr capturings. """
- if hasattr(self, "out"):
- out = self._readsnapshot(self.out.tmpfile)
- else:
- out = ""
- if hasattr(self, "err"):
- err = self._readsnapshot(self.err.tmpfile)
- else:
- err = ""
+
+patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
+
+class FDCapture:
+ """ Capture IO to/from a given os-level filedescriptor. """
+
+ def __init__(self, targetfd, tmpfile=None, now=True, patchsys=False):
+ """ save targetfd descriptor, and open a new
+ temporary file there. If no tmpfile is
+ specified a tempfile.Tempfile() will be opened
+ in text mode.
+ """
+ self.targetfd = targetfd
+ if tmpfile is None and targetfd != 0:
+ f = tempfile.TemporaryFile('wb+')
+ tmpfile = dupfile(f, encoding="UTF-8")
+ f.close()
+ self.tmpfile = tmpfile
+ self._savefd = os.dup(self.targetfd)
+ if patchsys:
+ self._oldsys = getattr(sys, patchsysdict[targetfd])
+ if now:
+ self.start()
+
+ def start(self):
+ try:
+ os.fstat(self._savefd)
+ except OSError:
+ raise ValueError("saved filedescriptor not valid, "
+ "did you call start() twice?")
+ if self.targetfd == 0 and not self.tmpfile:
+ fd = os.open(devnullpath, os.O_RDONLY)
+ os.dup2(fd, 0)
+ os.close(fd)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], DontReadFromInput())
+ else:
+ os.dup2(self.tmpfile.fileno(), self.targetfd)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], self.tmpfile)
+
+ def done(self):
+ """ unpatch and clean up, returns the self.tmpfile (file object)
+ """
+ os.dup2(self._savefd, self.targetfd)
+ os.close(self._savefd)
+ if self.targetfd != 0:
+ self.tmpfile.seek(0)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], self._oldsys)
+ return self.tmpfile
+
+ def writeorg(self, data):
+ """ write a string to the original file descriptor
+ """
+ tempfp = tempfile.TemporaryFile()
+ try:
+ os.dup2(self._savefd, tempfp.fileno())
+ tempfp.write(data)
+ finally:
+ tempfp.close()
+
+
+def dupfile(f, mode=None, buffering=0, raising=False, encoding=None):
+ """ return a new open file object that's a duplicate of f
+
+ mode is duplicated if not given, 'buffering' controls
+ buffer size (defaulting to no buffering) and 'raising'
+ defines whether an exception is raised when an incompatible
+ file object is passed in (if raising is False, the file
+ object itself will be returned)
+ """
+ try:
+ fd = f.fileno()
+ mode = mode or f.mode
+ except AttributeError:
+ if raising:
+ raise
+ return f
+ newfd = os.dup(fd)
+ if sys.version_info >= (3,0):
+ if encoding is not None:
+ mode = mode.replace("b", "")
+ buffering = True
+ return os.fdopen(newfd, mode, buffering, encoding, closefd=True)
+ else:
+ f = os.fdopen(newfd, mode, buffering)
+ if encoding is not None:
+ return EncodedFile(f, encoding)
+ return f
+
+class EncodedFile(object):
+ def __init__(self, _stream, encoding):
+ self._stream = _stream
+ self.encoding = encoding
+
+ def write(self, obj):
+ if isinstance(obj, unicode):
+ obj = obj.encode(self.encoding)
+ elif isinstance(obj, str):
+ pass
+ else:
+ obj = str(obj)
+ self._stream.write(obj)
+
+ def writelines(self, linelist):
+ data = ''.join(linelist)
+ self.write(data)
+
+ def __getattr__(self, name):
+ return getattr(self._stream, name)
+
+class Capture(object):
+ def call(cls, func, *args, **kwargs):
+ """ return a (res, out, err) tuple where
+ out and err represent the output/error output
+ during function execution.
+ call the given function with args/kwargs
+ and capture output/error during its execution.
+ """
+ so = cls()
+ try:
+ res = func(*args, **kwargs)
+ finally:
+ out, err = so.reset()
+ return res, out, err
+ call = classmethod(call)
+
+ def reset(self):
+ """ reset sys.stdout/stderr and return captured output as strings. """
+ if hasattr(self, '_reset'):
+ raise ValueError("was already reset")
+ self._reset = True
+ outfile, errfile = self.done(save=False)
+ out, err = "", ""
+ if outfile and not outfile.closed:
+ out = outfile.read()
+ outfile.close()
+ if errfile and errfile != outfile and not errfile.closed:
+ err = errfile.read()
+ errfile.close()
return out, err
-
- def _readsnapshot(self, f):
- f.seek(0)
- res = f.read()
- enc = getattr(f, "encoding", None)
- if enc:
- res = py.builtin._totext(res, enc, "replace")
- f.truncate(0)
- f.seek(0)
- return res
-
-
-class StdCapture(Capture):
- """ This class allows to capture writes to sys.stdout|stderr "in-memory"
- and will raise errors on tries to read from sys.stdin. It only
- modifies sys.stdout|stderr|stdin attributes and does not
- touch underlying File Descriptors (use StdCaptureFD for that).
- """
- def __init__(self, out=True, err=True, in_=True, mixed=False, now=True):
- self._oldout = sys.stdout
- self._olderr = sys.stderr
- self._oldin = sys.stdin
- if out and not hasattr(out, 'file'):
- out = TextIO()
- self.out = out
- if err:
- if mixed:
- err = out
- elif not hasattr(err, 'write'):
- err = TextIO()
- self.err = err
- self.in_ = in_
- if now:
- self.startall()
-
- def startall(self):
- if self.out:
- sys.stdout = self.out
- if self.err:
- sys.stderr = self.err
- if self.in_:
- sys.stdin = self.in_ = DontReadFromInput()
-
- def done(self, save=True):
- """ return (outfile, errfile) and stop capturing. """
- outfile = errfile = None
- if self.out and not self.out.closed:
- sys.stdout = self._oldout
- outfile = self.out
- outfile.seek(0)
- if self.err and not self.err.closed:
- sys.stderr = self._olderr
- errfile = self.err
- errfile.seek(0)
- if self.in_:
- sys.stdin = self._oldin
- return outfile, errfile
-
- def resume(self):
- """ resume capturing with original temp files. """
- self.startall()
-
- def readouterr(self):
- """ return snapshot value of stdout/stderr capturings. """
- out = err = ""
- if self.out:
- out = self.out.getvalue()
- self.out.truncate(0)
- self.out.seek(0)
- if self.err:
- err = self.err.getvalue()
- self.err.truncate(0)
- self.err.seek(0)
- return out, err
-
-class DontReadFromInput:
- """Temporary stub class. Ideally when stdin is accessed, the
- capturing should be turned off, with possibly all data captured
- so far sent to the screen. This should be configurable, though,
- because in automated test runs it is better to crash than
- hang indefinitely.
- """
- def read(self, *args):
- raise IOError("reading from stdin while output is captured")
- readline = read
- readlines = read
- __iter__ = read
-
- def fileno(self):
- raise ValueError("redirected Stdin is pseudofile, has no fileno()")
- def isatty(self):
- return False
- def close(self):
- pass
-
-try:
- devnullpath = os.devnull
-except AttributeError:
- if os.name == 'nt':
- devnullpath = 'NUL'
- else:
- devnullpath = '/dev/null'
+
+ def suspend(self):
+ """ return current snapshot captures, memorize tempfiles. """
+ outerr = self.readouterr()
+ outfile, errfile = self.done()
+ return outerr
+
+
+class StdCaptureFD(Capture):
+ """ This class allows to capture writes to FD1 and FD2
+ and may connect a NULL file to FD0 (and prevent
+ reads from sys.stdin). If any of the 0,1,2 file descriptors
+ is invalid it will not be captured.
+ """
+ def __init__(self, out=True, err=True, mixed=False,
+ in_=True, patchsys=True, now=True):
+ self._options = {
+ "out": out,
+ "err": err,
+ "mixed": mixed,
+ "in_": in_,
+ "patchsys": patchsys,
+ "now": now,
+ }
+ self._save()
+ if now:
+ self.startall()
+
+ def _save(self):
+ in_ = self._options['in_']
+ out = self._options['out']
+ err = self._options['err']
+ mixed = self._options['mixed']
+ patchsys = self._options['patchsys']
+ if in_:
+ try:
+ self.in_ = FDCapture(0, tmpfile=None, now=False,
+ patchsys=patchsys)
+ except OSError:
+ pass
+ if out:
+ tmpfile = None
+ if hasattr(out, 'write'):
+ tmpfile = out
+ try:
+ self.out = FDCapture(1, tmpfile=tmpfile,
+ now=False, patchsys=patchsys)
+ self._options['out'] = self.out.tmpfile
+ except OSError:
+ pass
+ if err:
+ if out and mixed:
+ tmpfile = self.out.tmpfile
+ elif hasattr(err, 'write'):
+ tmpfile = err
+ else:
+ tmpfile = None
+ try:
+ self.err = FDCapture(2, tmpfile=tmpfile,
+ now=False, patchsys=patchsys)
+ self._options['err'] = self.err.tmpfile
+ except OSError:
+ pass
+
+ def startall(self):
+ if hasattr(self, 'in_'):
+ self.in_.start()
+ if hasattr(self, 'out'):
+ self.out.start()
+ if hasattr(self, 'err'):
+ self.err.start()
+
+ def resume(self):
+ """ resume capturing with original temp files. """
+ self.startall()
+
+ def done(self, save=True):
+ """ return (outfile, errfile) and stop capturing. """
+ outfile = errfile = None
+ if hasattr(self, 'out') and not self.out.tmpfile.closed:
+ outfile = self.out.done()
+ if hasattr(self, 'err') and not self.err.tmpfile.closed:
+ errfile = self.err.done()
+ if hasattr(self, 'in_'):
+ tmpfile = self.in_.done()
+ if save:
+ self._save()
+ return outfile, errfile
+
+ def readouterr(self):
+ """ return snapshot value of stdout/stderr capturings. """
+ if hasattr(self, "out"):
+ out = self._readsnapshot(self.out.tmpfile)
+ else:
+ out = ""
+ if hasattr(self, "err"):
+ err = self._readsnapshot(self.err.tmpfile)
+ else:
+ err = ""
+ return out, err
+
+ def _readsnapshot(self, f):
+ f.seek(0)
+ res = f.read()
+ enc = getattr(f, "encoding", None)
+ if enc:
+ res = py.builtin._totext(res, enc, "replace")
+ f.truncate(0)
+ f.seek(0)
+ return res
+
+
+class StdCapture(Capture):
+ """ This class allows to capture writes to sys.stdout|stderr "in-memory"
+ and will raise errors on tries to read from sys.stdin. It only
+ modifies sys.stdout|stderr|stdin attributes and does not
+ touch underlying File Descriptors (use StdCaptureFD for that).
+ """
+ def __init__(self, out=True, err=True, in_=True, mixed=False, now=True):
+ self._oldout = sys.stdout
+ self._olderr = sys.stderr
+ self._oldin = sys.stdin
+ if out and not hasattr(out, 'file'):
+ out = TextIO()
+ self.out = out
+ if err:
+ if mixed:
+ err = out
+ elif not hasattr(err, 'write'):
+ err = TextIO()
+ self.err = err
+ self.in_ = in_
+ if now:
+ self.startall()
+
+ def startall(self):
+ if self.out:
+ sys.stdout = self.out
+ if self.err:
+ sys.stderr = self.err
+ if self.in_:
+ sys.stdin = self.in_ = DontReadFromInput()
+
+ def done(self, save=True):
+ """ return (outfile, errfile) and stop capturing. """
+ outfile = errfile = None
+ if self.out and not self.out.closed:
+ sys.stdout = self._oldout
+ outfile = self.out
+ outfile.seek(0)
+ if self.err and not self.err.closed:
+ sys.stderr = self._olderr
+ errfile = self.err
+ errfile.seek(0)
+ if self.in_:
+ sys.stdin = self._oldin
+ return outfile, errfile
+
+ def resume(self):
+ """ resume capturing with original temp files. """
+ self.startall()
+
+ def readouterr(self):
+ """ return snapshot value of stdout/stderr capturings. """
+ out = err = ""
+ if self.out:
+ out = self.out.getvalue()
+ self.out.truncate(0)
+ self.out.seek(0)
+ if self.err:
+ err = self.err.getvalue()
+ self.err.truncate(0)
+ self.err.seek(0)
+ return out, err
+
+class DontReadFromInput:
+ """Temporary stub class. Ideally when stdin is accessed, the
+ capturing should be turned off, with possibly all data captured
+ so far sent to the screen. This should be configurable, though,
+ because in automated test runs it is better to crash than
+ hang indefinitely.
+ """
+ def read(self, *args):
+ raise IOError("reading from stdin while output is captured")
+ readline = read
+ readlines = read
+ __iter__ = read
+
+ def fileno(self):
+ raise ValueError("redirected Stdin is pseudofile, has no fileno()")
+ def isatty(self):
+ return False
+ def close(self):
+ pass
+
+try:
+ devnullpath = os.devnull
+except AttributeError:
+ if os.name == 'nt':
+ devnullpath = 'NUL'
+ else:
+ devnullpath = '/dev/null'
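
As a rough sketch of how the capture classes above are driven (illustrative, not part of the diff; it assumes the classes are reachable under py.io, as in released py versions):

    import sys
    import py

    # StdCapture swaps only the sys.stdout/stderr attributes;
    # StdCaptureFD would also redirect the OS-level descriptors 1 and 2
    cap = py.io.StdCapture()
    print('hello')
    sys.stderr.write('world\n')
    out, err = cap.reset()
    assert out == 'hello\n' and err == 'world\n'
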
diff --git a/contrib/python/py/py/_io/saferepr.py b/contrib/python/py/py/_io/saferepr.py
index 8fabbf2d69..8518290efd 100644
--- a/contrib/python/py/py/_io/saferepr.py
+++ b/contrib/python/py/py/_io/saferepr.py
@@ -1,71 +1,71 @@
-import py
-import sys
-
-builtin_repr = repr
-
-reprlib = py.builtin._tryimport('repr', 'reprlib')
-
-class SafeRepr(reprlib.Repr):
- """ subclass of repr.Repr that limits the resulting size of repr()
- and includes information on exceptions raised during the call.
- """
- def repr(self, x):
- return self._callhelper(reprlib.Repr.repr, self, x)
-
- def repr_unicode(self, x, level):
- # Strictly speaking wrong on narrow builds
- def repr(u):
- if "'" not in u:
- return py.builtin._totext("'%s'") % u
- elif '"' not in u:
- return py.builtin._totext('"%s"') % u
- else:
- return py.builtin._totext("'%s'") % u.replace("'", r"\'")
- s = repr(x[:self.maxstring])
- if len(s) > self.maxstring:
- i = max(0, (self.maxstring-3)//2)
- j = max(0, self.maxstring-3-i)
- s = repr(x[:i] + x[len(x)-j:])
- s = s[:i] + '...' + s[len(s)-j:]
- return s
-
- def repr_instance(self, x, level):
- return self._callhelper(builtin_repr, x)
-
- def _callhelper(self, call, x, *args):
- try:
- # Try the vanilla repr and make sure that the result is a string
- s = call(x, *args)
- except py.builtin._sysex:
- raise
- except:
- cls, e, tb = sys.exc_info()
- exc_name = getattr(cls, '__name__', 'unknown')
- try:
- exc_info = str(e)
- except py.builtin._sysex:
- raise
- except:
- exc_info = 'unknown'
- return '<[%s("%s") raised in repr()] %s object at 0x%x>' % (
- exc_name, exc_info, x.__class__.__name__, id(x))
- else:
- if len(s) > self.maxsize:
- i = max(0, (self.maxsize-3)//2)
- j = max(0, self.maxsize-3-i)
- s = s[:i] + '...' + s[len(s)-j:]
- return s
-
-def saferepr(obj, maxsize=240):
- """ return a size-limited safe repr-string for the given object.
- Failing __repr__ functions of user instances will be represented
- with a short exception info and 'saferepr' generally takes
- care to never raise exceptions itself. This function is a wrapper
- around the Repr/reprlib functionality of the standard 2.6 lib.
- """
- # review exception handling
- srepr = SafeRepr()
- srepr.maxstring = maxsize
- srepr.maxsize = maxsize
- srepr.maxother = 160
- return srepr.repr(obj)
+import py
+import sys
+
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+class SafeRepr(reprlib.Repr):
+ """ subclass of repr.Repr that limits the resulting size of repr()
+ and includes information on exceptions raised during the call.
+ """
+ def repr(self, x):
+ return self._callhelper(reprlib.Repr.repr, self, x)
+
+ def repr_unicode(self, x, level):
+ # Strictly speaking wrong on narrow builds
+ def repr(u):
+ if "'" not in u:
+ return py.builtin._totext("'%s'") % u
+ elif '"' not in u:
+ return py.builtin._totext('"%s"') % u
+ else:
+ return py.builtin._totext("'%s'") % u.replace("'", r"\'")
+ s = repr(x[:self.maxstring])
+ if len(s) > self.maxstring:
+ i = max(0, (self.maxstring-3)//2)
+ j = max(0, self.maxstring-3-i)
+ s = repr(x[:i] + x[len(x)-j:])
+ s = s[:i] + '...' + s[len(s)-j:]
+ return s
+
+ def repr_instance(self, x, level):
+ return self._callhelper(builtin_repr, x)
+
+ def _callhelper(self, call, x, *args):
+ try:
+ # Try the vanilla repr and make sure that the result is a string
+ s = call(x, *args)
+ except py.builtin._sysex:
+ raise
+ except:
+ cls, e, tb = sys.exc_info()
+ exc_name = getattr(cls, '__name__', 'unknown')
+ try:
+ exc_info = str(e)
+ except py.builtin._sysex:
+ raise
+ except:
+ exc_info = 'unknown'
+ return '<[%s("%s") raised in repr()] %s object at 0x%x>' % (
+ exc_name, exc_info, x.__class__.__name__, id(x))
+ else:
+ if len(s) > self.maxsize:
+ i = max(0, (self.maxsize-3)//2)
+ j = max(0, self.maxsize-3-i)
+ s = s[:i] + '...' + s[len(s)-j:]
+ return s
+
+def saferepr(obj, maxsize=240):
+ """ return a size-limited safe repr-string for the given object.
+ Failing __repr__ functions of user instances will be represented
+ with a short exception info and 'saferepr' generally takes
+ care to never raise exceptions itself. This function is a wrapper
+ around the Repr/reprlib functionality of the standard 2.6 lib.
+ """
+ # review exception handling
+ srepr = SafeRepr()
+ srepr.maxstring = maxsize
+ srepr.maxsize = maxsize
+ srepr.maxother = 160
+ return srepr.repr(obj)
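
A short sketch of the behaviour documented above (illustrative only; the Broken class is a hypothetical stand-in for any object with a failing __repr__):

    import py

    class Broken(object):
        def __repr__(self):
            raise RuntimeError('boom')

    # never raises: the exception is folded into the result string,
    # e.g. '<[RuntimeError("boom") raised in repr()] Broken object at 0x...>'
    print(py.io.saferepr(Broken()))
    # long values are truncated around maxsize with a '...' marker
    print(py.io.saferepr('x' * 1000, maxsize=40))
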
diff --git a/contrib/python/py/py/_io/terminalwriter.py b/contrib/python/py/py/_io/terminalwriter.py
index 02af14ea62..442ca2395e 100644
--- a/contrib/python/py/py/_io/terminalwriter.py
+++ b/contrib/python/py/py/_io/terminalwriter.py
@@ -1,30 +1,30 @@
-"""
-
-Helper functions for writing to terminals and files.
-
-"""
-
-
+"""
+
+Helper functions for writing to terminals and files.
+
+"""
+
+
import sys, os, unicodedata
-import py
-py3k = sys.version_info[0] >= 3
+import py
+py3k = sys.version_info[0] >= 3
py33 = sys.version_info >= (3, 3)
-from py.builtin import text, bytes
-
-win32_and_ctypes = False
-colorama = None
-if sys.platform == "win32":
- try:
- import colorama
- except ImportError:
- try:
- import ctypes
- win32_and_ctypes = True
- except ImportError:
- pass
-
-
-def _getdimensions():
+from py.builtin import text, bytes
+
+win32_and_ctypes = False
+colorama = None
+if sys.platform == "win32":
+ try:
+ import colorama
+ except ImportError:
+ try:
+ import ctypes
+ win32_and_ctypes = True
+ except ImportError:
+ pass
+
+
+def _getdimensions():
if py33:
import shutil
size = shutil.get_terminal_size()
@@ -34,31 +34,31 @@ def _getdimensions():
call = fcntl.ioctl(1, termios.TIOCGWINSZ, "\000" * 8)
height, width = struct.unpack("hhhh", call)[:2]
return height, width
-
-
-def get_terminal_width():
+
+
+def get_terminal_width():
width = 0
- try:
+ try:
_, width = _getdimensions()
- except py.builtin._sysex:
- raise
- except:
- # pass to fallback below
- pass
-
- if width == 0:
- # FALLBACK:
- # * some exception happened
- # * or this is emacs terminal which reports (0,0)
- width = int(os.environ.get('COLUMNS', 80))
-
- # XXX the windows getdimensions may be bogus, let's sanify a bit
- if width < 40:
- width = 80
- return width
-
-terminal_width = get_terminal_width()
-
+ except py.builtin._sysex:
+ raise
+ except:
+ # pass to fallback below
+ pass
+
+ if width == 0:
+ # FALLBACK:
+ # * some exception happened
+ # * or this is emacs terminal which reports (0,0)
+ width = int(os.environ.get('COLUMNS', 80))
+
+ # XXX the windows getdimensions may be bogus, let's sanify a bit
+ if width < 40:
+ width = 80
+ return width
+
+terminal_width = get_terminal_width()
+
char_width = {
'A': 1, # "Ambiguous"
'F': 2, # Fullwidth
@@ -74,97 +74,97 @@ def get_line_width(text):
return sum(char_width.get(unicodedata.east_asian_width(c), 1) for c in text)
-# XXX unify with _escaped func below
-def ansi_print(text, esc, file=None, newline=True, flush=False):
- if file is None:
- file = sys.stderr
- text = text.rstrip()
- if esc and not isinstance(esc, tuple):
- esc = (esc,)
- if esc and sys.platform != "win32" and file.isatty():
- text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
- text +
- '\x1b[0m') # ANSI color code "reset"
- if newline:
- text += '\n'
-
- if esc and win32_and_ctypes and file.isatty():
- if 1 in esc:
- bold = True
- esc = tuple([x for x in esc if x != 1])
- else:
- bold = False
- esctable = {() : FOREGROUND_WHITE, # normal
- (31,): FOREGROUND_RED, # red
- (32,): FOREGROUND_GREEN, # green
- (33,): FOREGROUND_GREEN|FOREGROUND_RED, # yellow
- (34,): FOREGROUND_BLUE, # blue
- (35,): FOREGROUND_BLUE|FOREGROUND_RED, # purple
- (36,): FOREGROUND_BLUE|FOREGROUND_GREEN, # cyan
- (37,): FOREGROUND_WHITE, # white
- (39,): FOREGROUND_WHITE, # reset
- }
- attr = esctable.get(esc, FOREGROUND_WHITE)
- if bold:
- attr |= FOREGROUND_INTENSITY
- STD_OUTPUT_HANDLE = -11
- STD_ERROR_HANDLE = -12
- if file is sys.stderr:
- handle = GetStdHandle(STD_ERROR_HANDLE)
- else:
- handle = GetStdHandle(STD_OUTPUT_HANDLE)
- oldcolors = GetConsoleInfo(handle).wAttributes
- attr |= (oldcolors & 0x0f0)
- SetConsoleTextAttribute(handle, attr)
- while len(text) > 32768:
- file.write(text[:32768])
- text = text[32768:]
- if text:
- file.write(text)
- SetConsoleTextAttribute(handle, oldcolors)
- else:
- file.write(text)
-
- if flush:
- file.flush()
-
-def should_do_markup(file):
- if os.environ.get('PY_COLORS') == '1':
- return True
- if os.environ.get('PY_COLORS') == '0':
- return False
+# XXX unify with _escaped func below
+def ansi_print(text, esc, file=None, newline=True, flush=False):
+ if file is None:
+ file = sys.stderr
+ text = text.rstrip()
+ if esc and not isinstance(esc, tuple):
+ esc = (esc,)
+ if esc and sys.platform != "win32" and file.isatty():
+ text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
+ text +
+ '\x1b[0m') # ANSI color code "reset"
+ if newline:
+ text += '\n'
+
+ if esc and win32_and_ctypes and file.isatty():
+ if 1 in esc:
+ bold = True
+ esc = tuple([x for x in esc if x != 1])
+ else:
+ bold = False
+ esctable = {() : FOREGROUND_WHITE, # normal
+ (31,): FOREGROUND_RED, # red
+ (32,): FOREGROUND_GREEN, # green
+ (33,): FOREGROUND_GREEN|FOREGROUND_RED, # yellow
+ (34,): FOREGROUND_BLUE, # blue
+ (35,): FOREGROUND_BLUE|FOREGROUND_RED, # purple
+ (36,): FOREGROUND_BLUE|FOREGROUND_GREEN, # cyan
+ (37,): FOREGROUND_WHITE, # white
+ (39,): FOREGROUND_WHITE, # reset
+ }
+ attr = esctable.get(esc, FOREGROUND_WHITE)
+ if bold:
+ attr |= FOREGROUND_INTENSITY
+ STD_OUTPUT_HANDLE = -11
+ STD_ERROR_HANDLE = -12
+ if file is sys.stderr:
+ handle = GetStdHandle(STD_ERROR_HANDLE)
+ else:
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ oldcolors = GetConsoleInfo(handle).wAttributes
+ attr |= (oldcolors & 0x0f0)
+ SetConsoleTextAttribute(handle, attr)
+ while len(text) > 32768:
+ file.write(text[:32768])
+ text = text[32768:]
+ if text:
+ file.write(text)
+ SetConsoleTextAttribute(handle, oldcolors)
+ else:
+ file.write(text)
+
+ if flush:
+ file.flush()
+
+def should_do_markup(file):
+ if os.environ.get('PY_COLORS') == '1':
+ return True
+ if os.environ.get('PY_COLORS') == '0':
+ return False
if 'NO_COLOR' in os.environ:
return False
- return hasattr(file, 'isatty') and file.isatty() \
- and os.environ.get('TERM') != 'dumb' \
- and not (sys.platform.startswith('java') and os._name == 'nt')
-
-class TerminalWriter(object):
- _esctable = dict(black=30, red=31, green=32, yellow=33,
- blue=34, purple=35, cyan=36, white=37,
- Black=40, Red=41, Green=42, Yellow=43,
- Blue=44, Purple=45, Cyan=46, White=47,
- bold=1, light=2, blink=5, invert=7)
-
- # XXX deprecate stringio argument
- def __init__(self, file=None, stringio=False, encoding=None):
- if file is None:
- if stringio:
- self.stringio = file = py.io.TextIO()
- else:
+ return hasattr(file, 'isatty') and file.isatty() \
+ and os.environ.get('TERM') != 'dumb' \
+ and not (sys.platform.startswith('java') and os._name == 'nt')
+
+class TerminalWriter(object):
+ _esctable = dict(black=30, red=31, green=32, yellow=33,
+ blue=34, purple=35, cyan=36, white=37,
+ Black=40, Red=41, Green=42, Yellow=43,
+ Blue=44, Purple=45, Cyan=46, White=47,
+ bold=1, light=2, blink=5, invert=7)
+
+ # XXX deprecate stringio argument
+ def __init__(self, file=None, stringio=False, encoding=None):
+ if file is None:
+ if stringio:
+ self.stringio = file = py.io.TextIO()
+ else:
from sys import stdout as file
- elif py.builtin.callable(file) and not (
- hasattr(file, "write") and hasattr(file, "flush")):
- file = WriteFile(file, encoding=encoding)
- if hasattr(file, "isatty") and file.isatty() and colorama:
- file = colorama.AnsiToWin32(file).stream
- self.encoding = encoding or getattr(file, 'encoding', "utf-8")
- self._file = file
- self.hasmarkup = should_do_markup(file)
- self._lastlen = 0
+ elif py.builtin.callable(file) and not (
+ hasattr(file, "write") and hasattr(file, "flush")):
+ file = WriteFile(file, encoding=encoding)
+ if hasattr(file, "isatty") and file.isatty() and colorama:
+ file = colorama.AnsiToWin32(file).stream
+ self.encoding = encoding or getattr(file, 'encoding', "utf-8")
+ self._file = file
+ self.hasmarkup = should_do_markup(file)
+ self._lastlen = 0
self._chars_on_current_line = 0
self._width_of_current_line = 0
-
+
@property
def fullwidth(self):
if hasattr(self, '_terminal_width'):
@@ -198,65 +198,65 @@ class TerminalWriter(object):
"""
return self._width_of_current_line
- def _escaped(self, text, esc):
- if esc and self.hasmarkup:
- text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
- text +'\x1b[0m')
- return text
-
- def markup(self, text, **kw):
- esc = []
- for name in kw:
- if name not in self._esctable:
- raise ValueError("unknown markup: %r" %(name,))
- if kw[name]:
- esc.append(self._esctable[name])
- return self._escaped(text, tuple(esc))
-
- def sep(self, sepchar, title=None, fullwidth=None, **kw):
- if fullwidth is None:
- fullwidth = self.fullwidth
- # the goal is to have the line be as long as possible
- # under the condition that len(line) <= fullwidth
- if sys.platform == "win32":
- # if we print in the last column on windows we are on a
- # new line but there is no way to verify/neutralize this
- # (we may not know the exact line width)
- # so let's be defensive to avoid empty lines in the output
- fullwidth -= 1
- if title is not None:
- # we want 2 + 2*len(fill) + len(title) <= fullwidth
- # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth
- # 2*len(sepchar)*N <= fullwidth - len(title) - 2
- # N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
+ def _escaped(self, text, esc):
+ if esc and self.hasmarkup:
+ text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
+ text +'\x1b[0m')
+ return text
+
+ def markup(self, text, **kw):
+ esc = []
+ for name in kw:
+ if name not in self._esctable:
+ raise ValueError("unknown markup: %r" %(name,))
+ if kw[name]:
+ esc.append(self._esctable[name])
+ return self._escaped(text, tuple(esc))
+
+ def sep(self, sepchar, title=None, fullwidth=None, **kw):
+ if fullwidth is None:
+ fullwidth = self.fullwidth
+ # the goal is to have the line be as long as possible
+ # under the condition that len(line) <= fullwidth
+ if sys.platform == "win32":
+ # if we print in the last column on windows we are on a
+ # new line but there is no way to verify/neutralize this
+ # (we may not know the exact line width)
+ # so let's be defensive to avoid empty lines in the output
+ fullwidth -= 1
+ if title is not None:
+ # we want 2 + 2*len(fill) + len(title) <= fullwidth
+ # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth
+ # 2*len(sepchar)*N <= fullwidth - len(title) - 2
+ # N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
N = max((fullwidth - len(title) - 2) // (2*len(sepchar)), 1)
- fill = sepchar * N
- line = "%s %s %s" % (fill, title, fill)
- else:
- # we want len(sepchar)*N <= fullwidth
- # i.e. N <= fullwidth // len(sepchar)
- line = sepchar * (fullwidth // len(sepchar))
- # in some situations there is room for an extra sepchar at the right,
- # in particular if we consider that with a sepchar like "_ " the
- # trailing space is not important at the end of the line
- if len(line) + len(sepchar.rstrip()) <= fullwidth:
- line += sepchar.rstrip()
-
- self.line(line, **kw)
-
- def write(self, msg, **kw):
- if msg:
- if not isinstance(msg, (bytes, text)):
- msg = text(msg)
+ fill = sepchar * N
+ line = "%s %s %s" % (fill, title, fill)
+ else:
+ # we want len(sepchar)*N <= fullwidth
+ # i.e. N <= fullwidth // len(sepchar)
+ line = sepchar * (fullwidth // len(sepchar))
+ # in some situations there is room for an extra sepchar at the right,
+ # in particular if we consider that with a sepchar like "_ " the
+ # trailing space is not important at the end of the line
+ if len(line) + len(sepchar.rstrip()) <= fullwidth:
+ line += sepchar.rstrip()
+
+ self.line(line, **kw)
+
+ def write(self, msg, **kw):
+ if msg:
+ if not isinstance(msg, (bytes, text)):
+ msg = text(msg)
self._update_chars_on_current_line(msg)
- if self.hasmarkup and kw:
- markupmsg = self.markup(msg, **kw)
- else:
- markupmsg = msg
- write_out(self._file, markupmsg)
-
+ if self.hasmarkup and kw:
+ markupmsg = self.markup(msg, **kw)
+ else:
+ markupmsg = msg
+ write_out(self._file, markupmsg)
+
def _update_chars_on_current_line(self, text_or_bytes):
newline = b'\n' if isinstance(text_or_bytes, bytes) else '\n'
current_line = text_or_bytes.rsplit(newline, 1)[-1]
@@ -269,155 +269,155 @@ class TerminalWriter(object):
self._chars_on_current_line += len(current_line)
self._width_of_current_line += get_line_width(current_line)
- def line(self, s='', **kw):
- self.write(s, **kw)
- self._checkfill(s)
- self.write('\n')
-
- def reline(self, line, **kw):
- if not self.hasmarkup:
- raise ValueError("cannot use rewrite-line without terminal")
- self.write(line, **kw)
- self._checkfill(line)
- self.write('\r')
- self._lastlen = len(line)
-
- def _checkfill(self, line):
- diff2last = self._lastlen - len(line)
- if diff2last > 0:
- self.write(" " * diff2last)
-
-class Win32ConsoleWriter(TerminalWriter):
- def write(self, msg, **kw):
- if msg:
- if not isinstance(msg, (bytes, text)):
- msg = text(msg)
+ def line(self, s='', **kw):
+ self.write(s, **kw)
+ self._checkfill(s)
+ self.write('\n')
+
+ def reline(self, line, **kw):
+ if not self.hasmarkup:
+ raise ValueError("cannot use rewrite-line without terminal")
+ self.write(line, **kw)
+ self._checkfill(line)
+ self.write('\r')
+ self._lastlen = len(line)
+
+ def _checkfill(self, line):
+ diff2last = self._lastlen - len(line)
+ if diff2last > 0:
+ self.write(" " * diff2last)
+
+class Win32ConsoleWriter(TerminalWriter):
+ def write(self, msg, **kw):
+ if msg:
+ if not isinstance(msg, (bytes, text)):
+ msg = text(msg)
self._update_chars_on_current_line(msg)
- oldcolors = None
- if self.hasmarkup and kw:
- handle = GetStdHandle(STD_OUTPUT_HANDLE)
- oldcolors = GetConsoleInfo(handle).wAttributes
- default_bg = oldcolors & 0x00F0
- attr = default_bg
- if kw.pop('bold', False):
- attr |= FOREGROUND_INTENSITY
-
- if kw.pop('red', False):
- attr |= FOREGROUND_RED
- elif kw.pop('blue', False):
- attr |= FOREGROUND_BLUE
- elif kw.pop('green', False):
- attr |= FOREGROUND_GREEN
- elif kw.pop('yellow', False):
- attr |= FOREGROUND_GREEN|FOREGROUND_RED
- else:
- attr |= oldcolors & 0x0007
-
- SetConsoleTextAttribute(handle, attr)
- write_out(self._file, msg)
- if oldcolors:
- SetConsoleTextAttribute(handle, oldcolors)
-
-class WriteFile(object):
- def __init__(self, writemethod, encoding=None):
- self.encoding = encoding
- self._writemethod = writemethod
-
- def write(self, data):
- if self.encoding:
- data = data.encode(self.encoding, "replace")
- self._writemethod(data)
-
- def flush(self):
- return
-
-
-if win32_and_ctypes:
- TerminalWriter = Win32ConsoleWriter
- import ctypes
- from ctypes import wintypes
-
- # ctypes access to the Windows console
- STD_OUTPUT_HANDLE = -11
- STD_ERROR_HANDLE = -12
- FOREGROUND_BLACK = 0x0000 # black text
- FOREGROUND_BLUE = 0x0001 # text color contains blue.
- FOREGROUND_GREEN = 0x0002 # text color contains green.
- FOREGROUND_RED = 0x0004 # text color contains red.
- FOREGROUND_WHITE = 0x0007
- FOREGROUND_INTENSITY = 0x0008 # text color is intensified.
- BACKGROUND_BLACK = 0x0000 # background color black
- BACKGROUND_BLUE = 0x0010 # background color contains blue.
- BACKGROUND_GREEN = 0x0020 # background color contains green.
- BACKGROUND_RED = 0x0040 # background color contains red.
- BACKGROUND_WHITE = 0x0070
- BACKGROUND_INTENSITY = 0x0080 # background color is intensified.
-
- SHORT = ctypes.c_short
- class COORD(ctypes.Structure):
- _fields_ = [('X', SHORT),
- ('Y', SHORT)]
- class SMALL_RECT(ctypes.Structure):
- _fields_ = [('Left', SHORT),
- ('Top', SHORT),
- ('Right', SHORT),
- ('Bottom', SHORT)]
- class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
- _fields_ = [('dwSize', COORD),
- ('dwCursorPosition', COORD),
- ('wAttributes', wintypes.WORD),
- ('srWindow', SMALL_RECT),
- ('dwMaximumWindowSize', COORD)]
-
- _GetStdHandle = ctypes.windll.kernel32.GetStdHandle
- _GetStdHandle.argtypes = [wintypes.DWORD]
- _GetStdHandle.restype = wintypes.HANDLE
- def GetStdHandle(kind):
- return _GetStdHandle(kind)
-
- SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute
- SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
- SetConsoleTextAttribute.restype = wintypes.BOOL
-
- _GetConsoleScreenBufferInfo = \
- ctypes.windll.kernel32.GetConsoleScreenBufferInfo
- _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
- ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
- _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
- def GetConsoleInfo(handle):
- info = CONSOLE_SCREEN_BUFFER_INFO()
- _GetConsoleScreenBufferInfo(handle, ctypes.byref(info))
- return info
-
- def _getdimensions():
- handle = GetStdHandle(STD_OUTPUT_HANDLE)
- info = GetConsoleInfo(handle)
- # Subtract one from the width, otherwise the cursor wraps
- # and the ending \n causes an empty line to display.
- return info.dwSize.Y, info.dwSize.X - 1
-
-def write_out(fil, msg):
- # XXX sometimes "msg" is of type bytes, sometimes text which
- # complicates the situation. Should we try to enforce unicode?
- try:
- # on py27 and above writing out to sys.stdout with an encoding
- # should usually work for unicode messages (if the encoding is
- # capable of it)
- fil.write(msg)
- except UnicodeEncodeError:
- # on py26 it might not work because stdout expects bytes
- if fil.encoding:
- try:
- fil.write(msg.encode(fil.encoding))
- except UnicodeEncodeError:
- # it might still fail if the encoding is not capable
- pass
- else:
- fil.flush()
- return
- # fallback: escape all unicode characters
- msg = msg.encode("unicode-escape").decode("ascii")
- fil.write(msg)
- fil.flush()
+ oldcolors = None
+ if self.hasmarkup and kw:
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ oldcolors = GetConsoleInfo(handle).wAttributes
+ default_bg = oldcolors & 0x00F0
+ attr = default_bg
+ if kw.pop('bold', False):
+ attr |= FOREGROUND_INTENSITY
+
+ if kw.pop('red', False):
+ attr |= FOREGROUND_RED
+ elif kw.pop('blue', False):
+ attr |= FOREGROUND_BLUE
+ elif kw.pop('green', False):
+ attr |= FOREGROUND_GREEN
+ elif kw.pop('yellow', False):
+ attr |= FOREGROUND_GREEN|FOREGROUND_RED
+ else:
+ attr |= oldcolors & 0x0007
+
+ SetConsoleTextAttribute(handle, attr)
+ write_out(self._file, msg)
+ if oldcolors:
+ SetConsoleTextAttribute(handle, oldcolors)
+
+class WriteFile(object):
+ def __init__(self, writemethod, encoding=None):
+ self.encoding = encoding
+ self._writemethod = writemethod
+
+ def write(self, data):
+ if self.encoding:
+ data = data.encode(self.encoding, "replace")
+ self._writemethod(data)
+
+ def flush(self):
+ return
+
+
+if win32_and_ctypes:
+ TerminalWriter = Win32ConsoleWriter
+ import ctypes
+ from ctypes import wintypes
+
+ # ctypes access to the Windows console
+ STD_OUTPUT_HANDLE = -11
+ STD_ERROR_HANDLE = -12
+ FOREGROUND_BLACK = 0x0000 # black text
+ FOREGROUND_BLUE = 0x0001 # text color contains blue.
+ FOREGROUND_GREEN = 0x0002 # text color contains green.
+ FOREGROUND_RED = 0x0004 # text color contains red.
+ FOREGROUND_WHITE = 0x0007
+ FOREGROUND_INTENSITY = 0x0008 # text color is intensified.
+ BACKGROUND_BLACK = 0x0000 # background color black
+ BACKGROUND_BLUE = 0x0010 # background color contains blue.
+ BACKGROUND_GREEN = 0x0020 # background color contains green.
+ BACKGROUND_RED = 0x0040 # background color contains red.
+ BACKGROUND_WHITE = 0x0070
+ BACKGROUND_INTENSITY = 0x0080 # background color is intensified.
+
+ SHORT = ctypes.c_short
+ class COORD(ctypes.Structure):
+ _fields_ = [('X', SHORT),
+ ('Y', SHORT)]
+ class SMALL_RECT(ctypes.Structure):
+ _fields_ = [('Left', SHORT),
+ ('Top', SHORT),
+ ('Right', SHORT),
+ ('Bottom', SHORT)]
+ class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
+ _fields_ = [('dwSize', COORD),
+ ('dwCursorPosition', COORD),
+ ('wAttributes', wintypes.WORD),
+ ('srWindow', SMALL_RECT),
+ ('dwMaximumWindowSize', COORD)]
+
+ _GetStdHandle = ctypes.windll.kernel32.GetStdHandle
+ _GetStdHandle.argtypes = [wintypes.DWORD]
+ _GetStdHandle.restype = wintypes.HANDLE
+ def GetStdHandle(kind):
+ return _GetStdHandle(kind)
+
+ SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute
+ SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
+ SetConsoleTextAttribute.restype = wintypes.BOOL
+
+ _GetConsoleScreenBufferInfo = \
+ ctypes.windll.kernel32.GetConsoleScreenBufferInfo
+ _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
+ ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
+ _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
+ def GetConsoleInfo(handle):
+ info = CONSOLE_SCREEN_BUFFER_INFO()
+ _GetConsoleScreenBufferInfo(handle, ctypes.byref(info))
+ return info
+
+ def _getdimensions():
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ info = GetConsoleInfo(handle)
+ # Subtract one from the width, otherwise the cursor wraps
+ # and the ending \n causes an empty line to display.
+ return info.dwSize.Y, info.dwSize.X - 1
+
+def write_out(fil, msg):
+ # XXX sometimes "msg" is of type bytes, sometimes text which
+ # complicates the situation. Should we try to enforce unicode?
+ try:
+ # on py27 and above writing out to sys.stdout with an encoding
+ # should usually work for unicode messages (if the encoding is
+ # capable of it)
+ fil.write(msg)
+ except UnicodeEncodeError:
+ # on py26 it might not work because stdout expects bytes
+ if fil.encoding:
+ try:
+ fil.write(msg.encode(fil.encoding))
+ except UnicodeEncodeError:
+ # it might still fail if the encoding is not capable
+ pass
+ else:
+ fil.flush()
+ return
+ # fallback: escape all unicode characters
+ msg = msg.encode("unicode-escape").decode("ascii")
+ fil.write(msg)
+ fil.flush()
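
For orientation, a minimal TerminalWriter sketch (illustrative; the markup keywords come from the _esctable defined above and are silently dropped when should_do_markup() returns False for the target file):

    import py

    tw = py.io.TerminalWriter()
    tw.sep('=', 'results')    # a full-width '===== results =====' line
    tw.line('all tests passed', green=True, bold=True)
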
diff --git a/contrib/python/py/py/_log/__init__.py b/contrib/python/py/py/_log/__init__.py
index c982259a8d..fad62e960d 100644
--- a/contrib/python/py/py/_log/__init__.py
+++ b/contrib/python/py/py/_log/__init__.py
@@ -1,2 +1,2 @@
-""" logging API ('producers' and 'consumers' connected via keywords) """
-
+""" logging API ('producers' and 'consumers' connected via keywords) """
+
diff --git a/contrib/python/py/py/_log/log.py b/contrib/python/py/py/_log/log.py
index 057889d684..56969bcb58 100644
--- a/contrib/python/py/py/_log/log.py
+++ b/contrib/python/py/py/_log/log.py
@@ -1,197 +1,197 @@
-"""
-basic logging functionality based on a producer/consumer scheme.
-
-XXX implement this API: (maybe put it into slogger.py?)
-
- log = Logger(
- info=py.log.STDOUT,
- debug=py.log.STDOUT,
- command=None)
- log.info("hello", "world")
- log.command("hello", "world")
-
- log = Logger(info=Logger(something=...),
- debug=py.log.STDOUT,
- command=None)
-"""
+"""
+basic logging functionality based on a producer/consumer scheme.
+
+XXX implement this API: (maybe put it into slogger.py?)
+
+ log = Logger(
+ info=py.log.STDOUT,
+ debug=py.log.STDOUT,
+ command=None)
+ log.info("hello", "world")
+ log.command("hello", "world")
+
+ log = Logger(info=Logger(something=...),
+ debug=py.log.STDOUT,
+ command=None)
+"""
import py
import sys
-
-
-class Message(object):
- def __init__(self, keywords, args):
- self.keywords = keywords
- self.args = args
-
- def content(self):
- return " ".join(map(str, self.args))
-
- def prefix(self):
- return "[%s] " % (":".join(self.keywords))
-
- def __str__(self):
- return self.prefix() + self.content()
-
-
-class Producer(object):
- """ (deprecated) Log producer API which sends messages to be logged
- to a 'consumer' object, which then prints them to stdout,
- stderr, files, etc. Used extensively by PyPy-1.1.
- """
-
- Message = Message # to allow later customization
- keywords2consumer = {}
-
- def __init__(self, keywords, keywordmapper=None, **kw):
- if hasattr(keywords, 'split'):
- keywords = tuple(keywords.split())
- self._keywords = keywords
- if keywordmapper is None:
- keywordmapper = default_keywordmapper
- self._keywordmapper = keywordmapper
-
- def __repr__(self):
- return "<py.log.Producer %s>" % ":".join(self._keywords)
-
- def __getattr__(self, name):
- if '_' in name:
- raise AttributeError(name)
- producer = self.__class__(self._keywords + (name,))
- setattr(self, name, producer)
- return producer
-
- def __call__(self, *args):
- """ write a message to the appropriate consumer(s) """
- func = self._keywordmapper.getconsumer(self._keywords)
- if func is not None:
- func(self.Message(self._keywords, args))
-
-class KeywordMapper:
- def __init__(self):
- self.keywords2consumer = {}
-
- def getstate(self):
- return self.keywords2consumer.copy()
-
- def setstate(self, state):
- self.keywords2consumer.clear()
- self.keywords2consumer.update(state)
-
- def getconsumer(self, keywords):
- """ return a consumer matching the given keywords.
-
- tries to find the most suitable consumer by walking, starting from
- the back, the list of keywords, the first consumer matching a
- keyword is returned (falling back to py.log.default)
- """
- for i in range(len(keywords), 0, -1):
- try:
- return self.keywords2consumer[keywords[:i]]
- except KeyError:
- continue
- return self.keywords2consumer.get('default', default_consumer)
-
- def setconsumer(self, keywords, consumer):
- """ set a consumer for a set of keywords. """
- # normalize to tuples
- if isinstance(keywords, str):
- keywords = tuple(filter(None, keywords.split()))
- elif hasattr(keywords, '_keywords'):
- keywords = keywords._keywords
- elif not isinstance(keywords, tuple):
- raise TypeError("key %r is not a string or tuple" % (keywords,))
- if consumer is not None and not py.builtin.callable(consumer):
- if not hasattr(consumer, 'write'):
- raise TypeError(
- "%r should be None, callable or file-like" % (consumer,))
- consumer = File(consumer)
- self.keywords2consumer[keywords] = consumer
-
-
-def default_consumer(msg):
- """ the default consumer, prints the message to stdout (using 'print') """
- sys.stderr.write(str(msg)+"\n")
-
-default_keywordmapper = KeywordMapper()
-
-
-def setconsumer(keywords, consumer):
- default_keywordmapper.setconsumer(keywords, consumer)
-
-
-def setstate(state):
- default_keywordmapper.setstate(state)
-
-
-def getstate():
- return default_keywordmapper.getstate()
-
-#
-# Consumers
-#
-
-
-class File(object):
- """ log consumer wrapping a file(-like) object """
- def __init__(self, f):
- assert hasattr(f, 'write')
+
+
+class Message(object):
+ def __init__(self, keywords, args):
+ self.keywords = keywords
+ self.args = args
+
+ def content(self):
+ return " ".join(map(str, self.args))
+
+ def prefix(self):
+ return "[%s] " % (":".join(self.keywords))
+
+ def __str__(self):
+ return self.prefix() + self.content()
+
+
+class Producer(object):
+ """ (deprecated) Log producer API which sends messages to be logged
+ to a 'consumer' object, which then prints them to stdout,
+ stderr, files, etc. Used extensively by PyPy-1.1.
+ """
+
+ Message = Message # to allow later customization
+ keywords2consumer = {}
+
+ def __init__(self, keywords, keywordmapper=None, **kw):
+ if hasattr(keywords, 'split'):
+ keywords = tuple(keywords.split())
+ self._keywords = keywords
+ if keywordmapper is None:
+ keywordmapper = default_keywordmapper
+ self._keywordmapper = keywordmapper
+
+ def __repr__(self):
+ return "<py.log.Producer %s>" % ":".join(self._keywords)
+
+ def __getattr__(self, name):
+ if '_' in name:
+ raise AttributeError(name)
+ producer = self.__class__(self._keywords + (name,))
+ setattr(self, name, producer)
+ return producer
+
+ def __call__(self, *args):
+ """ write a message to the appropriate consumer(s) """
+ func = self._keywordmapper.getconsumer(self._keywords)
+ if func is not None:
+ func(self.Message(self._keywords, args))
+
+class KeywordMapper:
+ def __init__(self):
+ self.keywords2consumer = {}
+
+ def getstate(self):
+ return self.keywords2consumer.copy()
+
+ def setstate(self, state):
+ self.keywords2consumer.clear()
+ self.keywords2consumer.update(state)
+
+ def getconsumer(self, keywords):
+ """ return a consumer matching the given keywords.
+
+ tries to find the most suitable consumer by walking, starting from
+ the back, the list of keywords, the first consumer matching a
+ keyword is returned (falling back to py.log.default)
+ """
+ for i in range(len(keywords), 0, -1):
+ try:
+ return self.keywords2consumer[keywords[:i]]
+ except KeyError:
+ continue
+ return self.keywords2consumer.get('default', default_consumer)
+
+ def setconsumer(self, keywords, consumer):
+ """ set a consumer for a set of keywords. """
+ # normalize to tuples
+ if isinstance(keywords, str):
+ keywords = tuple(filter(None, keywords.split()))
+ elif hasattr(keywords, '_keywords'):
+ keywords = keywords._keywords
+ elif not isinstance(keywords, tuple):
+ raise TypeError("key %r is not a string or tuple" % (keywords,))
+ if consumer is not None and not py.builtin.callable(consumer):
+ if not hasattr(consumer, 'write'):
+ raise TypeError(
+ "%r should be None, callable or file-like" % (consumer,))
+ consumer = File(consumer)
+ self.keywords2consumer[keywords] = consumer
+
+
+def default_consumer(msg):
+ """ the default consumer, prints the message to stdout (using 'print') """
+ sys.stderr.write(str(msg)+"\n")
+
+default_keywordmapper = KeywordMapper()
+
+
+def setconsumer(keywords, consumer):
+ default_keywordmapper.setconsumer(keywords, consumer)
+
+
+def setstate(state):
+ default_keywordmapper.setstate(state)
+
+
+def getstate():
+ return default_keywordmapper.getstate()
+
+#
+# Consumers
+#
+
+
+class File(object):
+ """ log consumer wrapping a file(-like) object """
+ def __init__(self, f):
+ assert hasattr(f, 'write')
# assert isinstance(f, file) or not hasattr(f, 'open')
- self._file = f
-
- def __call__(self, msg):
- """ write a message to the log """
- self._file.write(str(msg) + "\n")
- if hasattr(self._file, 'flush'):
- self._file.flush()
-
-
-class Path(object):
- """ log consumer that opens and writes to a Path """
- def __init__(self, filename, append=False,
- delayed_create=False, buffering=False):
- self._append = append
- self._filename = str(filename)
- self._buffering = buffering
- if not delayed_create:
- self._openfile()
-
- def _openfile(self):
- mode = self._append and 'a' or 'w'
- f = open(self._filename, mode)
- self._file = f
-
- def __call__(self, msg):
- """ write a message to the log """
- if not hasattr(self, "_file"):
- self._openfile()
- self._file.write(str(msg) + "\n")
- if not self._buffering:
- self._file.flush()
-
-
-def STDOUT(msg):
- """ consumer that writes to sys.stdout """
- sys.stdout.write(str(msg)+"\n")
-
-
-def STDERR(msg):
- """ consumer that writes to sys.stderr """
- sys.stderr.write(str(msg)+"\n")
-
-
-class Syslog:
- """ consumer that writes to the syslog daemon """
-
+ self._file = f
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ self._file.write(str(msg) + "\n")
+ if hasattr(self._file, 'flush'):
+ self._file.flush()
+
+
+class Path(object):
+ """ log consumer that opens and writes to a Path """
+ def __init__(self, filename, append=False,
+ delayed_create=False, buffering=False):
+ self._append = append
+ self._filename = str(filename)
+ self._buffering = buffering
+ if not delayed_create:
+ self._openfile()
+
+ def _openfile(self):
+ mode = self._append and 'a' or 'w'
+ f = open(self._filename, mode)
+ self._file = f
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ if not hasattr(self, "_file"):
+ self._openfile()
+ self._file.write(str(msg) + "\n")
+ if not self._buffering:
+ self._file.flush()
+
+
+def STDOUT(msg):
+ """ consumer that writes to sys.stdout """
+ sys.stdout.write(str(msg)+"\n")
+
+
+def STDERR(msg):
+ """ consumer that writes to sys.stderr """
+ sys.stderr.write(str(msg)+"\n")
+
+
+class Syslog:
+ """ consumer that writes to the syslog daemon """
+
def __init__(self, priority=None):
- if priority is None:
- priority = self.LOG_INFO
- self.priority = priority
-
- def __call__(self, msg):
- """ write a message to the log """
+ if priority is None:
+ priority = self.LOG_INFO
+ self.priority = priority
+
+ def __call__(self, msg):
+ """ write a message to the log """
import syslog
syslog.syslog(self.priority, str(msg))
-
+
try:
import syslog
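
A brief sketch of the producer/consumer wiring implemented above (illustrative; the 'myapp' keyword is a hypothetical example):

    import py

    log = py.log.Producer('myapp')
    # consumers are matched from the most specific keyword tuple down,
    # so this routes 'myapp' and all sub-producers such as log.debug
    py.log.setconsumer('myapp', py.log.STDERR)
    log.debug('hello', 'world')   # writes '[myapp:debug] hello world'
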
diff --git a/contrib/python/py/py/_log/warning.py b/contrib/python/py/py/_log/warning.py
index 4461641501..6ef20d98a2 100644
--- a/contrib/python/py/py/_log/warning.py
+++ b/contrib/python/py/py/_log/warning.py
@@ -1,79 +1,79 @@
-import py, sys
-
-class DeprecationWarning(DeprecationWarning):
- def __init__(self, msg, path, lineno):
- self.msg = msg
- self.path = path
- self.lineno = lineno
- def __repr__(self):
- return "%s:%d: %s" %(self.path, self.lineno+1, self.msg)
- def __str__(self):
- return self.msg
-
-def _apiwarn(startversion, msg, stacklevel=2, function=None):
- # below is mostly COPIED from python2.4/warnings.py's def warn()
- # Get context information
- if isinstance(stacklevel, str):
- frame = sys._getframe(1)
- level = 1
- found = frame.f_code.co_filename.find(stacklevel) != -1
- while frame:
- co = frame.f_code
- if co.co_filename.find(stacklevel) == -1:
- if found:
- stacklevel = level
- break
- else:
- found = True
- level += 1
- frame = frame.f_back
- else:
- stacklevel = 1
- msg = "%s (since version %s)" %(msg, startversion)
- warn(msg, stacklevel=stacklevel+1, function=function)
-
+import py, sys
-def warn(msg, stacklevel=1, function=None):
- if function is not None:
+class DeprecationWarning(DeprecationWarning):
+ def __init__(self, msg, path, lineno):
+ self.msg = msg
+ self.path = path
+ self.lineno = lineno
+ def __repr__(self):
+ return "%s:%d: %s" %(self.path, self.lineno+1, self.msg)
+ def __str__(self):
+ return self.msg
+
+def _apiwarn(startversion, msg, stacklevel=2, function=None):
+ # below is mostly COPIED from python2.4/warnings.py's def warn()
+ # Get context information
+ if isinstance(stacklevel, str):
+ frame = sys._getframe(1)
+ level = 1
+ found = frame.f_code.co_filename.find(stacklevel) != -1
+ while frame:
+ co = frame.f_code
+ if co.co_filename.find(stacklevel) == -1:
+ if found:
+ stacklevel = level
+ break
+ else:
+ found = True
+ level += 1
+ frame = frame.f_back
+ else:
+ stacklevel = 1
+ msg = "%s (since version %s)" %(msg, startversion)
+ warn(msg, stacklevel=stacklevel+1, function=function)
+
+
+def warn(msg, stacklevel=1, function=None):
+ if function is not None:
import inspect
filename = inspect.getfile(function)
- lineno = py.code.getrawcode(function).co_firstlineno
- else:
- try:
- caller = sys._getframe(stacklevel)
- except ValueError:
- globals = sys.__dict__
- lineno = 1
- else:
- globals = caller.f_globals
- lineno = caller.f_lineno
- if '__name__' in globals:
- module = globals['__name__']
- else:
- module = "<string>"
- filename = globals.get('__file__')
- if filename:
- fnl = filename.lower()
- if fnl.endswith(".pyc") or fnl.endswith(".pyo"):
- filename = filename[:-1]
- elif fnl.endswith("$py.class"):
- filename = filename.replace('$py.class', '.py')
- else:
- if module == "__main__":
- try:
- filename = sys.argv[0]
- except AttributeError:
- # embedded interpreters don't have sys.argv, see bug #839151
- filename = '__main__'
- if not filename:
- filename = module
- path = py.path.local(filename)
- warning = DeprecationWarning(msg, path, lineno)
+ lineno = py.code.getrawcode(function).co_firstlineno
+ else:
+ try:
+ caller = sys._getframe(stacklevel)
+ except ValueError:
+ globals = sys.__dict__
+ lineno = 1
+ else:
+ globals = caller.f_globals
+ lineno = caller.f_lineno
+ if '__name__' in globals:
+ module = globals['__name__']
+ else:
+ module = "<string>"
+ filename = globals.get('__file__')
+ if filename:
+ fnl = filename.lower()
+ if fnl.endswith(".pyc") or fnl.endswith(".pyo"):
+ filename = filename[:-1]
+ elif fnl.endswith("$py.class"):
+ filename = filename.replace('$py.class', '.py')
+ else:
+ if module == "__main__":
+ try:
+ filename = sys.argv[0]
+ except AttributeError:
+ # embedded interpreters don't have sys.argv, see bug #839151
+ filename = '__main__'
+ if not filename:
+ filename = module
+ path = py.path.local(filename)
+ warning = DeprecationWarning(msg, path, lineno)
import warnings
warnings.warn_explicit(warning, category=Warning,
- filename=str(warning.path),
- lineno=warning.lineno,
+ filename=str(warning.path),
+ lineno=warning.lineno,
registry=warnings.__dict__.setdefault(
- "__warningsregistry__", {})
- )
-
+ "__warningsregistry__", {})
+ )
+
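A short sketch of how _apiwarn above is meant to be called by library code, assuming it is exported as py.log._apiwarn (the export itself is outside this hunk): stacklevel=2 attributes the warning to the caller of the deprecated function, and the message gains the "(since version ...)" suffix built in _apiwarn:

    import warnings
    import py

    def old_helper():
        # point the warning at old_helper's caller, not at this line
        py.log._apiwarn("1.4", "old_helper() is deprecated", stacklevel=2)
        return 42

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")   # ensure the warning is recorded
        old_helper()

    print(caught[0].message)   # old_helper() is deprecated (since version 1.4)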
diff --git a/contrib/python/py/py/_path/__init__.py b/contrib/python/py/py/_path/__init__.py
index 040337cf87..51f3246f80 100644
--- a/contrib/python/py/py/_path/__init__.py
+++ b/contrib/python/py/py/_path/__init__.py
@@ -1 +1 @@
-""" unified file system api """
+""" unified file system api """
diff --git a/contrib/python/py/py/_path/cacheutil.py b/contrib/python/py/py/_path/cacheutil.py
index 8fca4799b8..9922504750 100644
--- a/contrib/python/py/py/_path/cacheutil.py
+++ b/contrib/python/py/py/_path/cacheutil.py
@@ -1,114 +1,114 @@
-"""
-This module contains multithread-safe cache implementations.
-
-All Caches have
-
- getorbuild(key, builder)
- delentry(key)
-
-methods and allow configuration when instantiating the cache class.
-"""
-from time import time as gettime
-
-class BasicCache(object):
- def __init__(self, maxentries=128):
- self.maxentries = maxentries
- self.prunenum = int(maxentries - maxentries/8)
- self._dict = {}
-
- def clear(self):
- self._dict.clear()
-
- def _getentry(self, key):
- return self._dict[key]
-
- def _putentry(self, key, entry):
- self._prunelowestweight()
- self._dict[key] = entry
-
- def delentry(self, key, raising=False):
- try:
- del self._dict[key]
- except KeyError:
- if raising:
- raise
-
- def getorbuild(self, key, builder):
- try:
- entry = self._getentry(key)
- except KeyError:
- entry = self._build(key, builder)
- self._putentry(key, entry)
- return entry.value
-
- def _prunelowestweight(self):
- """ prune out entries with lowest weight. """
- numentries = len(self._dict)
- if numentries >= self.maxentries:
- # evict according to entry's weight
- items = [(entry.weight, key)
- for key, entry in self._dict.items()]
- items.sort()
- index = numentries - self.prunenum
- if index > 0:
- for weight, key in items[:index]:
- # in MT situations the element might be gone
- self.delentry(key, raising=False)
-
-class BuildcostAccessCache(BasicCache):
- """ A BuildTime/Access-counting cache implementation.
- the weight of a value is computed as the product of
-
- num-accesses-of-a-value * time-to-build-the-value
-
- The values with the least such weights are evicted
- if the cache maxentries threshold is superceded.
- For implementation flexibility more than one object
- might be evicted at a time.
- """
- # time function to use for measuring build-times
-
- def _build(self, key, builder):
- start = gettime()
- val = builder()
- end = gettime()
- return WeightedCountingEntry(val, end-start)
-
-
-class WeightedCountingEntry(object):
- def __init__(self, value, oneweight):
- self._value = value
- self.weight = self._oneweight = oneweight
-
- def value(self):
- self.weight += self._oneweight
- return self._value
- value = property(value)
-
-class AgingCache(BasicCache):
- """ This cache prunes out cache entries that are too old.
- """
- def __init__(self, maxentries=128, maxseconds=10.0):
- super(AgingCache, self).__init__(maxentries)
- self.maxseconds = maxseconds
-
- def _getentry(self, key):
- entry = self._dict[key]
- if entry.isexpired():
- self.delentry(key)
- raise KeyError(key)
- return entry
-
- def _build(self, key, builder):
- val = builder()
- entry = AgingEntry(val, gettime() + self.maxseconds)
- return entry
-
-class AgingEntry(object):
- def __init__(self, value, expirationtime):
- self.value = value
- self.weight = expirationtime
-
- def isexpired(self):
- t = gettime()
- return t >= self.weight
+"""
+This module contains multithread-safe cache implementations.
+
+All Caches have
+
+ getorbuild(key, builder)
+ delentry(key)
+
+methods and allow configuration when instantiating the cache class.
+"""
+from time import time as gettime
+
+class BasicCache(object):
+ def __init__(self, maxentries=128):
+ self.maxentries = maxentries
+ self.prunenum = int(maxentries - maxentries/8)
+ self._dict = {}
+
+ def clear(self):
+ self._dict.clear()
+
+ def _getentry(self, key):
+ return self._dict[key]
+
+ def _putentry(self, key, entry):
+ self._prunelowestweight()
+ self._dict[key] = entry
+
+ def delentry(self, key, raising=False):
+ try:
+ del self._dict[key]
+ except KeyError:
+ if raising:
+ raise
+
+ def getorbuild(self, key, builder):
+ try:
+ entry = self._getentry(key)
+ except KeyError:
+ entry = self._build(key, builder)
+ self._putentry(key, entry)
+ return entry.value
+
+ def _prunelowestweight(self):
+ """ prune out entries with lowest weight. """
+ numentries = len(self._dict)
+ if numentries >= self.maxentries:
+ # evict according to entry's weight
+ items = [(entry.weight, key)
+ for key, entry in self._dict.items()]
+ items.sort()
+ index = numentries - self.prunenum
+ if index > 0:
+ for weight, key in items[:index]:
+ # in MT situations the element might be gone
+ self.delentry(key, raising=False)
+
+class BuildcostAccessCache(BasicCache):
+ """ A BuildTime/Access-counting cache implementation.
+ the weight of a value is computed as the product of
+
+ num-accesses-of-a-value * time-to-build-the-value
+
+ The values with the least such weights are evicted
+ if the cache maxentries threshold is exceeded.
+ For implementation flexibility more than one object
+ might be evicted at a time.
+ """
+ # time function to use for measuring build-times
+
+ def _build(self, key, builder):
+ start = gettime()
+ val = builder()
+ end = gettime()
+ return WeightedCountingEntry(val, end-start)
+
+
+class WeightedCountingEntry(object):
+ def __init__(self, value, oneweight):
+ self._value = value
+ self.weight = self._oneweight = oneweight
+
+ def value(self):
+ self.weight += self._oneweight
+ return self._value
+ value = property(value)
+
+class AgingCache(BasicCache):
+ """ This cache prunes out cache entries that are too old.
+ """
+ def __init__(self, maxentries=128, maxseconds=10.0):
+ super(AgingCache, self).__init__(maxentries)
+ self.maxseconds = maxseconds
+
+ def _getentry(self, key):
+ entry = self._dict[key]
+ if entry.isexpired():
+ self.delentry(key)
+ raise KeyError(key)
+ return entry
+
+ def _build(self, key, builder):
+ val = builder()
+ entry = AgingEntry(val, gettime() + self.maxseconds)
+ return entry
+
+class AgingEntry(object):
+ def __init__(self, value, expirationtime):
+ self.value = value
+ self.weight = expirationtime
+
+ def isexpired(self):
+ t = gettime()
+ return t >= self.weight
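Everything needed to exercise these caches appears in the hunk above; a minimal sketch of the two eviction policies:

    from py._path.cacheutil import AgingCache, BuildcostAccessCache

    cache = BuildcostAccessCache(maxentries=64)

    def build():
        # deliberately non-trivial, so build time yields a measurable weight
        return sum(i * i for i in range(100000))

    first = cache.getorbuild("squares", build)    # miss: builder runs once
    second = cache.getorbuild("squares", build)   # hit: weight grows per access
    assert first == second

    aging = AgingCache(maxentries=16, maxseconds=0.5)
    aging.getorbuild("token", lambda: "fresh")    # expires ~0.5s after building
    aging.delentry("token")                       # explicit removal; silent if gone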
diff --git a/contrib/python/py/py/_path/common.py b/contrib/python/py/py/_path/common.py
index 2b1954d2bd..2364e5fef5 100644
--- a/contrib/python/py/py/_path/common.py
+++ b/contrib/python/py/py/_path/common.py
@@ -1,15 +1,15 @@
-"""
-"""
+"""
+"""
import warnings
import os
import sys
import posixpath
import fnmatch
-import py
-
-# Moved from local.py.
-iswin32 = sys.platform == "win32" or (getattr(os, '_name', False) == 'nt')
-
+import py
+
+# Moved from local.py.
+iswin32 = sys.platform == "win32" or (getattr(os, '_name', False) == 'nt')
+
try:
# FileNotFoundError might happen in py34, and is not available with py27.
import_errors = (ImportError, FileNotFoundError)
@@ -50,410 +50,410 @@ except ImportError:
raise TypeError("expected str, bytes or os.PathLike object, not "
+ path_type.__name__)
-class Checkers:
- _depend_on_existence = 'exists', 'link', 'dir', 'file'
-
- def __init__(self, path):
- self.path = path
-
- def dir(self):
- raise NotImplementedError
-
- def file(self):
- raise NotImplementedError
-
- def dotfile(self):
- return self.path.basename.startswith('.')
-
- def ext(self, arg):
- if not arg.startswith('.'):
- arg = '.' + arg
- return self.path.ext == arg
-
- def exists(self):
- raise NotImplementedError
-
- def basename(self, arg):
- return self.path.basename == arg
-
- def basestarts(self, arg):
- return self.path.basename.startswith(arg)
-
- def relto(self, arg):
- return self.path.relto(arg)
-
- def fnmatch(self, arg):
- return self.path.fnmatch(arg)
-
- def endswith(self, arg):
- return str(self.path).endswith(arg)
-
- def _evaluate(self, kw):
- for name, value in kw.items():
- invert = False
- meth = None
- try:
- meth = getattr(self, name)
- except AttributeError:
- if name[:3] == 'not':
- invert = True
- try:
- meth = getattr(self, name[3:])
- except AttributeError:
- pass
- if meth is None:
- raise TypeError(
- "no %r checker available for %r" % (name, self.path))
- try:
- if py.code.getrawcode(meth).co_argcount > 1:
- if (not meth(value)) ^ invert:
- return False
- else:
- if bool(value) ^ bool(meth()) ^ invert:
- return False
- except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY):
- # EBUSY feels not entirely correct,
- # but its kind of necessary since ENOMEDIUM
- # is not accessible in python
- for name in self._depend_on_existence:
- if name in kw:
- if kw.get(name):
- return False
- name = 'not' + name
- if name in kw:
- if not kw.get(name):
- return False
- return True
-
-class NeverRaised(Exception):
- pass
-
-class PathBase(object):
- """ shared implementation for filesystem path objects."""
- Checkers = Checkers
-
- def __div__(self, other):
+class Checkers:
+ _depend_on_existence = 'exists', 'link', 'dir', 'file'
+
+ def __init__(self, path):
+ self.path = path
+
+ def dir(self):
+ raise NotImplementedError
+
+ def file(self):
+ raise NotImplementedError
+
+ def dotfile(self):
+ return self.path.basename.startswith('.')
+
+ def ext(self, arg):
+ if not arg.startswith('.'):
+ arg = '.' + arg
+ return self.path.ext == arg
+
+ def exists(self):
+ raise NotImplementedError
+
+ def basename(self, arg):
+ return self.path.basename == arg
+
+ def basestarts(self, arg):
+ return self.path.basename.startswith(arg)
+
+ def relto(self, arg):
+ return self.path.relto(arg)
+
+ def fnmatch(self, arg):
+ return self.path.fnmatch(arg)
+
+ def endswith(self, arg):
+ return str(self.path).endswith(arg)
+
+ def _evaluate(self, kw):
+ for name, value in kw.items():
+ invert = False
+ meth = None
+ try:
+ meth = getattr(self, name)
+ except AttributeError:
+ if name[:3] == 'not':
+ invert = True
+ try:
+ meth = getattr(self, name[3:])
+ except AttributeError:
+ pass
+ if meth is None:
+ raise TypeError(
+ "no %r checker available for %r" % (name, self.path))
+ try:
+ if py.code.getrawcode(meth).co_argcount > 1:
+ if (not meth(value)) ^ invert:
+ return False
+ else:
+ if bool(value) ^ bool(meth()) ^ invert:
+ return False
+ except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY):
+ # EBUSY feels not entirely correct,
+ # but it's kind of necessary since ENOMEDIUM
+ # is not accessible in python
+ for name in self._depend_on_existence:
+ if name in kw:
+ if kw.get(name):
+ return False
+ name = 'not' + name
+ if name in kw:
+ if not kw.get(name):
+ return False
+ return True
+
+class NeverRaised(Exception):
+ pass
+
+class PathBase(object):
+ """ shared implementation for filesystem path objects."""
+ Checkers = Checkers
+
+ def __div__(self, other):
return self.join(fspath(other))
- __truediv__ = __div__ # py3k
-
- def basename(self):
- """ basename part of path. """
- return self._getbyspec('basename')[0]
- basename = property(basename, None, None, basename.__doc__)
-
- def dirname(self):
- """ dirname part of path. """
- return self._getbyspec('dirname')[0]
- dirname = property(dirname, None, None, dirname.__doc__)
-
- def purebasename(self):
- """ pure base name of the path."""
- return self._getbyspec('purebasename')[0]
- purebasename = property(purebasename, None, None, purebasename.__doc__)
-
- def ext(self):
- """ extension of the path (including the '.')."""
- return self._getbyspec('ext')[0]
- ext = property(ext, None, None, ext.__doc__)
-
- def dirpath(self, *args, **kwargs):
- """ return the directory path joined with any given path arguments. """
- return self.new(basename='').join(*args, **kwargs)
-
- def read_binary(self):
- """ read and return a bytestring from reading the path. """
- with self.open('rb') as f:
- return f.read()
-
- def read_text(self, encoding):
- """ read and return a Unicode string from reading the path. """
- with self.open("r", encoding=encoding) as f:
- return f.read()
-
-
- def read(self, mode='r'):
- """ read and return a bytestring from reading the path. """
- with self.open(mode) as f:
- return f.read()
-
- def readlines(self, cr=1):
- """ read and return a list of lines from the path. if cr is False, the
-newline will be removed from the end of each line. """
+ __truediv__ = __div__ # py3k
+
+ def basename(self):
+ """ basename part of path. """
+ return self._getbyspec('basename')[0]
+ basename = property(basename, None, None, basename.__doc__)
+
+ def dirname(self):
+ """ dirname part of path. """
+ return self._getbyspec('dirname')[0]
+ dirname = property(dirname, None, None, dirname.__doc__)
+
+ def purebasename(self):
+ """ pure base name of the path."""
+ return self._getbyspec('purebasename')[0]
+ purebasename = property(purebasename, None, None, purebasename.__doc__)
+
+ def ext(self):
+ """ extension of the path (including the '.')."""
+ return self._getbyspec('ext')[0]
+ ext = property(ext, None, None, ext.__doc__)
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path joined with any given path arguments. """
+ return self.new(basename='').join(*args, **kwargs)
+
+ def read_binary(self):
+ """ read and return a bytestring from reading the path. """
+ with self.open('rb') as f:
+ return f.read()
+
+ def read_text(self, encoding):
+ """ read and return a Unicode string from reading the path. """
+ with self.open("r", encoding=encoding) as f:
+ return f.read()
+
+
+ def read(self, mode='r'):
+ """ read and return a bytestring from reading the path. """
+ with self.open(mode) as f:
+ return f.read()
+
+ def readlines(self, cr=1):
+ """ read and return a list of lines from the path. if cr is False, the
+newline will be removed from the end of each line. """
if sys.version_info < (3, ):
mode = 'rU'
else: # python 3 deprecates mode "U" in favor of "newline" option
mode = 'r'
- if not cr:
+ if not cr:
content = self.read(mode)
- return content.split('\n')
- else:
+ return content.split('\n')
+ else:
f = self.open(mode)
- try:
- return f.readlines()
- finally:
- f.close()
-
- def load(self):
- """ (deprecated) return object unpickled from self.read() """
- f = self.open('rb')
- try:
+ try:
+ return f.readlines()
+ finally:
+ f.close()
+
+ def load(self):
+ """ (deprecated) return object unpickled from self.read() """
+ f = self.open('rb')
+ try:
import pickle
return py.error.checked_call(pickle.load, f)
- finally:
- f.close()
-
- def move(self, target):
- """ move this path to target. """
- if target.relto(self):
+ finally:
+ f.close()
+
+ def move(self, target):
+ """ move this path to target. """
+ if target.relto(self):
raise py.error.EINVAL(
target,
- "cannot move path into a subdirectory of itself")
- try:
- self.rename(target)
- except py.error.EXDEV: # invalid cross-device link
- self.copy(target)
- self.remove()
-
- def __repr__(self):
- """ return a string representation of this path. """
- return repr(str(self))
-
- def check(self, **kw):
- """ check a path for existence and properties.
-
- Without arguments, return True if the path exists, otherwise False.
-
- valid checkers::
-
- file=1 # is a file
- file=0 # is not a file (may not even exist)
- dir=1 # is a dir
- link=1 # is a link
- exists=1 # exists
-
- You can specify multiple checker definitions, for example::
-
- path.check(file=1, link=1) # a link pointing to a file
- """
- if not kw:
+ "cannot move path into a subdirectory of itself")
+ try:
+ self.rename(target)
+ except py.error.EXDEV: # invalid cross-device link
+ self.copy(target)
+ self.remove()
+
+ def __repr__(self):
+ """ return a string representation of this path. """
+ return repr(str(self))
+
+ def check(self, **kw):
+ """ check a path for existence and properties.
+
+ Without arguments, return True if the path exists, otherwise False.
+
+ valid checkers::
+
+ file=1 # is a file
+ file=0 # is not a file (may not even exist)
+ dir=1 # is a dir
+ link=1 # is a link
+ exists=1 # exists
+
+ You can specify multiple checker definitions, for example::
+
+ path.check(file=1, link=1) # a link pointing to a file
+ """
+ if not kw:
kw = {'exists': 1}
- return self.Checkers(self)._evaluate(kw)
-
- def fnmatch(self, pattern):
- """return true if the basename/fullname matches the glob-'pattern'.
-
- valid pattern characters::
-
- * matches everything
- ? matches any single character
- [seq] matches any character in seq
- [!seq] matches any char not in seq
-
- If the pattern contains a path-separator then the full path
- is used for pattern matching and a '*' is prepended to the
- pattern.
-
- if the pattern doesn't contain a path-separator the pattern
- is only matched against the basename.
- """
- return FNMatcher(pattern)(self)
-
- def relto(self, relpath):
- """ return a string which is the relative part of the path
- to the given 'relpath'.
- """
- if not isinstance(relpath, (str, PathBase)):
- raise TypeError("%r: not a string or path object" %(relpath,))
- strrelpath = str(relpath)
- if strrelpath and strrelpath[-1] != self.sep:
- strrelpath += self.sep
- #assert strrelpath[-1] == self.sep
- #assert strrelpath[-2] != self.sep
+ return self.Checkers(self)._evaluate(kw)
+
+ def fnmatch(self, pattern):
+ """return true if the basename/fullname matches the glob-'pattern'.
+
+ valid pattern characters::
+
+ * matches everything
+ ? matches any single character
+ [seq] matches any character in seq
+ [!seq] matches any char not in seq
+
+ If the pattern contains a path-separator then the full path
+ is used for pattern matching and a '*' is prepended to the
+ pattern.
+
+ if the pattern doesn't contain a path-separator the pattern
+ is only matched against the basename.
+ """
+ return FNMatcher(pattern)(self)
+
+ def relto(self, relpath):
+ """ return a string which is the relative part of the path
+ to the given 'relpath'.
+ """
+ if not isinstance(relpath, (str, PathBase)):
+ raise TypeError("%r: not a string or path object" %(relpath,))
+ strrelpath = str(relpath)
+ if strrelpath and strrelpath[-1] != self.sep:
+ strrelpath += self.sep
+ #assert strrelpath[-1] == self.sep
+ #assert strrelpath[-2] != self.sep
strself = self.strpath
- if sys.platform == "win32" or getattr(os, '_name', None) == 'nt':
- if os.path.normcase(strself).startswith(
- os.path.normcase(strrelpath)):
- return strself[len(strrelpath):]
- elif strself.startswith(strrelpath):
- return strself[len(strrelpath):]
- return ""
-
- def ensure_dir(self, *args):
- """ ensure the path joined with args is a directory. """
- return self.ensure(*args, **{"dir": True})
-
- def bestrelpath(self, dest):
- """ return a string which is a relative path from self
- (assumed to be a directory) to dest such that
- self.join(bestrelpath) == dest and if not such
- path can be determined return dest.
- """
- try:
- if self == dest:
- return os.curdir
- base = self.common(dest)
- if not base: # can be the case on windows
- return str(dest)
- self2base = self.relto(base)
- reldest = dest.relto(base)
- if self2base:
- n = self2base.count(self.sep) + 1
- else:
- n = 0
- l = [os.pardir] * n
- if reldest:
- l.append(reldest)
- target = dest.sep.join(l)
- return target
- except AttributeError:
- return str(dest)
-
- def exists(self):
- return self.check()
-
- def isdir(self):
- return self.check(dir=1)
-
- def isfile(self):
- return self.check(file=1)
-
- def parts(self, reverse=False):
- """ return a root-first list of all ancestor directories
- plus the path itself.
- """
- current = self
- l = [self]
- while 1:
- last = current
- current = current.dirpath()
- if last == current:
- break
- l.append(current)
- if not reverse:
- l.reverse()
- return l
-
- def common(self, other):
- """ return the common part shared with the other path
- or None if there is no common part.
- """
- last = None
- for x, y in zip(self.parts(), other.parts()):
- if x != y:
- return last
- last = x
- return last
-
- def __add__(self, other):
- """ return new path object with 'other' added to the basename"""
- return self.new(basename=self.basename+str(other))
-
- def __cmp__(self, other):
- """ return sort value (-1, 0, +1). """
- try:
- return cmp(self.strpath, other.strpath)
- except AttributeError:
- return cmp(str(self), str(other)) # self.path, other.path)
-
- def __lt__(self, other):
- try:
- return self.strpath < other.strpath
- except AttributeError:
- return str(self) < str(other)
-
- def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False):
- """ yields all paths below the current one
-
- fil is a filter (glob pattern or callable), if not matching the
- path will not be yielded, defaulting to None (everything is
- returned)
-
- rec is a filter (glob pattern or callable) that controls whether
- a node is descended, defaulting to None
-
- ignore is an Exception class that is ignoredwhen calling dirlist()
- on any of the paths (by default, all exceptions are reported)
-
- bf if True will cause a breadthfirst search instead of the
- default depthfirst. Default: False
-
- sort if True will sort entries within each directory level.
- """
- for x in Visitor(fil, rec, ignore, bf, sort).gen(self):
- yield x
-
- def _sortlist(self, res, sort):
- if sort:
- if hasattr(sort, '__call__'):
+ if sys.platform == "win32" or getattr(os, '_name', None) == 'nt':
+ if os.path.normcase(strself).startswith(
+ os.path.normcase(strrelpath)):
+ return strself[len(strrelpath):]
+ elif strself.startswith(strrelpath):
+ return strself[len(strrelpath):]
+ return ""
+
+ def ensure_dir(self, *args):
+ """ ensure the path joined with args is a directory. """
+ return self.ensure(*args, **{"dir": True})
+
+ def bestrelpath(self, dest):
+ """ return a string which is a relative path from self
+ (assumed to be a directory) to dest such that
+ self.join(bestrelpath) == dest and if not such
+ path can be determined return dest.
+ """
+ try:
+ if self == dest:
+ return os.curdir
+ base = self.common(dest)
+ if not base: # can be the case on windows
+ return str(dest)
+ self2base = self.relto(base)
+ reldest = dest.relto(base)
+ if self2base:
+ n = self2base.count(self.sep) + 1
+ else:
+ n = 0
+ l = [os.pardir] * n
+ if reldest:
+ l.append(reldest)
+ target = dest.sep.join(l)
+ return target
+ except AttributeError:
+ return str(dest)
+
+ def exists(self):
+ return self.check()
+
+ def isdir(self):
+ return self.check(dir=1)
+
+ def isfile(self):
+ return self.check(file=1)
+
+ def parts(self, reverse=False):
+ """ return a root-first list of all ancestor directories
+ plus the path itself.
+ """
+ current = self
+ l = [self]
+ while 1:
+ last = current
+ current = current.dirpath()
+ if last == current:
+ break
+ l.append(current)
+ if not reverse:
+ l.reverse()
+ return l
+
+ def common(self, other):
+ """ return the common part shared with the other path
+ or None if there is no common part.
+ """
+ last = None
+ for x, y in zip(self.parts(), other.parts()):
+ if x != y:
+ return last
+ last = x
+ return last
+
+ def __add__(self, other):
+ """ return new path object with 'other' added to the basename"""
+ return self.new(basename=self.basename+str(other))
+
+ def __cmp__(self, other):
+ """ return sort value (-1, 0, +1). """
+ try:
+ return cmp(self.strpath, other.strpath)
+ except AttributeError:
+ return cmp(str(self), str(other)) # self.path, other.path)
+
+ def __lt__(self, other):
+ try:
+ return self.strpath < other.strpath
+ except AttributeError:
+ return str(self) < str(other)
+
+ def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False):
+ """ yields all paths below the current one
+
+ fil is a filter (glob pattern or callable), if not matching the
+ path will not be yielded, defaulting to None (everything is
+ returned)
+
+ rec is a filter (glob pattern or callable) that controls whether
+ a node is descended, defaulting to None
+
+ ignore is an Exception class that is ignored when calling listdir()
+ on any of the paths (by default, all exceptions are reported)
+
+ bf if True will cause a breadth-first search instead of the
+ default depth-first. Default: False
+
+ sort if True will sort entries within each directory level.
+ """
+ for x in Visitor(fil, rec, ignore, bf, sort).gen(self):
+ yield x
+
+ def _sortlist(self, res, sort):
+ if sort:
+ if hasattr(sort, '__call__'):
warnings.warn(DeprecationWarning(
"listdir(sort=callable) is deprecated and breaks on python3"
), stacklevel=3)
- res.sort(sort)
- else:
- res.sort()
-
- def samefile(self, other):
- """ return True if other refers to the same stat object as self. """
- return self.strpath == str(other)
-
+ res.sort(sort)
+ else:
+ res.sort()
+
+ def samefile(self, other):
+ """ return True if other refers to the same stat object as self. """
+ return self.strpath == str(other)
+
def __fspath__(self):
return self.strpath
-class Visitor:
- def __init__(self, fil, rec, ignore, bf, sort):
+class Visitor:
+ def __init__(self, fil, rec, ignore, bf, sort):
if isinstance(fil, py.builtin._basestring):
- fil = FNMatcher(fil)
+ fil = FNMatcher(fil)
if isinstance(rec, py.builtin._basestring):
- self.rec = FNMatcher(rec)
- elif not hasattr(rec, '__call__') and rec:
- self.rec = lambda path: True
- else:
- self.rec = rec
- self.fil = fil
- self.ignore = ignore
- self.breadthfirst = bf
- self.optsort = sort and sorted or (lambda x: x)
-
- def gen(self, path):
- try:
- entries = path.listdir()
- except self.ignore:
- return
- rec = self.rec
- dirs = self.optsort([p for p in entries
- if p.check(dir=1) and (rec is None or rec(p))])
- if not self.breadthfirst:
- for subdir in dirs:
- for p in self.gen(subdir):
- yield p
- for p in self.optsort(entries):
- if self.fil is None or self.fil(p):
- yield p
- if self.breadthfirst:
- for subdir in dirs:
- for p in self.gen(subdir):
- yield p
-
-class FNMatcher:
- def __init__(self, pattern):
- self.pattern = pattern
-
- def __call__(self, path):
- pattern = self.pattern
-
- if (pattern.find(path.sep) == -1 and
- iswin32 and
- pattern.find(posixpath.sep) != -1):
- # Running on Windows, the pattern has no Windows path separators,
- # and the pattern has one or more Posix path separators. Replace
- # the Posix path separators with the Windows path separator.
- pattern = pattern.replace(posixpath.sep, path.sep)
-
- if pattern.find(path.sep) == -1:
- name = path.basename
- else:
- name = str(path) # path.strpath # XXX svn?
- if not os.path.isabs(pattern):
- pattern = '*' + path.sep + pattern
+ self.rec = FNMatcher(rec)
+ elif not hasattr(rec, '__call__') and rec:
+ self.rec = lambda path: True
+ else:
+ self.rec = rec
+ self.fil = fil
+ self.ignore = ignore
+ self.breadthfirst = bf
+ self.optsort = sort and sorted or (lambda x: x)
+
+ def gen(self, path):
+ try:
+ entries = path.listdir()
+ except self.ignore:
+ return
+ rec = self.rec
+ dirs = self.optsort([p for p in entries
+ if p.check(dir=1) and (rec is None or rec(p))])
+ if not self.breadthfirst:
+ for subdir in dirs:
+ for p in self.gen(subdir):
+ yield p
+ for p in self.optsort(entries):
+ if self.fil is None or self.fil(p):
+ yield p
+ if self.breadthfirst:
+ for subdir in dirs:
+ for p in self.gen(subdir):
+ yield p
+
+class FNMatcher:
+ def __init__(self, pattern):
+ self.pattern = pattern
+
+ def __call__(self, path):
+ pattern = self.pattern
+
+ if (pattern.find(path.sep) == -1 and
+ iswin32 and
+ pattern.find(posixpath.sep) != -1):
+ # Running on Windows, the pattern has no Windows path separators,
+ # and the pattern has one or more Posix path separators. Replace
+ # the Posix path separators with the Windows path separator.
+ pattern = pattern.replace(posixpath.sep, path.sep)
+
+ if pattern.find(path.sep) == -1:
+ name = path.basename
+ else:
+ name = str(path) # path.strpath # XXX svn?
+ if not os.path.isabs(pattern):
+ pattern = '*' + path.sep + pattern
return fnmatch.fnmatch(name, pattern)
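The checker keywords, visit(), and FNMatcher above are easiest to see through py.path.local (defined in the next file); a small sketch, assuming the usual py.path.local.mkdtemp() helper from local.py:

    import py

    tmp = py.path.local.mkdtemp()             # scratch directory
    tmp.ensure("pkg", "mod.py")               # ensure() creates parents + file
    tmp.ensure("pkg", "notes.txt")

    assert tmp.join("pkg").check(dir=1)       # keyword checkers from Checkers
    assert tmp.join("pkg", "mod.py").check(file=1, ext=".py")
    assert tmp.join("pkg", "mod.py").check(notdir=1)   # 'not' prefix inverts

    for p in tmp.visit(fil="*.py"):           # string filter -> FNMatcher
        print(p.relto(tmp))                   # pkg/mod.py (os-specific sep)

    tmp.remove()                              # recursive cleanup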
diff --git a/contrib/python/py/py/_path/local.py b/contrib/python/py/py/_path/local.py
index fe5edc61c9..1385a03987 100644
--- a/contrib/python/py/py/_path/local.py
+++ b/contrib/python/py/py/_path/local.py
@@ -1,423 +1,423 @@
-"""
-local path implementation.
-"""
-from __future__ import with_statement
-
-from contextlib import contextmanager
+"""
+local path implementation.
+"""
+from __future__ import with_statement
+
+from contextlib import contextmanager
import sys, os, atexit, io, uuid
-import py
-from py._path import common
+import py
+from py._path import common
from py._path.common import iswin32, fspath
-from stat import S_ISLNK, S_ISDIR, S_ISREG
-
-from os.path import abspath, normpath, isabs, exists, isdir, isfile, islink, dirname
-
-if sys.version_info > (3,0):
- def map_as_list(func, iter):
- return list(map(func, iter))
-else:
- map_as_list = map
-
+from stat import S_ISLNK, S_ISDIR, S_ISREG
+
+from os.path import abspath, normpath, isabs, exists, isdir, isfile, islink, dirname
+
+if sys.version_info > (3,0):
+ def map_as_list(func, iter):
+ return list(map(func, iter))
+else:
+ map_as_list = map
+
ALLOW_IMPORTLIB_MODE = sys.version_info > (3,5)
if ALLOW_IMPORTLIB_MODE:
import importlib
-class Stat(object):
- def __getattr__(self, name):
- return getattr(self._osstatresult, "st_" + name)
-
- def __init__(self, path, osstatresult):
- self.path = path
- self._osstatresult = osstatresult
-
- @property
- def owner(self):
- if iswin32:
- raise NotImplementedError("XXX win32")
- import pwd
- entry = py.error.checked_call(pwd.getpwuid, self.uid)
- return entry[0]
-
- @property
- def group(self):
- """ return group name of file. """
- if iswin32:
- raise NotImplementedError("XXX win32")
- import grp
- entry = py.error.checked_call(grp.getgrgid, self.gid)
- return entry[0]
-
- def isdir(self):
- return S_ISDIR(self._osstatresult.st_mode)
-
- def isfile(self):
- return S_ISREG(self._osstatresult.st_mode)
-
- def islink(self):
- st = self.path.lstat()
- return S_ISLNK(self._osstatresult.st_mode)
-
-class PosixPath(common.PathBase):
- def chown(self, user, group, rec=0):
- """ change ownership to the given user and group.
- user and group may be specified by a number or
- by a name. if rec is True change ownership
- recursively.
- """
- uid = getuserid(user)
- gid = getgroupid(group)
- if rec:
- for x in self.visit(rec=lambda x: x.check(link=0)):
- if x.check(link=0):
- py.error.checked_call(os.chown, str(x), uid, gid)
- py.error.checked_call(os.chown, str(self), uid, gid)
-
- def readlink(self):
- """ return value of a symbolic link. """
- return py.error.checked_call(os.readlink, self.strpath)
-
- def mklinkto(self, oldname):
- """ posix style hard link to another name. """
- py.error.checked_call(os.link, str(oldname), str(self))
-
- def mksymlinkto(self, value, absolute=1):
- """ create a symbolic link with the given value (pointing to another name). """
- if absolute:
- py.error.checked_call(os.symlink, str(value), self.strpath)
- else:
- base = self.common(value)
- # with posix local paths '/' is always a common base
- relsource = self.__class__(value).relto(base)
- reldest = self.relto(base)
- n = reldest.count(self.sep)
- target = self.sep.join(('..', )*n + (relsource, ))
- py.error.checked_call(os.symlink, target, self.strpath)
-
-def getuserid(user):
- import pwd
- if not isinstance(user, int):
- user = pwd.getpwnam(user)[2]
- return user
-
-def getgroupid(group):
- import grp
- if not isinstance(group, int):
- group = grp.getgrnam(group)[2]
- return group
-
-FSBase = not iswin32 and PosixPath or common.PathBase
-
-class LocalPath(FSBase):
- """ object oriented interface to os.path and other local filesystem
- related information.
- """
- class ImportMismatchError(ImportError):
- """ raised on pyimport() if there is a mismatch of __file__'s"""
-
- sep = os.sep
- class Checkers(common.Checkers):
- def _stat(self):
- try:
- return self._statcache
- except AttributeError:
- try:
- self._statcache = self.path.stat()
- except py.error.ELOOP:
- self._statcache = self.path.lstat()
- return self._statcache
-
- def dir(self):
- return S_ISDIR(self._stat().mode)
-
- def file(self):
- return S_ISREG(self._stat().mode)
-
- def exists(self):
- return self._stat()
-
- def link(self):
- st = self.path.lstat()
- return S_ISLNK(st.mode)
-
- def __init__(self, path=None, expanduser=False):
- """ Initialize and return a local Path instance.
-
- Path can be relative to the current directory.
- If path is None it defaults to the current working directory.
- If expanduser is True, tilde-expansion is performed.
- Note that Path instances always carry an absolute path.
- Note also that passing in a local path object will simply return
- the exact same path object. Use new() to get a new copy.
- """
- if path is None:
- self.strpath = py.error.checked_call(os.getcwd)
+class Stat(object):
+ def __getattr__(self, name):
+ return getattr(self._osstatresult, "st_" + name)
+
+ def __init__(self, path, osstatresult):
+ self.path = path
+ self._osstatresult = osstatresult
+
+ @property
+ def owner(self):
+ if iswin32:
+ raise NotImplementedError("XXX win32")
+ import pwd
+ entry = py.error.checked_call(pwd.getpwuid, self.uid)
+ return entry[0]
+
+ @property
+ def group(self):
+ """ return group name of file. """
+ if iswin32:
+ raise NotImplementedError("XXX win32")
+ import grp
+ entry = py.error.checked_call(grp.getgrgid, self.gid)
+ return entry[0]
+
+ def isdir(self):
+ return S_ISDIR(self._osstatresult.st_mode)
+
+ def isfile(self):
+ return S_ISREG(self._osstatresult.st_mode)
+
+ def islink(self):
+ st = self.path.lstat()
+ return S_ISLNK(self._osstatresult.st_mode)
+
+class PosixPath(common.PathBase):
+ def chown(self, user, group, rec=0):
+ """ change ownership to the given user and group.
+ user and group may be specified by a number or
+ by a name. if rec is True change ownership
+ recursively.
+ """
+ uid = getuserid(user)
+ gid = getgroupid(group)
+ if rec:
+ for x in self.visit(rec=lambda x: x.check(link=0)):
+ if x.check(link=0):
+ py.error.checked_call(os.chown, str(x), uid, gid)
+ py.error.checked_call(os.chown, str(self), uid, gid)
+
+ def readlink(self):
+ """ return value of a symbolic link. """
+ return py.error.checked_call(os.readlink, self.strpath)
+
+ def mklinkto(self, oldname):
+ """ posix style hard link to another name. """
+ py.error.checked_call(os.link, str(oldname), str(self))
+
+ def mksymlinkto(self, value, absolute=1):
+ """ create a symbolic link with the given value (pointing to another name). """
+ if absolute:
+ py.error.checked_call(os.symlink, str(value), self.strpath)
+ else:
+ base = self.common(value)
+ # with posix local paths '/' is always a common base
+ relsource = self.__class__(value).relto(base)
+ reldest = self.relto(base)
+ n = reldest.count(self.sep)
+ target = self.sep.join(('..', )*n + (relsource, ))
+ py.error.checked_call(os.symlink, target, self.strpath)
+
+def getuserid(user):
+ import pwd
+ if not isinstance(user, int):
+ user = pwd.getpwnam(user)[2]
+ return user
+
+def getgroupid(group):
+ import grp
+ if not isinstance(group, int):
+ group = grp.getgrnam(group)[2]
+ return group
+
+FSBase = not iswin32 and PosixPath or common.PathBase
+
+class LocalPath(FSBase):
+ """ object oriented interface to os.path and other local filesystem
+ related information.
+ """
+ class ImportMismatchError(ImportError):
+ """ raised on pyimport() if there is a mismatch of __file__'s"""
+
+ sep = os.sep
+ class Checkers(common.Checkers):
+ def _stat(self):
+ try:
+ return self._statcache
+ except AttributeError:
+ try:
+ self._statcache = self.path.stat()
+ except py.error.ELOOP:
+ self._statcache = self.path.lstat()
+ return self._statcache
+
+ def dir(self):
+ return S_ISDIR(self._stat().mode)
+
+ def file(self):
+ return S_ISREG(self._stat().mode)
+
+ def exists(self):
+ return self._stat()
+
+ def link(self):
+ st = self.path.lstat()
+ return S_ISLNK(st.mode)
+
+ def __init__(self, path=None, expanduser=False):
+ """ Initialize and return a local Path instance.
+
+ Path can be relative to the current directory.
+ If path is None it defaults to the current working directory.
+ If expanduser is True, tilde-expansion is performed.
+ Note that Path instances always carry an absolute path.
+ Note also that passing in a local path object will simply return
+ the exact same path object. Use new() to get a new copy.
+ """
+ if path is None:
+ self.strpath = py.error.checked_call(os.getcwd)
else:
try:
path = fspath(path)
except TypeError:
raise ValueError("can only pass None, Path instances "
"or non-empty strings to LocalPath")
- if expanduser:
- path = os.path.expanduser(path)
- self.strpath = abspath(path)
-
- def __hash__(self):
+ if expanduser:
+ path = os.path.expanduser(path)
+ self.strpath = abspath(path)
+
+ def __hash__(self):
s = self.strpath
if iswin32:
s = s.lower()
return hash(s)
-
- def __eq__(self, other):
+
+ def __eq__(self, other):
s1 = fspath(self)
try:
s2 = fspath(other)
except TypeError:
return False
- if iswin32:
- s1 = s1.lower()
- try:
- s2 = s2.lower()
- except AttributeError:
- return False
- return s1 == s2
-
- def __ne__(self, other):
- return not (self == other)
-
- def __lt__(self, other):
+ if iswin32:
+ s1 = s1.lower()
+ try:
+ s2 = s2.lower()
+ except AttributeError:
+ return False
+ return s1 == s2
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __lt__(self, other):
return fspath(self) < fspath(other)
-
- def __gt__(self, other):
+
+ def __gt__(self, other):
return fspath(self) > fspath(other)
-
- def samefile(self, other):
- """ return True if 'other' references the same file as 'self'.
- """
+
+ def samefile(self, other):
+ """ return True if 'other' references the same file as 'self'.
+ """
other = fspath(other)
- if not isabs(other):
- other = abspath(other)
- if self == other:
- return True
+ if not isabs(other):
+ other = abspath(other)
+ if self == other:
+ return True
if not hasattr(os.path, "samefile"):
return False
- return py.error.checked_call(
- os.path.samefile, self.strpath, other)
-
- def remove(self, rec=1, ignore_errors=False):
- """ remove a file or directory (or a directory tree if rec=1).
- if ignore_errors is True, errors while removing directories will
- be ignored.
- """
- if self.check(dir=1, link=0):
- if rec:
- # force remove of readonly files on windows
- if iswin32:
+ return py.error.checked_call(
+ os.path.samefile, self.strpath, other)
+
+ def remove(self, rec=1, ignore_errors=False):
+ """ remove a file or directory (or a directory tree if rec=1).
+ if ignore_errors is True, errors while removing directories will
+ be ignored.
+ """
+ if self.check(dir=1, link=0):
+ if rec:
+ # force remove of readonly files on windows
+ if iswin32:
self.chmod(0o700, rec=1)
import shutil
py.error.checked_call(
shutil.rmtree, self.strpath,
- ignore_errors=ignore_errors)
- else:
- py.error.checked_call(os.rmdir, self.strpath)
- else:
- if iswin32:
+ ignore_errors=ignore_errors)
+ else:
+ py.error.checked_call(os.rmdir, self.strpath)
+ else:
+ if iswin32:
self.chmod(0o700)
- py.error.checked_call(os.remove, self.strpath)
-
- def computehash(self, hashtype="md5", chunksize=524288):
- """ return hexdigest of hashvalue for this file. """
- try:
- try:
- import hashlib as mod
- except ImportError:
- if hashtype == "sha1":
- hashtype = "sha"
- mod = __import__(hashtype)
- hash = getattr(mod, hashtype)()
- except (AttributeError, ImportError):
- raise ValueError("Don't know how to compute %r hash" %(hashtype,))
- f = self.open('rb')
- try:
- while 1:
- buf = f.read(chunksize)
- if not buf:
- return hash.hexdigest()
- hash.update(buf)
- finally:
- f.close()
-
- def new(self, **kw):
- """ create a modified version of this path.
- the following keyword arguments modify various path parts::
-
- a:/some/path/to/a/file.ext
- xx drive
- xxxxxxxxxxxxxxxxx dirname
- xxxxxxxx basename
- xxxx purebasename
- xxx ext
- """
- obj = object.__new__(self.__class__)
- if not kw:
- obj.strpath = self.strpath
- return obj
- drive, dirname, basename, purebasename,ext = self._getbyspec(
- "drive,dirname,basename,purebasename,ext")
- if 'basename' in kw:
- if 'purebasename' in kw or 'ext' in kw:
- raise ValueError("invalid specification %r" % kw)
- else:
- pb = kw.setdefault('purebasename', purebasename)
- try:
- ext = kw['ext']
- except KeyError:
- pass
- else:
- if ext and not ext.startswith('.'):
- ext = '.' + ext
- kw['basename'] = pb + ext
-
- if ('dirname' in kw and not kw['dirname']):
- kw['dirname'] = drive
- else:
- kw.setdefault('dirname', dirname)
- kw.setdefault('sep', self.sep)
- obj.strpath = normpath(
- "%(dirname)s%(sep)s%(basename)s" % kw)
- return obj
-
- def _getbyspec(self, spec):
- """ see new for what 'spec' can be. """
- res = []
- parts = self.strpath.split(self.sep)
-
- args = filter(None, spec.split(',') )
- append = res.append
- for name in args:
- if name == 'drive':
- append(parts[0])
- elif name == 'dirname':
- append(self.sep.join(parts[:-1]))
- else:
- basename = parts[-1]
- if name == 'basename':
- append(basename)
- else:
- i = basename.rfind('.')
- if i == -1:
- purebasename, ext = basename, ''
- else:
- purebasename, ext = basename[:i], basename[i:]
- if name == 'purebasename':
- append(purebasename)
- elif name == 'ext':
- append(ext)
- else:
- raise ValueError("invalid part specification %r" % name)
- return res
-
- def dirpath(self, *args, **kwargs):
- """ return the directory path joined with any given path arguments. """
- if not kwargs:
- path = object.__new__(self.__class__)
- path.strpath = dirname(self.strpath)
- if args:
- path = path.join(*args)
- return path
- return super(LocalPath, self).dirpath(*args, **kwargs)
-
- def join(self, *args, **kwargs):
- """ return a new path by appending all 'args' as path
- components. if abs=1 is used restart from root if any
- of the args is an absolute path.
- """
- sep = self.sep
+ py.error.checked_call(os.remove, self.strpath)
+
+ def computehash(self, hashtype="md5", chunksize=524288):
+ """ return hexdigest of hashvalue for this file. """
+ try:
+ try:
+ import hashlib as mod
+ except ImportError:
+ if hashtype == "sha1":
+ hashtype = "sha"
+ mod = __import__(hashtype)
+ hash = getattr(mod, hashtype)()
+ except (AttributeError, ImportError):
+ raise ValueError("Don't know how to compute %r hash" %(hashtype,))
+ f = self.open('rb')
+ try:
+ while 1:
+ buf = f.read(chunksize)
+ if not buf:
+ return hash.hexdigest()
+ hash.update(buf)
+ finally:
+ f.close()
+
+ def new(self, **kw):
+ """ create a modified version of this path.
+ the following keyword arguments modify various path parts::
+
+ a:/some/path/to/a/file.ext
+ xx drive
+ xxxxxxxxxxxxxxxxx dirname
+ xxxxxxxx basename
+ xxxx purebasename
+ xxx ext
+ """
+ obj = object.__new__(self.__class__)
+ if not kw:
+ obj.strpath = self.strpath
+ return obj
+ drive, dirname, basename, purebasename, ext = self._getbyspec(
+ "drive,dirname,basename,purebasename,ext")
+ if 'basename' in kw:
+ if 'purebasename' in kw or 'ext' in kw:
+ raise ValueError("invalid specification %r" % kw)
+ else:
+ pb = kw.setdefault('purebasename', purebasename)
+ try:
+ ext = kw['ext']
+ except KeyError:
+ pass
+ else:
+ if ext and not ext.startswith('.'):
+ ext = '.' + ext
+ kw['basename'] = pb + ext
+
+ if ('dirname' in kw and not kw['dirname']):
+ kw['dirname'] = drive
+ else:
+ kw.setdefault('dirname', dirname)
+ kw.setdefault('sep', self.sep)
+ obj.strpath = normpath(
+ "%(dirname)s%(sep)s%(basename)s" % kw)
+ return obj
+
+ def _getbyspec(self, spec):
+ """ see new for what 'spec' can be. """
+ res = []
+ parts = self.strpath.split(self.sep)
+
+ args = filter(None, spec.split(','))
+ append = res.append
+ for name in args:
+ if name == 'drive':
+ append(parts[0])
+ elif name == 'dirname':
+ append(self.sep.join(parts[:-1]))
+ else:
+ basename = parts[-1]
+ if name == 'basename':
+ append(basename)
+ else:
+ i = basename.rfind('.')
+ if i == -1:
+ purebasename, ext = basename, ''
+ else:
+ purebasename, ext = basename[:i], basename[i:]
+ if name == 'purebasename':
+ append(purebasename)
+ elif name == 'ext':
+ append(ext)
+ else:
+ raise ValueError("invalid part specification %r" % name)
+ return res
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path joined with any given path arguments. """
+ if not kwargs:
+ path = object.__new__(self.__class__)
+ path.strpath = dirname(self.strpath)
+ if args:
+ path = path.join(*args)
+ return path
+ return super(LocalPath, self).dirpath(*args, **kwargs)
+
+ def join(self, *args, **kwargs):
+ """ return a new path by appending all 'args' as path
+ components. if abs=1 is used restart from root if any
+ of the args is an absolute path.
+ """
+ sep = self.sep
strargs = [fspath(arg) for arg in args]
- strpath = self.strpath
- if kwargs.get('abs'):
- newargs = []
- for arg in reversed(strargs):
- if isabs(arg):
- strpath = arg
- strargs = newargs
- break
- newargs.insert(0, arg)
+ strpath = self.strpath
+ if kwargs.get('abs'):
+ newargs = []
+ for arg in reversed(strargs):
+ if isabs(arg):
+ strpath = arg
+ strargs = newargs
+ break
+ newargs.insert(0, arg)
# special case for when we have e.g. strpath == "/"
actual_sep = "" if strpath.endswith(sep) else sep
- for arg in strargs:
- arg = arg.strip(sep)
- if iswin32:
- # allow unix style paths even on windows.
- arg = arg.strip('/')
- arg = arg.replace('/', sep)
+ for arg in strargs:
+ arg = arg.strip(sep)
+ if iswin32:
+ # allow unix style paths even on windows.
+ arg = arg.strip('/')
+ arg = arg.replace('/', sep)
strpath = strpath + actual_sep + arg
actual_sep = sep
- obj = object.__new__(self.__class__)
- obj.strpath = normpath(strpath)
- return obj
-
- def open(self, mode='r', ensure=False, encoding=None):
- """ return an opened file with the given mode.
-
- If ensure is True, create parent directories if needed.
- """
- if ensure:
- self.dirpath().ensure(dir=1)
- if encoding:
- return py.error.checked_call(io.open, self.strpath, mode, encoding=encoding)
- return py.error.checked_call(open, self.strpath, mode)
-
- def _fastjoin(self, name):
- child = object.__new__(self.__class__)
- child.strpath = self.strpath + self.sep + name
- return child
-
- def islink(self):
- return islink(self.strpath)
-
- def check(self, **kw):
- if not kw:
- return exists(self.strpath)
- if len(kw) == 1:
- if "dir" in kw:
- return not kw["dir"] ^ isdir(self.strpath)
- if "file" in kw:
- return not kw["file"] ^ isfile(self.strpath)
- return super(LocalPath, self).check(**kw)
-
- _patternchars = set("*?[" + os.path.sep)
- def listdir(self, fil=None, sort=None):
- """ list directory contents, possibly filter by the given fil func
- and possibly sorted.
- """
- if fil is None and sort is None:
- names = py.error.checked_call(os.listdir, self.strpath)
- return map_as_list(self._fastjoin, names)
- if isinstance(fil, py.builtin._basestring):
- if not self._patternchars.intersection(fil):
- child = self._fastjoin(fil)
- if exists(child.strpath):
- return [child]
- return []
- fil = common.FNMatcher(fil)
- names = py.error.checked_call(os.listdir, self.strpath)
- res = []
- for name in names:
- child = self._fastjoin(name)
- if fil is None or fil(child):
- res.append(child)
- self._sortlist(res, sort)
- return res
-
- def size(self):
- """ return size of the underlying file object """
- return self.stat().size
-
- def mtime(self):
- """ return last modification time of the path. """
- return self.stat().mtime
-
+ obj = object.__new__(self.__class__)
+ obj.strpath = normpath(strpath)
+ return obj
+
+ def open(self, mode='r', ensure=False, encoding=None):
+ """ return an opened file with the given mode.
+
+ If ensure is True, create parent directories if needed.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ if encoding:
+ return py.error.checked_call(io.open, self.strpath, mode, encoding=encoding)
+ return py.error.checked_call(open, self.strpath, mode)
+
+ def _fastjoin(self, name):
+ child = object.__new__(self.__class__)
+ child.strpath = self.strpath + self.sep + name
+ return child
+
+ def islink(self):
+ return islink(self.strpath)
+
+ def check(self, **kw):
+ if not kw:
+ return exists(self.strpath)
+ if len(kw) == 1:
+ if "dir" in kw:
+ return not kw["dir"] ^ isdir(self.strpath)
+ if "file" in kw:
+ return not kw["file"] ^ isfile(self.strpath)
+ return super(LocalPath, self).check(**kw)
+
+ _patternchars = set("*?[" + os.path.sep)
+ def listdir(self, fil=None, sort=None):
+ """ list directory contents, possibly filter by the given fil func
+ and possibly sorted.
+ """
+ if fil is None and sort is None:
+ names = py.error.checked_call(os.listdir, self.strpath)
+ return map_as_list(self._fastjoin, names)
+ if isinstance(fil, py.builtin._basestring):
+ if not self._patternchars.intersection(fil):
+ child = self._fastjoin(fil)
+ if exists(child.strpath):
+ return [child]
+ return []
+ fil = common.FNMatcher(fil)
+ names = py.error.checked_call(os.listdir, self.strpath)
+ res = []
+ for name in names:
+ child = self._fastjoin(name)
+ if fil is None or fil(child):
+ res.append(child)
+ self._sortlist(res, sort)
+ return res
+
+ def size(self):
+ """ return size of the underlying file object """
+ return self.stat().size
+
+ def mtime(self):
+ """ return last modification time of the path. """
+ return self.stat().mtime
+
def copy(self, target, mode=False, stat=False):
""" copy path to target.
@@ -425,239 +425,239 @@ class LocalPath(FSBase):
If stat is True, copy permission, last modification
time, last access time, and flags from path to target.
"""
- if self.check(file=1):
- if target.check(dir=1):
- target = target.join(self.basename)
- assert self!=target
- copychunked(self, target)
- if mode:
+ if self.check(file=1):
+ if target.check(dir=1):
+ target = target.join(self.basename)
+ assert self != target
+ copychunked(self, target)
+ if mode:
copymode(self.strpath, target.strpath)
if stat:
copystat(self, target)
- else:
- def rec(p):
- return p.check(link=0)
- for x in self.visit(rec=rec):
- relpath = x.relto(self)
- newx = target.join(relpath)
- newx.dirpath().ensure(dir=1)
- if x.check(link=1):
- newx.mksymlinkto(x.readlink())
- continue
- elif x.check(file=1):
- copychunked(x, newx)
- elif x.check(dir=1):
- newx.ensure(dir=1)
- if mode:
+ else:
+ def rec(p):
+ return p.check(link=0)
+ for x in self.visit(rec=rec):
+ relpath = x.relto(self)
+ newx = target.join(relpath)
+ newx.dirpath().ensure(dir=1)
+ if x.check(link=1):
+ newx.mksymlinkto(x.readlink())
+ continue
+ elif x.check(file=1):
+ copychunked(x, newx)
+ elif x.check(dir=1):
+ newx.ensure(dir=1)
+ if mode:
copymode(x.strpath, newx.strpath)
if stat:
copystat(x, newx)
-
- def rename(self, target):
- """ rename this path to target. """
+
+ def rename(self, target):
+ """ rename this path to target. """
target = fspath(target)
- return py.error.checked_call(os.rename, self.strpath, target)
-
- def dump(self, obj, bin=1):
- """ pickle object into path location"""
- f = self.open('wb')
+ return py.error.checked_call(os.rename, self.strpath, target)
+
+ def dump(self, obj, bin=1):
+ """ pickle object into path location"""
+ f = self.open('wb')
import pickle
- try:
+ try:
py.error.checked_call(pickle.dump, obj, f, bin)
- finally:
- f.close()
-
- def mkdir(self, *args):
- """ create & return the directory joined with args. """
- p = self.join(*args)
+ finally:
+ f.close()
+
+ def mkdir(self, *args):
+ """ create & return the directory joined with args. """
+ p = self.join(*args)
py.error.checked_call(os.mkdir, fspath(p))
- return p
-
- def write_binary(self, data, ensure=False):
- """ write binary data into path. If ensure is True create
- missing parent directories.
- """
- if ensure:
- self.dirpath().ensure(dir=1)
- with self.open('wb') as f:
- f.write(data)
-
- def write_text(self, data, encoding, ensure=False):
- """ write text data into path using the specified encoding.
- If ensure is True create missing parent directories.
- """
- if ensure:
- self.dirpath().ensure(dir=1)
- with self.open('w', encoding=encoding) as f:
- f.write(data)
-
- def write(self, data, mode='w', ensure=False):
- """ write data into path. If ensure is True create
- missing parent directories.
- """
- if ensure:
- self.dirpath().ensure(dir=1)
- if 'b' in mode:
- if not py.builtin._isbytes(data):
- raise ValueError("can only process bytes")
- else:
- if not py.builtin._istext(data):
- if not py.builtin._isbytes(data):
- data = str(data)
- else:
- data = py.builtin._totext(data, sys.getdefaultencoding())
- f = self.open(mode)
- try:
- f.write(data)
- finally:
- f.close()
-
- def _ensuredirs(self):
- parent = self.dirpath()
- if parent == self:
- return self
- if parent.check(dir=0):
- parent._ensuredirs()
- if self.check(dir=0):
- try:
- self.mkdir()
- except py.error.EEXIST:
- # race condition: file/dir created by another thread/process.
- # complain if it is not a dir
- if self.check(dir=0):
- raise
- return self
-
- def ensure(self, *args, **kwargs):
- """ ensure that an args-joined path exists (by default as
- a file). if you specify a keyword argument 'dir=True'
- then the path is forced to be a directory path.
- """
- p = self.join(*args)
- if kwargs.get('dir', 0):
- return p._ensuredirs()
- else:
- p.dirpath()._ensuredirs()
- if not p.check(file=1):
- p.open('w').close()
- return p
-
- def stat(self, raising=True):
- """ Return an os.stat() tuple. """
- if raising == True:
- return Stat(self, py.error.checked_call(os.stat, self.strpath))
- try:
- return Stat(self, os.stat(self.strpath))
- except KeyboardInterrupt:
- raise
- except Exception:
- return None
-
- def lstat(self):
- """ Return an os.lstat() tuple. """
- return Stat(self, py.error.checked_call(os.lstat, self.strpath))
-
- def setmtime(self, mtime=None):
- """ set modification time for the given path. if 'mtime' is None
- (the default) then the file's mtime is set to current time.
-
- Note that the resolution for 'mtime' is platform dependent.
- """
- if mtime is None:
- return py.error.checked_call(os.utime, self.strpath, mtime)
- try:
- return py.error.checked_call(os.utime, self.strpath, (-1, mtime))
- except py.error.EINVAL:
- return py.error.checked_call(os.utime, self.strpath, (self.atime(), mtime))
-
- def chdir(self):
- """ change directory to self and return old current directory """
- try:
- old = self.__class__()
- except py.error.ENOENT:
- old = None
- py.error.checked_call(os.chdir, self.strpath)
- return old
-
-
- @contextmanager
- def as_cwd(self):
- """
+ return p
+
+ def write_binary(self, data, ensure=False):
+ """ write binary data into path. If ensure is True create
+ missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ with self.open('wb') as f:
+ f.write(data)
+
+ def write_text(self, data, encoding, ensure=False):
+ """ write text data into path using the specified encoding.
+ If ensure is True create missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ with self.open('w', encoding=encoding) as f:
+ f.write(data)
+
+ def write(self, data, mode='w', ensure=False):
+        """ write data into path. If ensure is True, create
+ missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ if 'b' in mode:
+ if not py.builtin._isbytes(data):
+ raise ValueError("can only process bytes")
+ else:
+ if not py.builtin._istext(data):
+ if not py.builtin._isbytes(data):
+ data = str(data)
+ else:
+ data = py.builtin._totext(data, sys.getdefaultencoding())
+ f = self.open(mode)
+ try:
+ f.write(data)
+ finally:
+ f.close()
+
+ def _ensuredirs(self):
+ parent = self.dirpath()
+ if parent == self:
+ return self
+ if parent.check(dir=0):
+ parent._ensuredirs()
+ if self.check(dir=0):
+ try:
+ self.mkdir()
+ except py.error.EEXIST:
+ # race condition: file/dir created by another thread/process.
+ # complain if it is not a dir
+ if self.check(dir=0):
+ raise
+ return self
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+        a file). If you specify a keyword argument 'dir=True'
+ then the path is forced to be a directory path.
+ """
+ p = self.join(*args)
+ if kwargs.get('dir', 0):
+ return p._ensuredirs()
+ else:
+ p.dirpath()._ensuredirs()
+ if not p.check(file=1):
+ p.open('w').close()
+ return p
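    # A minimal usage sketch of ensure (hypothetical names; assumes the py lib
    # is importable):
    #
    #   import py
    #   tmp = py.path.local.mkdtemp()
    #   cfg = tmp.ensure('etc', 'app.conf')          # creates etc/ and an empty file
    #   logdir = tmp.ensure('var', 'log', dir=True)  # dir=True forces a directory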
+
+ def stat(self, raising=True):
+ """ Return an os.stat() tuple. """
+        if raising:
+ return Stat(self, py.error.checked_call(os.stat, self.strpath))
+ try:
+ return Stat(self, os.stat(self.strpath))
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ return None
+
+ def lstat(self):
+ """ Return an os.lstat() tuple. """
+ return Stat(self, py.error.checked_call(os.lstat, self.strpath))
+
+ def setmtime(self, mtime=None):
+        """ set modification time for the given path. If 'mtime' is None
+        (the default) then the file's mtime is set to the current time.
+
+ Note that the resolution for 'mtime' is platform dependent.
+ """
+ if mtime is None:
+ return py.error.checked_call(os.utime, self.strpath, mtime)
+ try:
+ return py.error.checked_call(os.utime, self.strpath, (-1, mtime))
+ except py.error.EINVAL:
+ return py.error.checked_call(os.utime, self.strpath, (self.atime(), mtime))
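    # Sketch: touching a file's mtime (illustrative; resolution is platform
    # dependent):
    #
    #   p = py.path.local.mkdtemp().ensure('stamp')
    #   p.setmtime()    # set to the current time
    #   p.setmtime(0)   # set to the epoch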
+
+ def chdir(self):
+ """ change directory to self and return old current directory """
+ try:
+ old = self.__class__()
+ except py.error.ENOENT:
+ old = None
+ py.error.checked_call(os.chdir, self.strpath)
+ return old
+
+
+ @contextmanager
+ def as_cwd(self):
+ """
Return a context manager, which changes to the path's dir during the
managed "with" context.
On __enter__ it returns the old dir, which might be ``None``.
"""
- old = self.chdir()
- try:
- yield old
- finally:
+ old = self.chdir()
+ try:
+ yield old
+ finally:
if old is not None:
old.chdir()
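    # Sketch: run a block with the path as the working directory; the old cwd
    # is restored afterwards:
    #
    #   with py.path.local.mkdtemp().as_cwd() as old:
    #       pass  # os.getcwd() is now the temp dir; `old` may be None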
-
- def realpath(self):
- """ return a new path which contains no symbolic links."""
- return self.__class__(os.path.realpath(self.strpath))
-
- def atime(self):
- """ return last access time of the path. """
- return self.stat().atime
-
- def __repr__(self):
- return 'local(%r)' % self.strpath
-
- def __str__(self):
- """ return string representation of the Path. """
- return self.strpath
-
- def chmod(self, mode, rec=0):
- """ change permissions to the given mode. If mode is an
- integer it directly encodes the os-specific modes.
- if rec is True perform recursively.
- """
- if not isinstance(mode, int):
- raise TypeError("mode %r must be an integer" % (mode,))
- if rec:
- for x in self.visit(rec=rec):
- py.error.checked_call(os.chmod, str(x), mode)
+
+ def realpath(self):
+ """ return a new path which contains no symbolic links."""
+ return self.__class__(os.path.realpath(self.strpath))
+
+ def atime(self):
+ """ return last access time of the path. """
+ return self.stat().atime
+
+ def __repr__(self):
+ return 'local(%r)' % self.strpath
+
+ def __str__(self):
+ """ return string representation of the Path. """
+ return self.strpath
+
+ def chmod(self, mode, rec=0):
+ """ change permissions to the given mode. If mode is an
+ integer it directly encodes the os-specific modes.
+ if rec is True perform recursively.
+ """
+ if not isinstance(mode, int):
+ raise TypeError("mode %r must be an integer" % (mode,))
+ if rec:
+ for x in self.visit(rec=rec):
+ py.error.checked_call(os.chmod, str(x), mode)
py.error.checked_call(os.chmod, self.strpath, mode)
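    # Sketch: mode is an octal int, optionally applied to the whole tree:
    #
    #   d = py.path.local.mkdtemp()
    #   d.chmod(0o700, rec=1)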
-
- def pypkgpath(self):
- """ return the Python package path by looking for the last
- directory upwards which still contains an __init__.py.
-        Return None if a pkgpath cannot be determined.
- """
- pkgpath = None
- for parent in self.parts(reverse=True):
- if parent.isdir():
- if not parent.join('__init__.py').exists():
- break
- if not isimportable(parent.basename):
- break
- pkgpath = parent
- return pkgpath
-
- def _ensuresyspath(self, ensuremode, path):
- if ensuremode:
- s = str(path)
- if ensuremode == "append":
- if s not in sys.path:
- sys.path.append(s)
- else:
- if s != sys.path[0]:
- sys.path.insert(0, s)
-
- def pyimport(self, modname=None, ensuresyspath=True):
- """ return path as an imported python module.
-
- If modname is None, look for the containing package
-        and construct a corresponding module name.
- The module will be put/looked up in sys.modules.
- if ensuresyspath is True then the root dir for importing
- the file (taking __init__.py files into account) will
- be prepended to sys.path if it isn't there already.
- If ensuresyspath=="append" the root dir will be appended
- if it isn't already contained in sys.path.
- if ensuresyspath is False no modification of syspath happens.
+
+ def pypkgpath(self):
+ """ return the Python package path by looking for the last
+ directory upwards which still contains an __init__.py.
+        Return None if a pkgpath cannot be determined.
+ """
+ pkgpath = None
+ for parent in self.parts(reverse=True):
+ if parent.isdir():
+ if not parent.join('__init__.py').exists():
+ break
+ if not isimportable(parent.basename):
+ break
+ pkgpath = parent
+ return pkgpath
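    # Sketch of the lookup, for a hypothetical layout where pkg/ and pkg/sub/
    # both contain an __init__.py:
    #
    #   py.path.local('/src/pkg/sub/mod.py').pypkgpath()   # -> local('/src/pkg')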
+
+ def _ensuresyspath(self, ensuremode, path):
+ if ensuremode:
+ s = str(path)
+ if ensuremode == "append":
+ if s not in sys.path:
+ sys.path.append(s)
+ else:
+ if s != sys.path[0]:
+ sys.path.insert(0, s)
+
+ def pyimport(self, modname=None, ensuresyspath=True):
+ """ return path as an imported python module.
+
+ If modname is None, look for the containing package
+        and construct a corresponding module name.
+ The module will be put/looked up in sys.modules.
+ if ensuresyspath is True then the root dir for importing
+ the file (taking __init__.py files into account) will
+ be prepended to sys.path if it isn't there already.
+ If ensuresyspath=="append" the root dir will be appended
+ if it isn't already contained in sys.path.
+ if ensuresyspath is False no modification of syspath happens.
        The special value ensuresyspath=="importlib" is intended
        purely for use in pytest; it is capable only of importing
@@ -666,10 +666,10 @@ class LocalPath(FSBase):
same-named test modules in different places and offers
mild opt-in via this option. Note that it works only in
recent versions of python.
- """
- if not self.check():
- raise py.error.ENOENT(self)
-
+ """
+ if not self.check():
+ raise py.error.ENOENT(self)
+
if ensuresyspath == 'importlib':
if modname is None:
modname = self.purebasename
@@ -687,184 +687,184 @@ class LocalPath(FSBase):
spec.loader.exec_module(mod)
return mod
- pkgpath = None
- if modname is None:
- pkgpath = self.pypkgpath()
- if pkgpath is not None:
- pkgroot = pkgpath.dirpath()
- names = self.new(ext="").relto(pkgroot).split(self.sep)
- if names[-1] == "__init__":
- names.pop()
- modname = ".".join(names)
- else:
- pkgroot = self.dirpath()
- modname = self.purebasename
-
- self._ensuresyspath(ensuresyspath, pkgroot)
- __import__(modname)
- mod = sys.modules[modname]
- if self.basename == "__init__.py":
- return mod # we don't check anything as we might
+ pkgpath = None
+ if modname is None:
+ pkgpath = self.pypkgpath()
+ if pkgpath is not None:
+ pkgroot = pkgpath.dirpath()
+ names = self.new(ext="").relto(pkgroot).split(self.sep)
+ if names[-1] == "__init__":
+ names.pop()
+ modname = ".".join(names)
+ else:
+ pkgroot = self.dirpath()
+ modname = self.purebasename
+
+ self._ensuresyspath(ensuresyspath, pkgroot)
+ __import__(modname)
+ mod = sys.modules[modname]
+ if self.basename == "__init__.py":
+ return mod # we don't check anything as we might
# be in a namespace package ... too icky to check
- modfile = mod.__file__
- if modfile[-4:] in ('.pyc', '.pyo'):
- modfile = modfile[:-1]
- elif modfile.endswith('$py.class'):
- modfile = modfile[:-9] + '.py'
- if modfile.endswith(os.path.sep + "__init__.py"):
- if self.basename != "__init__.py":
- modfile = modfile[:-12]
- try:
- issame = self.samefile(modfile)
- except py.error.ENOENT:
- issame = False
- if not issame:
+ modfile = mod.__file__
+ if modfile[-4:] in ('.pyc', '.pyo'):
+ modfile = modfile[:-1]
+ elif modfile.endswith('$py.class'):
+ modfile = modfile[:-9] + '.py'
+ if modfile.endswith(os.path.sep + "__init__.py"):
+ if self.basename != "__init__.py":
+ modfile = modfile[:-12]
+ try:
+ issame = self.samefile(modfile)
+ except py.error.ENOENT:
+ issame = False
+ if not issame:
ignore = os.getenv('PY_IGNORE_IMPORTMISMATCH')
if ignore != '1':
raise self.ImportMismatchError(modname, modfile, self)
- return mod
- else:
- try:
- return sys.modules[modname]
- except KeyError:
- # we have a custom modname, do a pseudo-import
+ return mod
+ else:
+ try:
+ return sys.modules[modname]
+ except KeyError:
+ # we have a custom modname, do a pseudo-import
import types
mod = types.ModuleType(modname)
- mod.__file__ = str(self)
- sys.modules[modname] = mod
- try:
- py.builtin.execfile(str(self), mod.__dict__)
- except:
- del sys.modules[modname]
- raise
- return mod
-
- def sysexec(self, *argv, **popen_opts):
- """ return stdout text from executing a system child process,
-        where the 'self' path points to the executable.
- The process is directly invoked and not through a system shell.
- """
- from subprocess import Popen, PIPE
- argv = map_as_list(str, argv)
- popen_opts['stdout'] = popen_opts['stderr'] = PIPE
- proc = Popen([str(self)] + argv, **popen_opts)
- stdout, stderr = proc.communicate()
- ret = proc.wait()
- if py.builtin._isbytes(stdout):
- stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
- if ret != 0:
- if py.builtin._isbytes(stderr):
- stderr = py.builtin._totext(stderr, sys.getdefaultencoding())
- raise py.process.cmdexec.Error(ret, ret, str(self),
- stdout, stderr,)
- return stdout
-
- def sysfind(cls, name, checker=None, paths=None):
-        """ return a path object found by looking at the system's
-        underlying PATH specification. If the checker is not None
-        it will be invoked to filter matching paths. If a binary
-        cannot be found, None is returned.
-        Note: This probably does not work on plain win32 systems
-        but may work on cygwin.
- """
- if isabs(name):
- p = py.path.local(name)
- if p.check(file=1):
- return p
- else:
- if paths is None:
- if iswin32:
+ mod.__file__ = str(self)
+ sys.modules[modname] = mod
+ try:
+ py.builtin.execfile(str(self), mod.__dict__)
+ except:
+ del sys.modules[modname]
+ raise
+ return mod
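    # Sketch: importing a file as a module (paths and the mode are illustrative):
    #
    #   mod = py.path.local('/src/pkg/sub/mod.py').pyimport()
    #   other = py.path.local('script.py').pyimport(ensuresyspath='append')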
+
+ def sysexec(self, *argv, **popen_opts):
+ """ return stdout text from executing a system child process,
+        where the 'self' path points to the executable.
+ The process is directly invoked and not through a system shell.
+ """
+ from subprocess import Popen, PIPE
+ argv = map_as_list(str, argv)
+ popen_opts['stdout'] = popen_opts['stderr'] = PIPE
+ proc = Popen([str(self)] + argv, **popen_opts)
+ stdout, stderr = proc.communicate()
+ ret = proc.wait()
+ if py.builtin._isbytes(stdout):
+ stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
+ if ret != 0:
+ if py.builtin._isbytes(stderr):
+ stderr = py.builtin._totext(stderr, sys.getdefaultencoding())
+ raise py.process.cmdexec.Error(ret, ret, str(self),
+ stdout, stderr,)
+ return stdout
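    # Sketch: invoke an executable path directly, no shell involved (assumes a
    # POSIX system):
    #
    #   out = py.path.local('/bin/echo').sysexec('hello')   # -> 'hello\n'
    #   # a non-zero exit status raises py.process.cmdexec.Error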
+
+ def sysfind(cls, name, checker=None, paths=None):
+        """ return a path object found by looking at the system's
+        underlying PATH specification. If the checker is not None
+        it will be invoked to filter matching paths. If a binary
+        cannot be found, None is returned.
+        Note: This probably does not work on plain win32 systems
+        but may work on cygwin.
+ """
+ if isabs(name):
+ p = py.path.local(name)
+ if p.check(file=1):
+ return p
+ else:
+ if paths is None:
+ if iswin32:
paths = os.environ['Path'].split(';')
- if '' not in paths and '.' not in paths:
- paths.append('.')
- try:
- systemroot = os.environ['SYSTEMROOT']
- except KeyError:
- pass
- else:
+ if '' not in paths and '.' not in paths:
+ paths.append('.')
+ try:
+ systemroot = os.environ['SYSTEMROOT']
+ except KeyError:
+ pass
+ else:
paths = [path.replace('%SystemRoot%', systemroot)
- for path in paths]
- else:
+ for path in paths]
+ else:
paths = os.environ['PATH'].split(':')
- tryadd = []
- if iswin32:
- tryadd += os.environ['PATHEXT'].split(os.pathsep)
- tryadd.append("")
-
- for x in paths:
- for addext in tryadd:
- p = py.path.local(x).join(name, abs=True) + addext
- try:
- if p.check(file=1):
- if checker:
- if not checker(p):
- continue
- return p
- except py.error.EACCES:
- pass
- return None
- sysfind = classmethod(sysfind)
-
- def _gethomedir(cls):
- try:
- x = os.environ['HOME']
- except KeyError:
- try:
- x = os.environ["HOMEDRIVE"] + os.environ['HOMEPATH']
- except KeyError:
- return None
- return cls(x)
- _gethomedir = classmethod(_gethomedir)
-
+ tryadd = []
+ if iswin32:
+ tryadd += os.environ['PATHEXT'].split(os.pathsep)
+ tryadd.append("")
+
+ for x in paths:
+ for addext in tryadd:
+ p = py.path.local(x).join(name, abs=True) + addext
+ try:
+ if p.check(file=1):
+ if checker:
+ if not checker(p):
+ continue
+ return p
+ except py.error.EACCES:
+ pass
+ return None
+ sysfind = classmethod(sysfind)
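    # Sketch: locate a binary on PATH, optionally restricted to given dirs
    # (directory below is illustrative):
    #
    #   svn = py.path.local.sysfind('svn')          # None when not installed
    #   svn = py.path.local.sysfind('svn', paths=['/usr/local/bin'])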
+
+ def _gethomedir(cls):
+ try:
+ x = os.environ['HOME']
+ except KeyError:
+ try:
+ x = os.environ["HOMEDRIVE"] + os.environ['HOMEPATH']
+ except KeyError:
+ return None
+ return cls(x)
+ _gethomedir = classmethod(_gethomedir)
+
# """
# special class constructors for local filesystem paths
# """
@classmethod
- def get_temproot(cls):
- """ return the system's temporary directory
-        (where tempfiles are usually created)
- """
+ def get_temproot(cls):
+ """ return the system's temporary directory
+        (where tempfiles are usually created)
+ """
import tempfile
return py.path.local(tempfile.gettempdir())
-
+
@classmethod
- def mkdtemp(cls, rootdir=None):
- """ return a Path object pointing to a fresh new temporary directory
-        """ return a Path object pointing to a fresh temporary directory
-        (which we created ourselves).
- import tempfile
- if rootdir is None:
- rootdir = cls.get_temproot()
- return cls(py.error.checked_call(tempfile.mkdtemp, dir=str(rootdir)))
-
- def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3,
+ def mkdtemp(cls, rootdir=None):
+        """ return a Path object pointing to a fresh temporary directory
+        (which we created ourselves).
+ """
+ import tempfile
+ if rootdir is None:
+ rootdir = cls.get_temproot()
+ return cls(py.error.checked_call(tempfile.mkdtemp, dir=str(rootdir)))
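    # Sketch: a fresh temp dir under the system temp root:
    #
    #   root = py.path.local.get_temproot()
    #   work = py.path.local.mkdtemp(rootdir=root)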
+
+ def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3,
lock_timeout=172800): # two days
-        """ return a unique directory with a number greater than the current
-        maximum one. The number is assumed to start directly after prefix.
-        If keep is true, directories with a number less than (maxnum-keep)
+        """ return a unique directory with a number greater than the current
+        maximum one. The number is assumed to start directly after prefix.
+        If keep is true, directories with a number less than (maxnum-keep)
will be removed. If .lock files are used (lock_timeout non-zero),
algorithm is multi-process safe.
- """
- if rootdir is None:
- rootdir = cls.get_temproot()
-
+ """
+ if rootdir is None:
+ rootdir = cls.get_temproot()
+
nprefix = prefix.lower()
- def parse_num(path):
- """ parse the number out of a path (if it matches the prefix) """
+ def parse_num(path):
+ """ parse the number out of a path (if it matches the prefix) """
nbasename = path.basename.lower()
if nbasename.startswith(nprefix):
- try:
+ try:
return int(nbasename[len(nprefix):])
- except ValueError:
- pass
-
+ except ValueError:
+ pass
+
def create_lockfile(path):
""" exclusively create lockfile. Throws when failed """
- mypid = os.getpid()
+ mypid = os.getpid()
lockfile = path.join('.lock')
- if hasattr(lockfile, 'mksymlinkto'):
- lockfile.mksymlinkto(str(mypid))
- else:
+ if hasattr(lockfile, 'mksymlinkto'):
+ lockfile.mksymlinkto(str(mypid))
+ else:
fd = py.error.checked_call(os.open, str(lockfile), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644)
with os.fdopen(fd, 'w') as f:
f.write(str(mypid))
@@ -873,20 +873,20 @@ class LocalPath(FSBase):
def atexit_remove_lockfile(lockfile):
""" ensure lockfile is removed at process exit """
mypid = os.getpid()
- def try_remove_lockfile():
- # in a fork() situation, only the last process should
- # remove the .lock, otherwise the other processes run the
- # risk of seeing their temporary dir disappear. For now
- # we remove the .lock in the parent only (i.e. we assume
- # that the children finish before the parent).
- if os.getpid() != mypid:
- return
- try:
- lockfile.remove()
- except py.error.Error:
- pass
- atexit.register(try_remove_lockfile)
-
+ def try_remove_lockfile():
+ # in a fork() situation, only the last process should
+ # remove the .lock, otherwise the other processes run the
+ # risk of seeing their temporary dir disappear. For now
+ # we remove the .lock in the parent only (i.e. we assume
+ # that the children finish before the parent).
+ if os.getpid() != mypid:
+ return
+ try:
+ lockfile.remove()
+ except py.error.Error:
+ pass
+ atexit.register(try_remove_lockfile)
+
# compute the maximum number currently in use with the prefix
lastmax = None
while True:
@@ -931,13 +931,13 @@ class LocalPath(FSBase):
bn = path.basename
return bn.startswith(garbage_prefix)
- # prune old directories
+ # prune old directories
udir_time = get_mtime(udir)
if keep and udir_time:
- for path in rootdir.listdir():
- num = parse_num(path)
- if num is not None and num <= (maxnum - keep):
- try:
+ for path in rootdir.listdir():
+ num = parse_num(path)
+ if num is not None and num <= (maxnum - keep):
+ try:
# try acquiring lock to remove directory as exclusive user
if lock_timeout:
create_lockfile(path)
@@ -955,7 +955,7 @@ class LocalPath(FSBase):
# and scheduled for removal to avoid another thread/process
# treating it as a new directory or removal candidate
garbage_path = rootdir.join(garbage_prefix + str(uuid.uuid4()))
- try:
+ try:
path.rename(garbage_path)
garbage_path.remove(rec=1)
except KeyboardInterrupt:
@@ -964,41 +964,41 @@ class LocalPath(FSBase):
pass
if is_garbage(path):
try:
- path.remove(rec=1)
- except KeyboardInterrupt:
- raise
- except: # this might be py.error.Error, WindowsError ...
- pass
-
- # make link...
- try:
- username = os.environ['USER'] #linux, et al
- except KeyError:
- try:
- username = os.environ['USERNAME'] #windows
- except KeyError:
- username = 'current'
-
- src = str(udir)
- dest = src[:src.rfind('-')] + '-' + username
- try:
- os.unlink(dest)
- except OSError:
- pass
- try:
- os.symlink(src, dest)
- except (OSError, AttributeError, NotImplementedError):
- pass
-
- return udir
- make_numbered_dir = classmethod(make_numbered_dir)
-
-
-def copymode(src, dest):
+ path.remove(rec=1)
+ except KeyboardInterrupt:
+ raise
+ except: # this might be py.error.Error, WindowsError ...
+ pass
+
+ # make link...
+ try:
+ username = os.environ['USER'] #linux, et al
+ except KeyError:
+ try:
+ username = os.environ['USERNAME'] #windows
+ except KeyError:
+ username = 'current'
+
+ src = str(udir)
+ dest = src[:src.rfind('-')] + '-' + username
+ try:
+ os.unlink(dest)
+ except OSError:
+ pass
+ try:
+ os.symlink(src, dest)
+ except (OSError, AttributeError, NotImplementedError):
+ pass
+
+ return udir
+ make_numbered_dir = classmethod(make_numbered_dir)
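    # Sketch: numbered per-session dirs such as /tmp/run-0, /tmp/run-1, ...;
    # with keep=3 only the three newest survive, and the .lock files make the
    # scheme multi-process safe:
    #
    #   udir = py.path.local.make_numbered_dir(prefix='run-', keep=3)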
+
+
+def copymode(src, dest):
""" copy permission from src to dst. """
import shutil
shutil.copymode(src, dest)
-
+
def copystat(src, dest):
""" copy permission, last modification time,
@@ -1007,24 +1007,24 @@ def copystat(src, dest):
shutil.copystat(str(src), str(dest))
-def copychunked(src, dest):
+def copychunked(src, dest):
chunksize = 524288 # half a meg of bytes
- fsrc = src.open('rb')
- try:
- fdest = dest.open('wb')
- try:
- while 1:
- buf = fsrc.read(chunksize)
- if not buf:
- break
- fdest.write(buf)
- finally:
- fdest.close()
- finally:
- fsrc.close()
-
-
-def isimportable(name):
- if name and (name[0].isalpha() or name[0] == '_'):
- name = name.replace("_", '')
- return not name or name.isalnum()
+ fsrc = src.open('rb')
+ try:
+ fdest = dest.open('wb')
+ try:
+ while 1:
+ buf = fsrc.read(chunksize)
+ if not buf:
+ break
+ fdest.write(buf)
+ finally:
+ fdest.close()
+ finally:
+ fsrc.close()
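# Sketch: chunked binary copy between two local paths (file names are
# illustrative):
#
#   copychunked(py.path.local('in.bin'), py.path.local('out.bin'))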
+
+
+def isimportable(name):
+ if name and (name[0].isalpha() or name[0] == '_'):
+ name = name.replace("_", '')
+ return not name or name.isalnum()
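# Sketch: only identifier-like names count as importable:
#
#   isimportable('my_pkg')   # -> True
#   isimportable('my-pkg')   # -> False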
diff --git a/contrib/python/py/py/_path/svnurl.py b/contrib/python/py/py/_path/svnurl.py
index 586d97d62d..6589a71d09 100644
--- a/contrib/python/py/py/_path/svnurl.py
+++ b/contrib/python/py/py/_path/svnurl.py
@@ -1,380 +1,380 @@
-"""
-module defining a subversion path object based on the external
-command 'svn'. This module aims to work with svn 1.3 and higher
-but might also interact well with earlier versions.
-"""
-
-import os, sys, time, re
-import py
-from py import path, process
-from py._path import common
-from py._path import svnwc as svncommon
-from py._path.cacheutil import BuildcostAccessCache, AgingCache
-
-DEBUG=False
-
-class SvnCommandPath(svncommon.SvnPathBase):
- """ path implementation that offers access to (possibly remote) subversion
- repositories. """
-
- _lsrevcache = BuildcostAccessCache(maxentries=128)
- _lsnorevcache = AgingCache(maxentries=1000, maxseconds=60.0)
-
- def __new__(cls, path, rev=None, auth=None):
- self = object.__new__(cls)
- if isinstance(path, cls):
- rev = path.rev
- auth = path.auth
- path = path.strpath
- svncommon.checkbadchars(path)
- path = path.rstrip('/')
- self.strpath = path
- self.rev = rev
- self.auth = auth
- return self
-
- def __repr__(self):
- if self.rev == -1:
- return 'svnurl(%r)' % self.strpath
- else:
- return 'svnurl(%r, %r)' % (self.strpath, self.rev)
-
- def _svnwithrev(self, cmd, *args):
- """ execute an svn command, append our own url and revision """
- if self.rev is None:
- return self._svnwrite(cmd, *args)
- else:
- args = ['-r', self.rev] + list(args)
- return self._svnwrite(cmd, *args)
-
- def _svnwrite(self, cmd, *args):
- """ execute an svn command, append our own url """
- l = ['svn %s' % cmd]
- args = ['"%s"' % self._escape(item) for item in args]
- l.extend(args)
- l.append('"%s"' % self._encodedurl())
- # fixing the locale because we can't otherwise parse
- string = " ".join(l)
- if DEBUG:
- print("execing %s" % string)
- out = self._svncmdexecauth(string)
- return out
-
- def _svncmdexecauth(self, cmd):
- """ execute an svn command 'as is' """
- cmd = svncommon.fixlocale() + cmd
- if self.auth is not None:
- cmd += ' ' + self.auth.makecmdoptions()
- return self._cmdexec(cmd)
-
- def _cmdexec(self, cmd):
- try:
- out = process.cmdexec(cmd)
- except py.process.cmdexec.Error:
- e = sys.exc_info()[1]
- if (e.err.find('File Exists') != -1 or
- e.err.find('File already exists') != -1):
- raise py.error.EEXIST(self)
- raise
- return out
-
- def _svnpopenauth(self, cmd):
-        """ execute an svn command, return a pipe for reading its stdout """
- cmd = svncommon.fixlocale() + cmd
- if self.auth is not None:
- cmd += ' ' + self.auth.makecmdoptions()
- return self._popen(cmd)
-
- def _popen(self, cmd):
- return os.popen(cmd)
-
- def _encodedurl(self):
- return self._escape(self.strpath)
-
- def _norev_delentry(self, path):
- auth = self.auth and self.auth.makecmdoptions() or None
- self._lsnorevcache.delentry((str(path), auth))
-
- def open(self, mode='r'):
- """ return an opened file with the given mode. """
- if mode not in ("r", "rU",):
- raise ValueError("mode %r not supported" % (mode,))
- assert self.check(file=1) # svn cat returns an empty file otherwise
- if self.rev is None:
- return self._svnpopenauth('svn cat "%s"' % (
- self._escape(self.strpath), ))
- else:
- return self._svnpopenauth('svn cat -r %s "%s"' % (
- self.rev, self._escape(self.strpath)))
-
- def dirpath(self, *args, **kwargs):
- """ return the directory path of the current path joined
- with any given path arguments.
- """
- l = self.strpath.split(self.sep)
- if len(l) < 4:
- raise py.error.EINVAL(self, "base is not valid")
- elif len(l) == 4:
- return self.join(*args, **kwargs)
- else:
- return self.new(basename='').join(*args, **kwargs)
-
- # modifying methods (cache must be invalidated)
- def mkdir(self, *args, **kwargs):
- """ create & return the directory joined with args.
- pass a 'msg' keyword argument to set the commit message.
- """
- commit_msg = kwargs.get('msg', "mkdir by py lib invocation")
- createpath = self.join(*args)
- createpath._svnwrite('mkdir', '-m', commit_msg)
- self._norev_delentry(createpath.dirpath())
- return createpath
-
- def copy(self, target, msg='copied by py lib invocation'):
- """ copy path to target with checkin message msg."""
- if getattr(target, 'rev', None) is not None:
- raise py.error.EINVAL(target, "revisions are immutable")
- self._svncmdexecauth('svn copy -m "%s" "%s" "%s"' %(msg,
- self._escape(self), self._escape(target)))
- self._norev_delentry(target.dirpath())
-
- def rename(self, target, msg="renamed by py lib invocation"):
- """ rename this path to target with checkin message msg. """
- if getattr(self, 'rev', None) is not None:
- raise py.error.EINVAL(self, "revisions are immutable")
- self._svncmdexecauth('svn move -m "%s" --force "%s" "%s"' %(
- msg, self._escape(self), self._escape(target)))
- self._norev_delentry(self.dirpath())
- self._norev_delentry(self)
-
- def remove(self, rec=1, msg='removed by py lib invocation'):
- """ remove a file or directory (or a directory tree if rec=1) with
-checkin message msg."""
- if self.rev is not None:
- raise py.error.EINVAL(self, "revisions are immutable")
- self._svncmdexecauth('svn rm -m "%s" "%s"' %(msg, self._escape(self)))
- self._norev_delentry(self.dirpath())
-
- def export(self, topath):
- """ export to a local path
-
-        topath should not exist prior to calling this; returns a
- py.path.local instance
- """
- topath = py.path.local(topath)
- args = ['"%s"' % (self._escape(self),),
- '"%s"' % (self._escape(topath),)]
- if self.rev is not None:
- args = ['-r', str(self.rev)] + args
- self._svncmdexecauth('svn export %s' % (' '.join(args),))
- return topath
-
- def ensure(self, *args, **kwargs):
- """ ensure that an args-joined path exists (by default as
- a file). If you specify a keyword argument 'dir=True'
- then the path is forced to be a directory path.
- """
- if getattr(self, 'rev', None) is not None:
- raise py.error.EINVAL(self, "revisions are immutable")
- target = self.join(*args)
- dir = kwargs.get('dir', 0)
- for x in target.parts(reverse=True):
- if x.check():
- break
- else:
-            raise py.error.ENOENT(target, "has no valid base!")
- if x == target:
- if not x.check(dir=dir):
- raise dir and py.error.ENOTDIR(x) or py.error.EISDIR(x)
- return x
- tocreate = target.relto(x)
- basename = tocreate.split(self.sep, 1)[0]
- tempdir = py.path.local.mkdtemp()
- try:
- tempdir.ensure(tocreate, dir=dir)
- cmd = 'svn import -m "%s" "%s" "%s"' % (
- "ensure %s" % self._escape(tocreate),
- self._escape(tempdir.join(basename)),
- x.join(basename)._encodedurl())
- self._svncmdexecauth(cmd)
- self._norev_delentry(x)
- finally:
- tempdir.remove()
- return target
-
- # end of modifying methods
- def _propget(self, name):
- res = self._svnwithrev('propget', name)
- return res[:-1] # strip trailing newline
-
- def _proplist(self):
- res = self._svnwithrev('proplist')
- lines = res.split('\n')
- lines = [x.strip() for x in lines[1:]]
- return svncommon.PropListDict(self, lines)
-
- def info(self):
- """ return an Info structure with svn-provided information. """
- parent = self.dirpath()
- nameinfo_seq = parent._listdir_nameinfo()
- bn = self.basename
- for name, info in nameinfo_seq:
- if name == bn:
- return info
- raise py.error.ENOENT(self)
-
-
- def _listdir_nameinfo(self):
- """ return sequence of name-info directory entries of self """
- def builder():
- try:
- res = self._svnwithrev('ls', '-v')
- except process.cmdexec.Error:
- e = sys.exc_info()[1]
- if e.err.find('non-existent in that revision') != -1:
- raise py.error.ENOENT(self, e.err)
- elif e.err.find("E200009:") != -1:
- raise py.error.ENOENT(self, e.err)
- elif e.err.find('File not found') != -1:
- raise py.error.ENOENT(self, e.err)
- elif e.err.find('not part of a repository')!=-1:
- raise py.error.ENOENT(self, e.err)
- elif e.err.find('Unable to open')!=-1:
- raise py.error.ENOENT(self, e.err)
- elif e.err.lower().find('method not allowed')!=-1:
- raise py.error.EACCES(self, e.err)
- raise py.error.Error(e.err)
- lines = res.split('\n')
- nameinfo_seq = []
- for lsline in lines:
- if lsline:
- info = InfoSvnCommand(lsline)
- if info._name != '.': # svn 1.5 produces '.' dirs,
- nameinfo_seq.append((info._name, info))
- nameinfo_seq.sort()
- return nameinfo_seq
- auth = self.auth and self.auth.makecmdoptions() or None
- if self.rev is not None:
- return self._lsrevcache.getorbuild((self.strpath, self.rev, auth),
- builder)
- else:
- return self._lsnorevcache.getorbuild((self.strpath, auth),
- builder)
-
- def listdir(self, fil=None, sort=None):
- """ list directory contents, possibly filter by the given fil func
- and possibly sorted.
- """
- if isinstance(fil, str):
- fil = common.FNMatcher(fil)
- nameinfo_seq = self._listdir_nameinfo()
- if len(nameinfo_seq) == 1:
- name, info = nameinfo_seq[0]
- if name == self.basename and info.kind == 'file':
- #if not self.check(dir=1):
- raise py.error.ENOTDIR(self)
- paths = [self.join(name) for (name, info) in nameinfo_seq]
- if fil:
- paths = [x for x in paths if fil(x)]
- self._sortlist(paths, sort)
- return paths
-
-
- def log(self, rev_start=None, rev_end=1, verbose=False):
- """ return a list of LogEntry instances for this path.
-rev_start is the starting revision (defaulting to the first one).
-rev_end is the last revision (defaulting to HEAD).
-if verbose is True, then the LogEntry instances also know which files changed.
-"""
- assert self.check() #make it simpler for the pipe
- rev_start = rev_start is None and "HEAD" or rev_start
- rev_end = rev_end is None and "HEAD" or rev_end
-
- if rev_start == "HEAD" and rev_end == 1:
- rev_opt = ""
- else:
- rev_opt = "-r %s:%s" % (rev_start, rev_end)
- verbose_opt = verbose and "-v" or ""
- xmlpipe = self._svnpopenauth('svn log --xml %s %s "%s"' %
- (rev_opt, verbose_opt, self.strpath))
- from xml.dom import minidom
- tree = minidom.parse(xmlpipe)
- result = []
- for logentry in filter(None, tree.firstChild.childNodes):
- if logentry.nodeType == logentry.ELEMENT_NODE:
- result.append(svncommon.LogEntry(logentry))
- return result
-
-#01234567890123456789012345678901234567890123467
-# 2256 hpk 165 Nov 24 17:55 __init__.py
-# XXX spotted by Guido, SVN 1.3.0 has different aligning, breaks the code!!!
-# 1312 johnny 1627 May 05 14:32 test_decorators.py
-#
-class InfoSvnCommand:
- # the '0?' part in the middle is an indication of whether the resource is
- # locked, see 'svn help ls'
- lspattern = re.compile(
- r'^ *(?P<rev>\d+) +(?P<author>.+?) +(0? *(?P<size>\d+))? '
+"""
+module defining a subversion path object based on the external
+command 'svn'. This module aims to work with svn 1.3 and higher
+but might also interact well with earlier versions.
+"""
+
+import os, sys, time, re
+import py
+from py import path, process
+from py._path import common
+from py._path import svnwc as svncommon
+from py._path.cacheutil import BuildcostAccessCache, AgingCache
+
+DEBUG=False
+
+class SvnCommandPath(svncommon.SvnPathBase):
+ """ path implementation that offers access to (possibly remote) subversion
+ repositories. """
+
+ _lsrevcache = BuildcostAccessCache(maxentries=128)
+ _lsnorevcache = AgingCache(maxentries=1000, maxseconds=60.0)
+
+ def __new__(cls, path, rev=None, auth=None):
+ self = object.__new__(cls)
+ if isinstance(path, cls):
+ rev = path.rev
+ auth = path.auth
+ path = path.strpath
+ svncommon.checkbadchars(path)
+ path = path.rstrip('/')
+ self.strpath = path
+ self.rev = rev
+ self.auth = auth
+ return self
+
+ def __repr__(self):
+ if self.rev == -1:
+ return 'svnurl(%r)' % self.strpath
+ else:
+ return 'svnurl(%r, %r)' % (self.strpath, self.rev)
+
+ def _svnwithrev(self, cmd, *args):
+ """ execute an svn command, append our own url and revision """
+ if self.rev is None:
+ return self._svnwrite(cmd, *args)
+ else:
+ args = ['-r', self.rev] + list(args)
+ return self._svnwrite(cmd, *args)
+
+ def _svnwrite(self, cmd, *args):
+ """ execute an svn command, append our own url """
+ l = ['svn %s' % cmd]
+ args = ['"%s"' % self._escape(item) for item in args]
+ l.extend(args)
+ l.append('"%s"' % self._encodedurl())
+ # fixing the locale because we can't otherwise parse
+ string = " ".join(l)
+ if DEBUG:
+ print("execing %s" % string)
+ out = self._svncmdexecauth(string)
+ return out
+
+ def _svncmdexecauth(self, cmd):
+ """ execute an svn command 'as is' """
+ cmd = svncommon.fixlocale() + cmd
+ if self.auth is not None:
+ cmd += ' ' + self.auth.makecmdoptions()
+ return self._cmdexec(cmd)
+
+ def _cmdexec(self, cmd):
+ try:
+ out = process.cmdexec(cmd)
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if (e.err.find('File Exists') != -1 or
+ e.err.find('File already exists') != -1):
+ raise py.error.EEXIST(self)
+ raise
+ return out
+
+ def _svnpopenauth(self, cmd):
+        """ execute an svn command, return a pipe for reading its stdout """
+ cmd = svncommon.fixlocale() + cmd
+ if self.auth is not None:
+ cmd += ' ' + self.auth.makecmdoptions()
+ return self._popen(cmd)
+
+ def _popen(self, cmd):
+ return os.popen(cmd)
+
+ def _encodedurl(self):
+ return self._escape(self.strpath)
+
+ def _norev_delentry(self, path):
+ auth = self.auth and self.auth.makecmdoptions() or None
+ self._lsnorevcache.delentry((str(path), auth))
+
+ def open(self, mode='r'):
+ """ return an opened file with the given mode. """
+ if mode not in ("r", "rU",):
+ raise ValueError("mode %r not supported" % (mode,))
+ assert self.check(file=1) # svn cat returns an empty file otherwise
+ if self.rev is None:
+ return self._svnpopenauth('svn cat "%s"' % (
+ self._escape(self.strpath), ))
+ else:
+ return self._svnpopenauth('svn cat -r %s "%s"' % (
+ self.rev, self._escape(self.strpath)))
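    # Sketch: read file content straight from a repository url (the url is
    # hypothetical):
    #
    #   u = py.path.svnurl('http://host/repo/trunk/README')
    #   text = u.open().read()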
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path of the current path joined
+ with any given path arguments.
+ """
+ l = self.strpath.split(self.sep)
+ if len(l) < 4:
+ raise py.error.EINVAL(self, "base is not valid")
+ elif len(l) == 4:
+ return self.join(*args, **kwargs)
+ else:
+ return self.new(basename='').join(*args, **kwargs)
+
+ # modifying methods (cache must be invalidated)
+ def mkdir(self, *args, **kwargs):
+ """ create & return the directory joined with args.
+ pass a 'msg' keyword argument to set the commit message.
+ """
+ commit_msg = kwargs.get('msg', "mkdir by py lib invocation")
+ createpath = self.join(*args)
+ createpath._svnwrite('mkdir', '-m', commit_msg)
+ self._norev_delentry(createpath.dirpath())
+ return createpath
+
+ def copy(self, target, msg='copied by py lib invocation'):
+ """ copy path to target with checkin message msg."""
+ if getattr(target, 'rev', None) is not None:
+ raise py.error.EINVAL(target, "revisions are immutable")
+ self._svncmdexecauth('svn copy -m "%s" "%s" "%s"' %(msg,
+ self._escape(self), self._escape(target)))
+ self._norev_delentry(target.dirpath())
+
+ def rename(self, target, msg="renamed by py lib invocation"):
+ """ rename this path to target with checkin message msg. """
+ if getattr(self, 'rev', None) is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ self._svncmdexecauth('svn move -m "%s" --force "%s" "%s"' %(
+ msg, self._escape(self), self._escape(target)))
+ self._norev_delentry(self.dirpath())
+ self._norev_delentry(self)
+
+ def remove(self, rec=1, msg='removed by py lib invocation'):
+ """ remove a file or directory (or a directory tree if rec=1) with
+checkin message msg."""
+ if self.rev is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ self._svncmdexecauth('svn rm -m "%s" "%s"' %(msg, self._escape(self)))
+ self._norev_delentry(self.dirpath())
+
+ def export(self, topath):
+ """ export to a local path
+
+        topath should not exist prior to calling this; returns a
+ py.path.local instance
+ """
+ topath = py.path.local(topath)
+ args = ['"%s"' % (self._escape(self),),
+ '"%s"' % (self._escape(topath),)]
+ if self.rev is not None:
+ args = ['-r', str(self.rev)] + args
+ self._svncmdexecauth('svn export %s' % (' '.join(args),))
+ return topath
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+ a file). If you specify a keyword argument 'dir=True'
+ then the path is forced to be a directory path.
+ """
+ if getattr(self, 'rev', None) is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ target = self.join(*args)
+ dir = kwargs.get('dir', 0)
+ for x in target.parts(reverse=True):
+ if x.check():
+ break
+ else:
+            raise py.error.ENOENT(target, "has no valid base!")
+ if x == target:
+ if not x.check(dir=dir):
+ raise dir and py.error.ENOTDIR(x) or py.error.EISDIR(x)
+ return x
+ tocreate = target.relto(x)
+ basename = tocreate.split(self.sep, 1)[0]
+ tempdir = py.path.local.mkdtemp()
+ try:
+ tempdir.ensure(tocreate, dir=dir)
+ cmd = 'svn import -m "%s" "%s" "%s"' % (
+ "ensure %s" % self._escape(tocreate),
+ self._escape(tempdir.join(basename)),
+ x.join(basename)._encodedurl())
+ self._svncmdexecauth(cmd)
+ self._norev_delentry(x)
+ finally:
+ tempdir.remove()
+ return target
+
+ # end of modifying methods
+ def _propget(self, name):
+ res = self._svnwithrev('propget', name)
+ return res[:-1] # strip trailing newline
+
+ def _proplist(self):
+ res = self._svnwithrev('proplist')
+ lines = res.split('\n')
+ lines = [x.strip() for x in lines[1:]]
+ return svncommon.PropListDict(self, lines)
+
+ def info(self):
+ """ return an Info structure with svn-provided information. """
+ parent = self.dirpath()
+ nameinfo_seq = parent._listdir_nameinfo()
+ bn = self.basename
+ for name, info in nameinfo_seq:
+ if name == bn:
+ return info
+ raise py.error.ENOENT(self)
+
+
+ def _listdir_nameinfo(self):
+ """ return sequence of name-info directory entries of self """
+ def builder():
+ try:
+ res = self._svnwithrev('ls', '-v')
+ except process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('non-existent in that revision') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find("E200009:") != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('File not found') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('not part of a repository')!=-1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('Unable to open')!=-1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.lower().find('method not allowed')!=-1:
+ raise py.error.EACCES(self, e.err)
+ raise py.error.Error(e.err)
+ lines = res.split('\n')
+ nameinfo_seq = []
+ for lsline in lines:
+ if lsline:
+ info = InfoSvnCommand(lsline)
+ if info._name != '.': # svn 1.5 produces '.' dirs,
+ nameinfo_seq.append((info._name, info))
+ nameinfo_seq.sort()
+ return nameinfo_seq
+ auth = self.auth and self.auth.makecmdoptions() or None
+ if self.rev is not None:
+ return self._lsrevcache.getorbuild((self.strpath, self.rev, auth),
+ builder)
+ else:
+ return self._lsnorevcache.getorbuild((self.strpath, auth),
+ builder)
+
+ def listdir(self, fil=None, sort=None):
+ """ list directory contents, possibly filter by the given fil func
+ and possibly sorted.
+ """
+ if isinstance(fil, str):
+ fil = common.FNMatcher(fil)
+ nameinfo_seq = self._listdir_nameinfo()
+ if len(nameinfo_seq) == 1:
+ name, info = nameinfo_seq[0]
+ if name == self.basename and info.kind == 'file':
+ #if not self.check(dir=1):
+ raise py.error.ENOTDIR(self)
+ paths = [self.join(name) for (name, info) in nameinfo_seq]
+ if fil:
+ paths = [x for x in paths if fil(x)]
+ self._sortlist(paths, sort)
+ return paths
+
+
+ def log(self, rev_start=None, rev_end=1, verbose=False):
+ """ return a list of LogEntry instances for this path.
+rev_start is the starting revision (defaulting to the first one).
+rev_end is the last revision (defaulting to HEAD).
+if verbose is True, then the LogEntry instances also know which files changed.
+"""
+ assert self.check() #make it simpler for the pipe
+ rev_start = rev_start is None and "HEAD" or rev_start
+ rev_end = rev_end is None and "HEAD" or rev_end
+
+ if rev_start == "HEAD" and rev_end == 1:
+ rev_opt = ""
+ else:
+ rev_opt = "-r %s:%s" % (rev_start, rev_end)
+ verbose_opt = verbose and "-v" or ""
+ xmlpipe = self._svnpopenauth('svn log --xml %s %s "%s"' %
+ (rev_opt, verbose_opt, self.strpath))
+ from xml.dom import minidom
+ tree = minidom.parse(xmlpipe)
+ result = []
+ for logentry in filter(None, tree.firstChild.childNodes):
+ if logentry.nodeType == logentry.ELEMENT_NODE:
+ result.append(svncommon.LogEntry(logentry))
+ return result
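    # Sketch: walk the history of a url; the attribute names mirror the
    # 'svn log --xml' fields and are illustrative here:
    #
    #   for entry in u.log(verbose=True):   # `u` as in the sketch above
    #       entry.rev, entry.author, entry.msg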
+
+#01234567890123456789012345678901234567890123467
+# 2256 hpk 165 Nov 24 17:55 __init__.py
+# XXX spotted by Guido, SVN 1.3.0 has different aligning, breaks the code!!!
+# 1312 johnny 1627 May 05 14:32 test_decorators.py
+#
+class InfoSvnCommand:
+ # the '0?' part in the middle is an indication of whether the resource is
+ # locked, see 'svn help ls'
+ lspattern = re.compile(
+ r'^ *(?P<rev>\d+) +(?P<author>.+?) +(0? *(?P<size>\d+))? '
r'*(?P<date>\w+ +\d{2} +[\d:]+) +(?P<file>.*)$')
- def __init__(self, line):
- # this is a typical line from 'svn ls http://...'
- #_ 1127 jum 0 Jul 13 15:28 branch/
- match = self.lspattern.match(line)
- data = match.groupdict()
- self._name = data['file']
- if self._name[-1] == '/':
- self._name = self._name[:-1]
- self.kind = 'dir'
- else:
- self.kind = 'file'
- #self.has_props = l.pop(0) == 'P'
- self.created_rev = int(data['rev'])
- self.last_author = data['author']
- self.size = data['size'] and int(data['size']) or 0
- self.mtime = parse_time_with_missing_year(data['date'])
- self.time = self.mtime * 1000000
-
- def __eq__(self, other):
- return self.__dict__ == other.__dict__
-
-
-#____________________________________________________
-#
-# helper functions
-#____________________________________________________
-def parse_time_with_missing_year(timestr):
- """ analyze the time part from a single line of "svn ls -v"
-    the svn output doesn't show the year, which makes the 'timestr'
-    ambiguous.
- """
- import calendar
- t_now = time.gmtime()
-
- tparts = timestr.split()
- month = time.strptime(tparts.pop(0), '%b')[1]
- day = time.strptime(tparts.pop(0), '%d')[2]
- last = tparts.pop(0) # year or hour:minute
- try:
- if ":" in last:
- raise ValueError()
- year = time.strptime(last, '%Y')[0]
- hour = minute = 0
- except ValueError:
- hour, minute = time.strptime(last, '%H:%M')[3:5]
- year = t_now[0]
-
- t_result = (year, month, day, hour, minute, 0,0,0,0)
- if t_result > t_now:
- year -= 1
- t_result = (year, month, day, hour, minute, 0,0,0,0)
- return calendar.timegm(t_result)
-
-class PathEntry:
- def __init__(self, ppart):
- self.strpath = ppart.firstChild.nodeValue.encode('UTF-8')
- self.action = ppart.getAttribute('action').encode('UTF-8')
- if self.action == 'A':
- self.copyfrom_path = ppart.getAttribute('copyfrom-path').encode('UTF-8')
- if self.copyfrom_path:
- self.copyfrom_rev = int(ppart.getAttribute('copyfrom-rev'))
-
+ def __init__(self, line):
+ # this is a typical line from 'svn ls http://...'
+ #_ 1127 jum 0 Jul 13 15:28 branch/
+ match = self.lspattern.match(line)
+ data = match.groupdict()
+ self._name = data['file']
+ if self._name[-1] == '/':
+ self._name = self._name[:-1]
+ self.kind = 'dir'
+ else:
+ self.kind = 'file'
+ #self.has_props = l.pop(0) == 'P'
+ self.created_rev = int(data['rev'])
+ self.last_author = data['author']
+ self.size = data['size'] and int(data['size']) or 0
+ self.mtime = parse_time_with_missing_year(data['date'])
+ self.time = self.mtime * 1000000
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+
+#____________________________________________________
+#
+# helper functions
+#____________________________________________________
+def parse_time_with_missing_year(timestr):
+ """ analyze the time part from a single line of "svn ls -v"
+    the svn output doesn't show the year, which makes the 'timestr'
+    ambiguous.
+ """
+ import calendar
+ t_now = time.gmtime()
+
+ tparts = timestr.split()
+ month = time.strptime(tparts.pop(0), '%b')[1]
+ day = time.strptime(tparts.pop(0), '%d')[2]
+ last = tparts.pop(0) # year or hour:minute
+ try:
+ if ":" in last:
+ raise ValueError()
+ year = time.strptime(last, '%Y')[0]
+ hour = minute = 0
+ except ValueError:
+ hour, minute = time.strptime(last, '%H:%M')[3:5]
+ year = t_now[0]
+
+ t_result = (year, month, day, hour, minute, 0,0,0,0)
+ if t_result > t_now:
+ year -= 1
+ t_result = (year, month, day, hour, minute, 0,0,0,0)
+ return calendar.timegm(t_result)
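# Sketch: a year-less listing time is resolved against the current year,
# rolling back one year if the result would lie in the future:
#
#   parse_time_with_missing_year('Nov 24 17:55')   # -> epoch seconds (UTC)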
+
+class PathEntry:
+ def __init__(self, ppart):
+ self.strpath = ppart.firstChild.nodeValue.encode('UTF-8')
+ self.action = ppart.getAttribute('action').encode('UTF-8')
+ if self.action == 'A':
+ self.copyfrom_path = ppart.getAttribute('copyfrom-path').encode('UTF-8')
+ if self.copyfrom_path:
+ self.copyfrom_rev = int(ppart.getAttribute('copyfrom-rev'))
+
diff --git a/contrib/python/py/py/_path/svnwc.py b/contrib/python/py/py/_path/svnwc.py
index 51d2b60bcf..b5b9d8d544 100644
--- a/contrib/python/py/py/_path/svnwc.py
+++ b/contrib/python/py/py/_path/svnwc.py
@@ -1,1240 +1,1240 @@
-"""
-svn-Command based Implementation of a Subversion WorkingCopy Path.
-
- SvnWCCommandPath is the main class.
-
-"""
-
-import os, sys, time, re, calendar
-import py
-import subprocess
-from py._path import common
-
-#-----------------------------------------------------------
-# Caching latest repository revision and repo-paths
-# (getting them is slow with the current implementations)
-#
-# XXX make mt-safe
-#-----------------------------------------------------------
-
-class cache:
- proplist = {}
- info = {}
- entries = {}
- prop = {}
-
-class RepoEntry:
- def __init__(self, url, rev, timestamp):
- self.url = url
- self.rev = rev
- self.timestamp = timestamp
-
- def __str__(self):
- return "repo: %s;%s %s" %(self.url, self.rev, self.timestamp)
-
-class RepoCache:
-    """ The RepoCache manages discovered repository paths
-    and their revisions. If queried within the timeout, the cache
-    will even return the revision of the root.
- """
- timeout = 20 # seconds after which we forget that we know the last revision
-
- def __init__(self):
- self.repos = []
-
- def clear(self):
- self.repos = []
-
- def put(self, url, rev, timestamp=None):
- if rev is None:
- return
- if timestamp is None:
- timestamp = time.time()
-
- for entry in self.repos:
- if url == entry.url:
- entry.timestamp = timestamp
- entry.rev = rev
- #print "set repo", entry
- break
- else:
- entry = RepoEntry(url, rev, timestamp)
- self.repos.append(entry)
- #print "appended repo", entry
-
- def get(self, url):
- now = time.time()
- for entry in self.repos:
- if url.startswith(entry.url):
- if now < entry.timestamp + self.timeout:
-                    #print "returning immediate entry", entry
- return entry.url, entry.rev
- return entry.url, -1
- return url, -1
-
-repositories = RepoCache()
-
-
-# svn support code
-
-ALLOWED_CHARS = "_ -/\\=$.~+%" #add characters as necessary when tested
-if sys.platform == "win32":
- ALLOWED_CHARS += ":"
-ALLOWED_CHARS_HOST = ALLOWED_CHARS + '@:'
-
-def _getsvnversion(ver=[]):
- try:
- return ver[0]
- except IndexError:
- v = py.process.cmdexec("svn -q --version")
-        v = v.strip()
- v = '.'.join(v.split('.')[:2])
- ver.append(v)
- return v
-
-def _escape_helper(text):
- text = str(text)
+"""
+svn-Command based Implementation of a Subversion WorkingCopy Path.
+
+ SvnWCCommandPath is the main class.
+
+"""
+
+import os, sys, time, re, calendar
+import py
+import subprocess
+from py._path import common
+
+#-----------------------------------------------------------
+# Caching latest repository revision and repo-paths
+# (getting them is slow with the current implementations)
+#
+# XXX make mt-safe
+#-----------------------------------------------------------
+
+class cache:
+ proplist = {}
+ info = {}
+ entries = {}
+ prop = {}
+
+class RepoEntry:
+ def __init__(self, url, rev, timestamp):
+ self.url = url
+ self.rev = rev
+ self.timestamp = timestamp
+
+ def __str__(self):
+ return "repo: %s;%s %s" %(self.url, self.rev, self.timestamp)
+
+class RepoCache:
+    """ The RepoCache manages discovered repository paths
+    and their revisions. If queried within the timeout, the cache
+    will even return the revision of the root.
+ """
+ timeout = 20 # seconds after which we forget that we know the last revision
+
+ def __init__(self):
+ self.repos = []
+
+ def clear(self):
+ self.repos = []
+
+ def put(self, url, rev, timestamp=None):
+ if rev is None:
+ return
+ if timestamp is None:
+ timestamp = time.time()
+
+ for entry in self.repos:
+ if url == entry.url:
+ entry.timestamp = timestamp
+ entry.rev = rev
+ #print "set repo", entry
+ break
+ else:
+ entry = RepoEntry(url, rev, timestamp)
+ self.repos.append(entry)
+ #print "appended repo", entry
+
+ def get(self, url):
+ now = time.time()
+ for entry in self.repos:
+ if url.startswith(entry.url):
+ if now < entry.timestamp + self.timeout:
+                    #print "returning immediate entry", entry
+ return entry.url, entry.rev
+ return entry.url, -1
+ return url, -1
+
+repositories = RepoCache()
+
+
+# svn support code
+
+ALLOWED_CHARS = "_ -/\\=$.~+%" #add characters as necessary when tested
+if sys.platform == "win32":
+ ALLOWED_CHARS += ":"
+ALLOWED_CHARS_HOST = ALLOWED_CHARS + '@:'
+
+def _getsvnversion(ver=[]):
+ try:
+ return ver[0]
+ except IndexError:
+ v = py.process.cmdexec("svn -q --version")
+        v = v.strip()
+ v = '.'.join(v.split('.')[:2])
+ ver.append(v)
+ return v
+
+def _escape_helper(text):
+ text = str(text)
if sys.platform != 'win32':
- text = str(text).replace('$', '\\$')
- return text
-
-def _check_for_bad_chars(text, allowed_chars=ALLOWED_CHARS):
- for c in str(text):
- if c.isalnum():
- continue
- if c in allowed_chars:
- continue
- return True
- return False
-
-def checkbadchars(url):
- # (hpk) not quite sure about the exact purpose, guido w.?
- proto, uri = url.split("://", 1)
- if proto != "file":
- host, uripath = uri.split('/', 1)
- # only check for bad chars in the non-protocol parts
- if (_check_for_bad_chars(host, ALLOWED_CHARS_HOST) \
- or _check_for_bad_chars(uripath, ALLOWED_CHARS)):
- raise ValueError("bad char in %r" % (url, ))
-
-
-#_______________________________________________________________
-
-class SvnPathBase(common.PathBase):
- """ Base implementation for SvnPath implementations. """
- sep = '/'
-
- def _geturl(self):
- return self.strpath
- url = property(_geturl, None, None, "url of this svn-path.")
-
- def __str__(self):
- """ return a string representation (including rev-number) """
- return self.strpath
-
- def __hash__(self):
- return hash(self.strpath)
-
- def new(self, **kw):
- """ create a modified version of this path. A 'rev' argument
- indicates a new revision.
- the following keyword arguments modify various path parts::
-
- http://host.com/repo/path/file.ext
- |-----------------------| dirname
- |------| basename
- |--| purebasename
- |--| ext
- """
- obj = object.__new__(self.__class__)
- obj.rev = kw.get('rev', self.rev)
- obj.auth = kw.get('auth', self.auth)
- dirname, basename, purebasename, ext = self._getbyspec(
- "dirname,basename,purebasename,ext")
- if 'basename' in kw:
- if 'purebasename' in kw or 'ext' in kw:
- raise ValueError("invalid specification %r" % kw)
- else:
- pb = kw.setdefault('purebasename', purebasename)
- ext = kw.setdefault('ext', ext)
- if ext and not ext.startswith('.'):
- ext = '.' + ext
- kw['basename'] = pb + ext
-
- kw.setdefault('dirname', dirname)
- kw.setdefault('sep', self.sep)
- if kw['basename']:
- obj.strpath = "%(dirname)s%(sep)s%(basename)s" % kw
- else:
- obj.strpath = "%(dirname)s" % kw
- return obj
-
- def _getbyspec(self, spec):
-        """ get specified parts of the path. 'spec' is a string
- with comma separated path parts. The parts are returned
- in exactly the order of the specification.
-
- you may specify the following parts:
-
- http://host.com/repo/path/file.ext
- |-----------------------| dirname
- |------| basename
- |--| purebasename
- |--| ext
- """
- res = []
- parts = self.strpath.split(self.sep)
- for name in spec.split(','):
- name = name.strip()
- if name == 'dirname':
- res.append(self.sep.join(parts[:-1]))
- elif name == 'basename':
- res.append(parts[-1])
- else:
- basename = parts[-1]
- i = basename.rfind('.')
- if i == -1:
- purebasename, ext = basename, ''
- else:
- purebasename, ext = basename[:i], basename[i:]
- if name == 'purebasename':
- res.append(purebasename)
- elif name == 'ext':
- res.append(ext)
- else:
- raise NameError("Don't know part %r" % name)
- return res
-
- def __eq__(self, other):
- """ return true if path and rev attributes each match """
-        return (str(self) == str(other) and
-                self.rev == other.rev)
-
- def __ne__(self, other):
- return not self == other
-
- def join(self, *args):
- """ return a new Path (with the same revision) which is composed
- of the self Path followed by 'args' path components.
- """
- if not args:
- return self
-
- args = tuple([arg.strip(self.sep) for arg in args])
- parts = (self.strpath, ) + args
- newpath = self.__class__(self.sep.join(parts), self.rev, self.auth)
- return newpath
-
- def propget(self, name):
- """ return the content of the given property. """
- value = self._propget(name)
- return value
-
- def proplist(self):
- """ list all property names. """
- content = self._proplist()
- return content
-
- def size(self):
- """ Return the size of the file content of the Path. """
- return self.info().size
-
- def mtime(self):
- """ Return the last modification time of the file. """
- return self.info().mtime
-
- # shared help methods
-
- def _escape(self, cmd):
- return _escape_helper(cmd)
-
-
- #def _childmaxrev(self):
- # """ return maximum revision number of childs (or self.rev if no childs) """
- # rev = self.rev
- # for name, info in self._listdir_nameinfo():
- # rev = max(rev, info.created_rev)
- # return rev
-
- #def _getlatestrevision(self):
- # """ return latest repo-revision for this path. """
- # url = self.strpath
- # path = self.__class__(url, None)
- #
- # # we need a long walk to find the root-repo and revision
- # while 1:
- # try:
- # rev = max(rev, path._childmaxrev())
- # previous = path
- # path = path.dirpath()
- # except (IOError, process.cmdexec.Error):
- # break
- # if rev is None:
- # raise IOError, "could not determine newest repo revision for %s" % self
- # return rev
-
- class Checkers(common.Checkers):
- def dir(self):
- try:
- return self.path.info().kind == 'dir'
- except py.error.Error:
- return self._listdirworks()
-
- def _listdirworks(self):
- try:
- self.path.listdir()
- except py.error.ENOENT:
- return False
- else:
- return True
-
- def file(self):
- try:
- return self.path.info().kind == 'file'
- except py.error.ENOENT:
- return False
-
- def exists(self):
- try:
- return self.path.info()
- except py.error.ENOENT:
- return self._listdirworks()
-
-def parse_apr_time(timestr):
- i = timestr.rfind('.')
- if i == -1:
- raise ValueError("could not parse %s" % timestr)
- timestr = timestr[:i]
- parsedtime = time.strptime(timestr, "%Y-%m-%dT%H:%M:%S")
- return time.mktime(parsedtime)
-
-class PropListDict(dict):
- """ a Dictionary which fetches values (InfoSvnCommand instances) lazily"""
- def __init__(self, path, keynames):
- dict.__init__(self, [(x, None) for x in keynames])
- self.path = path
-
- def __getitem__(self, key):
- value = dict.__getitem__(self, key)
- if value is None:
- value = self.path.propget(key)
- dict.__setitem__(self, key, value)
- return value
-
-def fixlocale():
- if sys.platform != 'win32':
- return 'LC_ALL=C '
- return ''
-
-# some nasty chunk of code to solve path and url conversion and quoting issues
+ text = str(text).replace('$', '\\$')
+ return text
+
+def _check_for_bad_chars(text, allowed_chars=ALLOWED_CHARS):
+ for c in str(text):
+ if c.isalnum():
+ continue
+ if c in allowed_chars:
+ continue
+ return True
+ return False
+
+def checkbadchars(url):
+ # (hpk) not quite sure about the exact purpose, guido w.?
+ proto, uri = url.split("://", 1)
+ if proto != "file":
+ host, uripath = uri.split('/', 1)
+ # only check for bad chars in the non-protocol parts
+ if (_check_for_bad_chars(host, ALLOWED_CHARS_HOST) \
+ or _check_for_bad_chars(uripath, ALLOWED_CHARS)):
+ raise ValueError("bad char in %r" % (url, ))
+
+
+#_______________________________________________________________
+
+class SvnPathBase(common.PathBase):
+ """ Base implementation for SvnPath implementations. """
+ sep = '/'
+
+ def _geturl(self):
+ return self.strpath
+ url = property(_geturl, None, None, "url of this svn-path.")
+
+ def __str__(self):
+ """ return a string representation (including rev-number) """
+ return self.strpath
+
+ def __hash__(self):
+ return hash(self.strpath)
+
+ def new(self, **kw):
+ """ create a modified version of this path. A 'rev' argument
+ indicates a new revision.
+ the following keyword arguments modify various path parts::
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ obj = object.__new__(self.__class__)
+ obj.rev = kw.get('rev', self.rev)
+ obj.auth = kw.get('auth', self.auth)
+ dirname, basename, purebasename, ext = self._getbyspec(
+ "dirname,basename,purebasename,ext")
+ if 'basename' in kw:
+ if 'purebasename' in kw or 'ext' in kw:
+ raise ValueError("invalid specification %r" % kw)
+ else:
+ pb = kw.setdefault('purebasename', purebasename)
+ ext = kw.setdefault('ext', ext)
+ if ext and not ext.startswith('.'):
+ ext = '.' + ext
+ kw['basename'] = pb + ext
+
+ kw.setdefault('dirname', dirname)
+ kw.setdefault('sep', self.sep)
+ if kw['basename']:
+ obj.strpath = "%(dirname)s%(sep)s%(basename)s" % kw
+ else:
+ obj.strpath = "%(dirname)s" % kw
+ return obj
+
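+    # Illustrative sketch (URL and revision are hypothetical): for a path
+    # p pointing at 'http://host.com/repo/path/file.ext' at rev 10,
+    #
+    #     p.new(ext='txt')             # .../path/file.txt (dot is added)
+    #     p.new(purebasename='other')  # .../path/other.ext
+    #     p.new(basename='readme')     # .../path/readme
+    #
+    # rev and auth carry over unless overridden via kw.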
+ def _getbyspec(self, spec):
+ """ get specified parts of the path. 'arg' is a string
+ with comma separated path parts. The parts are returned
+ in exactly the order of the specification.
+
+ you may specify the following parts:
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ res = []
+ parts = self.strpath.split(self.sep)
+ for name in spec.split(','):
+ name = name.strip()
+ if name == 'dirname':
+ res.append(self.sep.join(parts[:-1]))
+ elif name == 'basename':
+ res.append(parts[-1])
+ else:
+ basename = parts[-1]
+ i = basename.rfind('.')
+ if i == -1:
+ purebasename, ext = basename, ''
+ else:
+ purebasename, ext = basename[:i], basename[i:]
+ if name == 'purebasename':
+ res.append(purebasename)
+ elif name == 'ext':
+ res.append(ext)
+ else:
+ raise NameError("Don't know part %r" % name)
+ return res
+
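+    # Illustrative sketch: for strpath 'http://host.com/repo/path/file.ext',
+    #
+    #     self._getbyspec('dirname,basename,purebasename,ext')
+    #     # -> ['http://host.com/repo/path', 'file.ext', 'file', '.ext']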
+ def __eq__(self, other):
+ """ return true if path and rev attributes each match """
+        return (str(self) == str(other) and
+                self.rev == other.rev)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def join(self, *args):
+ """ return a new Path (with the same revision) which is composed
+ of the self Path followed by 'args' path components.
+ """
+ if not args:
+ return self
+
+ args = tuple([arg.strip(self.sep) for arg in args])
+ parts = (self.strpath, ) + args
+ newpath = self.__class__(self.sep.join(parts), self.rev, self.auth)
+ return newpath
+
+ def propget(self, name):
+ """ return the content of the given property. """
+ value = self._propget(name)
+ return value
+
+ def proplist(self):
+ """ list all property names. """
+ content = self._proplist()
+ return content
+
+ def size(self):
+ """ Return the size of the file content of the Path. """
+ return self.info().size
+
+ def mtime(self):
+ """ Return the last modification time of the file. """
+ return self.info().mtime
+
+ # shared help methods
+
+ def _escape(self, cmd):
+ return _escape_helper(cmd)
+
+
+ #def _childmaxrev(self):
+    #    """ return maximum revision number of children (or self.rev if no children) """
+ # rev = self.rev
+ # for name, info in self._listdir_nameinfo():
+ # rev = max(rev, info.created_rev)
+ # return rev
+
+ #def _getlatestrevision(self):
+ # """ return latest repo-revision for this path. """
+ # url = self.strpath
+ # path = self.__class__(url, None)
+ #
+ # # we need a long walk to find the root-repo and revision
+ # while 1:
+ # try:
+ # rev = max(rev, path._childmaxrev())
+ # previous = path
+ # path = path.dirpath()
+ # except (IOError, process.cmdexec.Error):
+ # break
+ # if rev is None:
+ # raise IOError, "could not determine newest repo revision for %s" % self
+ # return rev
+
+ class Checkers(common.Checkers):
+ def dir(self):
+ try:
+ return self.path.info().kind == 'dir'
+ except py.error.Error:
+ return self._listdirworks()
+
+ def _listdirworks(self):
+ try:
+ self.path.listdir()
+ except py.error.ENOENT:
+ return False
+ else:
+ return True
+
+ def file(self):
+ try:
+ return self.path.info().kind == 'file'
+ except py.error.ENOENT:
+ return False
+
+ def exists(self):
+ try:
+ return self.path.info()
+ except py.error.ENOENT:
+ return self._listdirworks()
+
+def parse_apr_time(timestr):
+ i = timestr.rfind('.')
+ if i == -1:
+ raise ValueError("could not parse %s" % timestr)
+ timestr = timestr[:i]
+ parsedtime = time.strptime(timestr, "%Y-%m-%dT%H:%M:%S")
+ return time.mktime(parsedtime)
+
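+# Illustrative sketch: 'svn log --xml' dates look like
+# '2003-07-29T20:05:11.598637Z'; everything from the last '.' on is cut
+# off, so
+#
+#     parse_apr_time('2003-07-29T20:05:11.598637Z')
+#
+# parses '2003-07-29T20:05:11' and returns epoch seconds via time.mktime
+# (which, as written, interprets the value in local time).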
+class PropListDict(dict):
+ """ a Dictionary which fetches values (InfoSvnCommand instances) lazily"""
+ def __init__(self, path, keynames):
+ dict.__init__(self, [(x, None) for x in keynames])
+ self.path = path
+
+ def __getitem__(self, key):
+ value = dict.__getitem__(self, key)
+ if value is None:
+ value = self.path.propget(key)
+ dict.__setitem__(self, key, value)
+ return value
+
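+# Usage sketch (hypothetical property name): given
+# d = PropListDict(path, ['svn:ignore']), the first d['svn:ignore']
+# triggers one path.propget('svn:ignore') call and caches the result;
+# later lookups are served from the dict itself.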
+def fixlocale():
+ if sys.platform != 'win32':
+ return 'LC_ALL=C '
+ return ''
+
+# some nasty chunk of code to solve path and url conversion and quoting issues
ILLEGAL_CHARS = '* | \\ / : < > ? \t \n \x0b \x0c \r'.split(' ')
-if os.sep in ILLEGAL_CHARS:
- ILLEGAL_CHARS.remove(os.sep)
-ISWINDOWS = sys.platform == 'win32'
-_reg_allow_disk = re.compile(r'^([a-z]\:\\)?[^:]+$', re.I)
-def _check_path(path):
- illegal = ILLEGAL_CHARS[:]
- sp = path.strpath
- if ISWINDOWS:
- illegal.remove(':')
- if not _reg_allow_disk.match(sp):
- raise ValueError('path may not contain a colon (:)')
- for char in sp:
- if char not in string.printable or char in illegal:
- raise ValueError('illegal character %r in path' % (char,))
-
-def path_to_fspath(path, addat=True):
- _check_path(path)
- sp = path.strpath
- if addat and path.rev != -1:
- sp = '%s@%s' % (sp, path.rev)
- elif addat:
- sp = '%s@HEAD' % (sp,)
- return sp
-
-def url_from_path(path):
- fspath = path_to_fspath(path, False)
+if os.sep in ILLEGAL_CHARS:
+ ILLEGAL_CHARS.remove(os.sep)
+ISWINDOWS = sys.platform == 'win32'
+_reg_allow_disk = re.compile(r'^([a-z]\:\\)?[^:]+$', re.I)
+def _check_path(path):
+ illegal = ILLEGAL_CHARS[:]
+ sp = path.strpath
+ if ISWINDOWS:
+ illegal.remove(':')
+ if not _reg_allow_disk.match(sp):
+ raise ValueError('path may not contain a colon (:)')
+ for char in sp:
+ if char not in string.printable or char in illegal:
+ raise ValueError('illegal character %r in path' % (char,))
+
+def path_to_fspath(path, addat=True):
+ _check_path(path)
+ sp = path.strpath
+ if addat and path.rev != -1:
+ sp = '%s@%s' % (sp, path.rev)
+ elif addat:
+ sp = '%s@HEAD' % (sp,)
+ return sp
+
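+# Illustrative sketch: for a path at rev 42, path_to_fspath(p) yields
+# '<strpath>@42'; with rev == -1 it yields '<strpath>@HEAD'; with
+# addat=False the bare strpath is returned (as url_from_path below uses).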
+def url_from_path(path):
+ fspath = path_to_fspath(path, False)
from urllib import quote
- if ISWINDOWS:
- match = _reg_allow_disk.match(fspath)
- fspath = fspath.replace('\\', '/')
- if match.group(1):
- fspath = '/%s%s' % (match.group(1).replace('\\', '/'),
- quote(fspath[len(match.group(1)):]))
- else:
- fspath = quote(fspath)
- else:
- fspath = quote(fspath)
- if path.rev != -1:
- fspath = '%s@%s' % (fspath, path.rev)
- else:
- fspath = '%s@HEAD' % (fspath,)
- return 'file://%s' % (fspath,)
-
-class SvnAuth(object):
- """ container for auth information for Subversion """
- def __init__(self, username, password, cache_auth=True, interactive=True):
- self.username = username
- self.password = password
- self.cache_auth = cache_auth
- self.interactive = interactive
-
- def makecmdoptions(self):
- uname = self.username.replace('"', '\\"')
- passwd = self.password.replace('"', '\\"')
- ret = []
- if uname:
- ret.append('--username="%s"' % (uname,))
- if passwd:
- ret.append('--password="%s"' % (passwd,))
- if not self.cache_auth:
- ret.append('--no-auth-cache')
- if not self.interactive:
- ret.append('--non-interactive')
- return ' '.join(ret)
-
- def __str__(self):
- return "<SvnAuth username=%s ...>" %(self.username,)
-
+ if ISWINDOWS:
+ match = _reg_allow_disk.match(fspath)
+ fspath = fspath.replace('\\', '/')
+ if match.group(1):
+ fspath = '/%s%s' % (match.group(1).replace('\\', '/'),
+ quote(fspath[len(match.group(1)):]))
+ else:
+ fspath = quote(fspath)
+ else:
+ fspath = quote(fspath)
+ if path.rev != -1:
+ fspath = '%s@%s' % (fspath, path.rev)
+ else:
+ fspath = '%s@HEAD' % (fspath,)
+ return 'file://%s' % (fspath,)
+
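+# Illustrative sketch (POSIX): url_from_path(p) for strpath '/tmp/wc' at
+# rev 7 gives 'file:///tmp/wc@7' (rev -1 gives 'file:///tmp/wc@HEAD');
+# on Windows a 'c:\\' drive prefix is first rewritten to '/c:/' before
+# URL-quoting.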
+class SvnAuth(object):
+ """ container for auth information for Subversion """
+ def __init__(self, username, password, cache_auth=True, interactive=True):
+ self.username = username
+ self.password = password
+ self.cache_auth = cache_auth
+ self.interactive = interactive
+
+ def makecmdoptions(self):
+ uname = self.username.replace('"', '\\"')
+ passwd = self.password.replace('"', '\\"')
+ ret = []
+ if uname:
+ ret.append('--username="%s"' % (uname,))
+ if passwd:
+ ret.append('--password="%s"' % (passwd,))
+ if not self.cache_auth:
+ ret.append('--no-auth-cache')
+ if not self.interactive:
+ ret.append('--non-interactive')
+ return ' '.join(ret)
+
+ def __str__(self):
+ return "<SvnAuth username=%s ...>" %(self.username,)
+
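+# Illustrative sketch (made-up credentials):
+#
+#     auth = SvnAuth('guido', 's3cret', cache_auth=False, interactive=False)
+#     auth.makecmdoptions()
+#     # -> '--username="guido" --password="s3cret" --no-auth-cache --non-interactive'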
rex_blame = re.compile(r'\s*(\d+)\s+(\S+) (.*)')
-
-class SvnWCCommandPath(common.PathBase):
- """ path implementation offering access/modification to svn working copies.
- It has methods similar to the functions in os.path and similar to the
- commands of the svn client.
- """
- sep = os.sep
-
- def __new__(cls, wcpath=None, auth=None):
- self = object.__new__(cls)
- if isinstance(wcpath, cls):
- if wcpath.__class__ == cls:
- return wcpath
- wcpath = wcpath.localpath
- if _check_for_bad_chars(str(wcpath),
- ALLOWED_CHARS):
- raise ValueError("bad char in wcpath %s" % (wcpath, ))
- self.localpath = py.path.local(wcpath)
- self.auth = auth
- return self
-
- strpath = property(lambda x: str(x.localpath), None, None, "string path")
- rev = property(lambda x: x.info(usecache=0).rev, None, None, "revision")
-
- def __eq__(self, other):
- return self.localpath == getattr(other, 'localpath', None)
-
- def _geturl(self):
- if getattr(self, '_url', None) is None:
- info = self.info()
- self._url = info.url #SvnPath(info.url, info.rev)
- assert isinstance(self._url, py.builtin._basestring)
- return self._url
-
- url = property(_geturl, None, None, "url of this WC item")
-
- def _escape(self, cmd):
- return _escape_helper(cmd)
-
- def dump(self, obj):
- """ pickle object into path location"""
- return self.localpath.dump(obj)
-
- def svnurl(self):
- """ return current SvnPath for this WC-item. """
- info = self.info()
- return py.path.svnurl(info.url)
-
- def __repr__(self):
- return "svnwc(%r)" % (self.strpath) # , self._url)
-
- def __str__(self):
- return str(self.localpath)
-
- def _makeauthoptions(self):
- if self.auth is None:
- return ''
- return self.auth.makecmdoptions()
-
- def _authsvn(self, cmd, args=None):
- args = args and list(args) or []
- args.append(self._makeauthoptions())
- return self._svn(cmd, *args)
-
- def _svn(self, cmd, *args):
- l = ['svn %s' % cmd]
- args = [self._escape(item) for item in args]
- l.extend(args)
- l.append('"%s"' % self._escape(self.strpath))
- # try fixing the locale because we can't otherwise parse
- string = fixlocale() + " ".join(l)
- try:
- try:
- key = 'LC_MESSAGES'
- hold = os.environ.get(key)
- os.environ[key] = 'C'
- out = py.process.cmdexec(string)
- finally:
- if hold:
- os.environ[key] = hold
- else:
- del os.environ[key]
- except py.process.cmdexec.Error:
- e = sys.exc_info()[1]
- strerr = e.err.lower()
- if strerr.find('not found') != -1:
- raise py.error.ENOENT(self)
- elif strerr.find("E200009:") != -1:
- raise py.error.ENOENT(self)
- if (strerr.find('file exists') != -1 or
- strerr.find('file already exists') != -1 or
- strerr.find('w150002:') != -1 or
- strerr.find("can't create directory") != -1):
+
+class SvnWCCommandPath(common.PathBase):
+ """ path implementation offering access/modification to svn working copies.
+ It has methods similar to the functions in os.path and similar to the
+ commands of the svn client.
+ """
+ sep = os.sep
+
+ def __new__(cls, wcpath=None, auth=None):
+ self = object.__new__(cls)
+ if isinstance(wcpath, cls):
+ if wcpath.__class__ == cls:
+ return wcpath
+ wcpath = wcpath.localpath
+ if _check_for_bad_chars(str(wcpath),
+ ALLOWED_CHARS):
+ raise ValueError("bad char in wcpath %s" % (wcpath, ))
+ self.localpath = py.path.local(wcpath)
+ self.auth = auth
+ return self
+
+ strpath = property(lambda x: str(x.localpath), None, None, "string path")
+ rev = property(lambda x: x.info(usecache=0).rev, None, None, "revision")
+
+ def __eq__(self, other):
+ return self.localpath == getattr(other, 'localpath', None)
+
+ def _geturl(self):
+ if getattr(self, '_url', None) is None:
+ info = self.info()
+ self._url = info.url #SvnPath(info.url, info.rev)
+ assert isinstance(self._url, py.builtin._basestring)
+ return self._url
+
+ url = property(_geturl, None, None, "url of this WC item")
+
+ def _escape(self, cmd):
+ return _escape_helper(cmd)
+
+ def dump(self, obj):
+ """ pickle object into path location"""
+ return self.localpath.dump(obj)
+
+ def svnurl(self):
+ """ return current SvnPath for this WC-item. """
+ info = self.info()
+ return py.path.svnurl(info.url)
+
+ def __repr__(self):
+ return "svnwc(%r)" % (self.strpath) # , self._url)
+
+ def __str__(self):
+ return str(self.localpath)
+
+ def _makeauthoptions(self):
+ if self.auth is None:
+ return ''
+ return self.auth.makecmdoptions()
+
+ def _authsvn(self, cmd, args=None):
+ args = args and list(args) or []
+ args.append(self._makeauthoptions())
+ return self._svn(cmd, *args)
+
+ def _svn(self, cmd, *args):
+ l = ['svn %s' % cmd]
+ args = [self._escape(item) for item in args]
+ l.extend(args)
+ l.append('"%s"' % self._escape(self.strpath))
+ # try fixing the locale because we can't otherwise parse
+ string = fixlocale() + " ".join(l)
+ try:
+ try:
+ key = 'LC_MESSAGES'
+ hold = os.environ.get(key)
+ os.environ[key] = 'C'
+ out = py.process.cmdexec(string)
+ finally:
+ if hold:
+ os.environ[key] = hold
+ else:
+ del os.environ[key]
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ strerr = e.err.lower()
+ if strerr.find('not found') != -1:
+ raise py.error.ENOENT(self)
+ elif strerr.find("E200009:") != -1:
+ raise py.error.ENOENT(self)
+ if (strerr.find('file exists') != -1 or
+ strerr.find('file already exists') != -1 or
+ strerr.find('w150002:') != -1 or
+ strerr.find("can't create directory") != -1):
raise py.error.EEXIST(strerr) #self)
- raise
- return out
-
- def switch(self, url):
- """ switch to given URL. """
- self._authsvn('switch', [url])
-
- def checkout(self, url=None, rev=None):
- """ checkout from url to local wcpath. """
- args = []
- if url is None:
- url = self.url
- if rev is None or rev == -1:
+ raise
+ return out
+
+ def switch(self, url):
+ """ switch to given URL. """
+ self._authsvn('switch', [url])
+
+ def checkout(self, url=None, rev=None):
+ """ checkout from url to local wcpath. """
+ args = []
+ if url is None:
+ url = self.url
+ if rev is None or rev == -1:
if (sys.platform != 'win32' and
- _getsvnversion() == '1.3'):
- url += "@HEAD"
- else:
- if _getsvnversion() == '1.3':
- url += "@%d" % rev
- else:
- args.append('-r' + str(rev))
- args.append(url)
- self._authsvn('co', args)
-
- def update(self, rev='HEAD', interactive=True):
- """ update working copy item to given revision. (None -> HEAD). """
- opts = ['-r', rev]
- if not interactive:
- opts.append("--non-interactive")
- self._authsvn('up', opts)
-
- def write(self, content, mode='w'):
- """ write content into local filesystem wc. """
- self.localpath.write(content, mode)
-
- def dirpath(self, *args):
- """ return the directory Path of the current Path. """
- return self.__class__(self.localpath.dirpath(*args), auth=self.auth)
-
- def _ensuredirs(self):
- parent = self.dirpath()
- if parent.check(dir=0):
- parent._ensuredirs()
- if self.check(dir=0):
- self.mkdir()
- return self
-
- def ensure(self, *args, **kwargs):
- """ ensure that an args-joined path exists (by default as
-            a file). if you specify a keyword argument 'dir=True'
- then the path is forced to be a directory path.
- """
- p = self.join(*args)
- if p.check():
- if p.check(versioned=False):
- p.add()
- return p
- if kwargs.get('dir', 0):
- return p._ensuredirs()
- parent = p.dirpath()
- parent._ensuredirs()
- p.write("")
- p.add()
- return p
-
- def mkdir(self, *args):
- """ create & return the directory joined with args. """
- if args:
- return self.join(*args).mkdir()
- else:
- self._svn('mkdir')
- return self
-
- def add(self):
- """ add ourself to svn """
- self._svn('add')
-
- def remove(self, rec=1, force=1):
- """ remove a file or a directory tree. 'rec'ursive is
- ignored and considered always true (because of
-            underlying svn semantics).
- """
- assert rec, "svn cannot remove non-recursively"
- if not self.check(versioned=True):
- # not added to svn (anymore?), just remove
- py.path.local(self).remove()
- return
- flags = []
- if force:
- flags.append('--force')
- self._svn('remove', *flags)
-
- def copy(self, target):
- """ copy path to target."""
- py.process.cmdexec("svn copy %s %s" %(str(self), str(target)))
-
- def rename(self, target):
- """ rename this path to target. """
- py.process.cmdexec("svn move --force %s %s" %(str(self), str(target)))
-
- def lock(self):
- """ set a lock (exclusive) on the resource """
- out = self._authsvn('lock').strip()
- if not out:
- # warning or error, raise exception
- raise ValueError("unknown error in svn lock command")
-
- def unlock(self):
- """ unset a previously set lock """
- out = self._authsvn('unlock').strip()
- if out.startswith('svn:'):
- # warning or error, raise exception
- raise Exception(out[4:])
-
- def cleanup(self):
- """ remove any locks from the resource """
- # XXX should be fixed properly!!!
- try:
- self.unlock()
- except:
- pass
-
- def status(self, updates=0, rec=0, externals=0):
- """ return (collective) Status object for this file. """
- # http://svnbook.red-bean.com/book.html#svn-ch-3-sect-4.3.1
- # 2201 2192 jum test
- # XXX
- if externals:
- raise ValueError("XXX cannot perform status() "
- "on external items yet")
- else:
- #1.2 supports: externals = '--ignore-externals'
- externals = ''
- if rec:
- rec= ''
- else:
- rec = '--non-recursive'
-
- # XXX does not work on all subversion versions
- #if not externals:
- # externals = '--ignore-externals'
-
- if updates:
- updates = '-u'
- else:
- updates = ''
-
- try:
- cmd = 'status -v --xml --no-ignore %s %s %s' % (
- updates, rec, externals)
- out = self._authsvn(cmd)
- except py.process.cmdexec.Error:
- cmd = 'status -v --no-ignore %s %s %s' % (
- updates, rec, externals)
- out = self._authsvn(cmd)
- rootstatus = WCStatus(self).fromstring(out, self)
- else:
- rootstatus = XMLWCStatus(self).fromstring(out, self)
- return rootstatus
-
- def diff(self, rev=None):
- """ return a diff of the current path against revision rev (defaulting
- to the last one).
- """
- args = []
- if rev is not None:
- args.append("-r %d" % rev)
- out = self._authsvn('diff', args)
- return out
-
- def blame(self):
- """ return a list of tuples of three elements:
-            (revision, committer, line)
- """
- out = self._svn('blame')
- result = []
- blamelines = out.splitlines()
- reallines = py.path.svnurl(self.url).readlines()
- for i, (blameline, line) in enumerate(
- zip(blamelines, reallines)):
- m = rex_blame.match(blameline)
- if not m:
- raise ValueError("output line %r of svn blame does not match "
- "expected format" % (line, ))
- rev, name, _ = m.groups()
- result.append((int(rev), name, line))
- return result
-
- _rex_commit = re.compile(r'.*Committed revision (\d+)\.$', re.DOTALL)
- def commit(self, msg='', rec=1):
- """ commit with support for non-recursive commits """
- # XXX i guess escaping should be done better here?!?
- cmd = 'commit -m "%s" --force-log' % (msg.replace('"', '\\"'),)
- if not rec:
- cmd += ' -N'
- out = self._authsvn(cmd)
- try:
- del cache.info[self]
- except KeyError:
- pass
- if out:
- m = self._rex_commit.match(out)
- return int(m.group(1))
-
- def propset(self, name, value, *args):
- """ set property name to value on this path. """
- d = py.path.local.mkdtemp()
- try:
- p = d.join('value')
- p.write(value)
- self._svn('propset', name, '--file', str(p), *args)
- finally:
- d.remove()
-
- def propget(self, name):
- """ get property name on this path. """
- res = self._svn('propget', name)
- return res[:-1] # strip trailing newline
-
- def propdel(self, name):
- """ delete property name on this path. """
- res = self._svn('propdel', name)
- return res[:-1] # strip trailing newline
-
- def proplist(self, rec=0):
- """ return a mapping of property names to property values.
-If rec is True, then return a dictionary mapping sub-paths to such mappings.
-"""
- if rec:
- res = self._svn('proplist -R')
- return make_recursive_propdict(self, res)
- else:
- res = self._svn('proplist')
- lines = res.split('\n')
- lines = [x.strip() for x in lines[1:]]
- return PropListDict(self, lines)
-
- def revert(self, rec=0):
- """ revert the local changes of this path. if rec is True, do so
-recursively. """
- if rec:
- result = self._svn('revert -R')
- else:
- result = self._svn('revert')
- return result
-
- def new(self, **kw):
- """ create a modified version of this path. A 'rev' argument
- indicates a new revision.
- the following keyword arguments modify various path parts:
-
- http://host.com/repo/path/file.ext
- |-----------------------| dirname
- |------| basename
- |--| purebasename
- |--| ext
- """
- if kw:
- localpath = self.localpath.new(**kw)
- else:
- localpath = self.localpath
- return self.__class__(localpath, auth=self.auth)
-
- def join(self, *args, **kwargs):
- """ return a new Path (with the same revision) which is composed
- of the self Path followed by 'args' path components.
- """
- if not args:
- return self
- localpath = self.localpath.join(*args, **kwargs)
- return self.__class__(localpath, auth=self.auth)
-
- def info(self, usecache=1):
- """ return an Info structure with svn-provided information. """
- info = usecache and cache.info.get(self)
- if not info:
- try:
- output = self._svn('info')
- except py.process.cmdexec.Error:
- e = sys.exc_info()[1]
- if e.err.find('Path is not a working copy directory') != -1:
- raise py.error.ENOENT(self, e.err)
- elif e.err.find("is not under version control") != -1:
- raise py.error.ENOENT(self, e.err)
- raise
- # XXX SVN 1.3 has output on stderr instead of stdout (while it does
-            # return 0!), so a bit nasty, but we assume nothing relevant
-            # is written to stderr...
- if (output.strip() == '' or
- output.lower().find('not a versioned resource') != -1):
- raise py.error.ENOENT(self, output)
- info = InfoSvnWCCommand(output)
-
- # Can't reliably compare on Windows without access to win32api
+ _getsvnversion() == '1.3'):
+ url += "@HEAD"
+ else:
+ if _getsvnversion() == '1.3':
+ url += "@%d" % rev
+ else:
+ args.append('-r' + str(rev))
+ args.append(url)
+ self._authsvn('co', args)
+
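+    # Usage sketch (hypothetical URL): wc.checkout('file:///repo/trunk', rev=5)
+    # passes '-r5' plus the URL to 'svn co' on recent clients; for svn 1.3
+    # the revision is appended to the URL as a peg ('...@5') instead.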
+ def update(self, rev='HEAD', interactive=True):
+ """ update working copy item to given revision. (None -> HEAD). """
+ opts = ['-r', rev]
+ if not interactive:
+ opts.append("--non-interactive")
+ self._authsvn('up', opts)
+
+ def write(self, content, mode='w'):
+ """ write content into local filesystem wc. """
+ self.localpath.write(content, mode)
+
+ def dirpath(self, *args):
+ """ return the directory Path of the current Path. """
+ return self.__class__(self.localpath.dirpath(*args), auth=self.auth)
+
+ def _ensuredirs(self):
+ parent = self.dirpath()
+ if parent.check(dir=0):
+ parent._ensuredirs()
+ if self.check(dir=0):
+ self.mkdir()
+ return self
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+            a file). if you specify a keyword argument 'dir=True'
+ then the path is forced to be a directory path.
+ """
+ p = self.join(*args)
+ if p.check():
+ if p.check(versioned=False):
+ p.add()
+ return p
+ if kwargs.get('dir', 0):
+ return p._ensuredirs()
+ parent = p.dirpath()
+ parent._ensuredirs()
+ p.write("")
+ p.add()
+ return p
+
+ def mkdir(self, *args):
+ """ create & return the directory joined with args. """
+ if args:
+ return self.join(*args).mkdir()
+ else:
+ self._svn('mkdir')
+ return self
+
+ def add(self):
+ """ add ourself to svn """
+ self._svn('add')
+
+ def remove(self, rec=1, force=1):
+ """ remove a file or a directory tree. 'rec'ursive is
+ ignored and considered always true (because of
+            underlying svn semantics).
+ """
+ assert rec, "svn cannot remove non-recursively"
+ if not self.check(versioned=True):
+ # not added to svn (anymore?), just remove
+ py.path.local(self).remove()
+ return
+ flags = []
+ if force:
+ flags.append('--force')
+ self._svn('remove', *flags)
+
+ def copy(self, target):
+ """ copy path to target."""
+ py.process.cmdexec("svn copy %s %s" %(str(self), str(target)))
+
+ def rename(self, target):
+ """ rename this path to target. """
+ py.process.cmdexec("svn move --force %s %s" %(str(self), str(target)))
+
+ def lock(self):
+ """ set a lock (exclusive) on the resource """
+ out = self._authsvn('lock').strip()
+ if not out:
+ # warning or error, raise exception
+ raise ValueError("unknown error in svn lock command")
+
+ def unlock(self):
+ """ unset a previously set lock """
+ out = self._authsvn('unlock').strip()
+ if out.startswith('svn:'):
+ # warning or error, raise exception
+ raise Exception(out[4:])
+
+ def cleanup(self):
+ """ remove any locks from the resource """
+ # XXX should be fixed properly!!!
+ try:
+ self.unlock()
+ except:
+ pass
+
+ def status(self, updates=0, rec=0, externals=0):
+ """ return (collective) Status object for this file. """
+ # http://svnbook.red-bean.com/book.html#svn-ch-3-sect-4.3.1
+ # 2201 2192 jum test
+ # XXX
+ if externals:
+ raise ValueError("XXX cannot perform status() "
+ "on external items yet")
+ else:
+ #1.2 supports: externals = '--ignore-externals'
+ externals = ''
+ if rec:
+ rec= ''
+ else:
+ rec = '--non-recursive'
+
+ # XXX does not work on all subversion versions
+ #if not externals:
+ # externals = '--ignore-externals'
+
+ if updates:
+ updates = '-u'
+ else:
+ updates = ''
+
+ try:
+ cmd = 'status -v --xml --no-ignore %s %s %s' % (
+ updates, rec, externals)
+ out = self._authsvn(cmd)
+ except py.process.cmdexec.Error:
+ cmd = 'status -v --no-ignore %s %s %s' % (
+ updates, rec, externals)
+ out = self._authsvn(cmd)
+ rootstatus = WCStatus(self).fromstring(out, self)
+ else:
+ rootstatus = XMLWCStatus(self).fromstring(out, self)
+ return rootstatus
+
+ def diff(self, rev=None):
+ """ return a diff of the current path against revision rev (defaulting
+ to the last one).
+ """
+ args = []
+ if rev is not None:
+ args.append("-r %d" % rev)
+ out = self._authsvn('diff', args)
+ return out
+
+ def blame(self):
+ """ return a list of tuples of three elements:
+            (revision, committer, line)
+ """
+ out = self._svn('blame')
+ result = []
+ blamelines = out.splitlines()
+ reallines = py.path.svnurl(self.url).readlines()
+ for i, (blameline, line) in enumerate(
+ zip(blamelines, reallines)):
+ m = rex_blame.match(blameline)
+ if not m:
+ raise ValueError("output line %r of svn blame does not match "
+ "expected format" % (line, ))
+ rev, name, _ = m.groups()
+ result.append((int(rev), name, line))
+ return result
+
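+    # Illustrative sketch: a blame output line such as
+    # '  2100   hpk some text' matches rex_blame as groups
+    # ('2100', 'hpk', 'some text'); the stored line text itself is taken
+    # from the corresponding svnurl readlines() entry.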
+ _rex_commit = re.compile(r'.*Committed revision (\d+)\.$', re.DOTALL)
+ def commit(self, msg='', rec=1):
+ """ commit with support for non-recursive commits """
+ # XXX i guess escaping should be done better here?!?
+ cmd = 'commit -m "%s" --force-log' % (msg.replace('"', '\\"'),)
+ if not rec:
+ cmd += ' -N'
+ out = self._authsvn(cmd)
+ try:
+ del cache.info[self]
+ except KeyError:
+ pass
+ if out:
+ m = self._rex_commit.match(out)
+ return int(m.group(1))
+
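+    # Illustrative sketch: successful commit output ends with something
+    # like 'Committed revision 2100.', so commit('msg') would return 2100
+    # here via _rex_commit.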
+ def propset(self, name, value, *args):
+ """ set property name to value on this path. """
+ d = py.path.local.mkdtemp()
+ try:
+ p = d.join('value')
+ p.write(value)
+ self._svn('propset', name, '--file', str(p), *args)
+ finally:
+ d.remove()
+
+ def propget(self, name):
+ """ get property name on this path. """
+ res = self._svn('propget', name)
+ return res[:-1] # strip trailing newline
+
+ def propdel(self, name):
+ """ delete property name on this path. """
+ res = self._svn('propdel', name)
+ return res[:-1] # strip trailing newline
+
+ def proplist(self, rec=0):
+ """ return a mapping of property names to property values.
+If rec is True, then return a dictionary mapping sub-paths to such mappings.
+"""
+ if rec:
+ res = self._svn('proplist -R')
+ return make_recursive_propdict(self, res)
+ else:
+ res = self._svn('proplist')
+ lines = res.split('\n')
+ lines = [x.strip() for x in lines[1:]]
+ return PropListDict(self, lines)
+
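+    # Usage sketch: non-recursive 'svn proplist' output starts with a
+    # "Properties on ..." header line, which is dropped ([1:]); the
+    # remaining stripped names become the lazy keys of the PropListDict.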
+ def revert(self, rec=0):
+ """ revert the local changes of this path. if rec is True, do so
+recursively. """
+ if rec:
+ result = self._svn('revert -R')
+ else:
+ result = self._svn('revert')
+ return result
+
+ def new(self, **kw):
+ """ create a modified version of this path. A 'rev' argument
+ indicates a new revision.
+ the following keyword arguments modify various path parts:
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ if kw:
+ localpath = self.localpath.new(**kw)
+ else:
+ localpath = self.localpath
+ return self.__class__(localpath, auth=self.auth)
+
+ def join(self, *args, **kwargs):
+ """ return a new Path (with the same revision) which is composed
+ of the self Path followed by 'args' path components.
+ """
+ if not args:
+ return self
+ localpath = self.localpath.join(*args, **kwargs)
+ return self.__class__(localpath, auth=self.auth)
+
+ def info(self, usecache=1):
+ """ return an Info structure with svn-provided information. """
+ info = usecache and cache.info.get(self)
+ if not info:
+ try:
+ output = self._svn('info')
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('Path is not a working copy directory') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find("is not under version control") != -1:
+ raise py.error.ENOENT(self, e.err)
+ raise
+ # XXX SVN 1.3 has output on stderr instead of stdout (while it does
+            # return 0!), so a bit nasty, but we assume nothing relevant
+            # is written to stderr...
+ if (output.strip() == '' or
+ output.lower().find('not a versioned resource') != -1):
+ raise py.error.ENOENT(self, output)
+ info = InfoSvnWCCommand(output)
+
+ # Can't reliably compare on Windows without access to win32api
if sys.platform != 'win32':
- if info.path != self.localpath:
- raise py.error.ENOENT(self, "not a versioned resource:" +
- " %s != %s" % (info.path, self.localpath))
- cache.info[self] = info
- return info
-
- def listdir(self, fil=None, sort=None):
- """ return a sequence of Paths.
-
- listdir will return either a tuple or a list of paths
- depending on implementation choices.
- """
- if isinstance(fil, str):
- fil = common.FNMatcher(fil)
- # XXX unify argument naming with LocalPath.listdir
- def notsvn(path):
- return path.basename != '.svn'
-
- paths = []
- for localpath in self.localpath.listdir(notsvn):
- p = self.__class__(localpath, auth=self.auth)
- if notsvn(p) and (not fil or fil(p)):
- paths.append(p)
- self._sortlist(paths, sort)
- return paths
-
- def open(self, mode='r'):
- """ return an opened file with the given mode. """
- return open(self.strpath, mode)
-
- def _getbyspec(self, spec):
- return self.localpath._getbyspec(spec)
-
- class Checkers(py.path.local.Checkers):
- def __init__(self, path):
- self.svnwcpath = path
- self.path = path.localpath
- def versioned(self):
- try:
- s = self.svnwcpath.info()
- except (py.error.ENOENT, py.error.EEXIST):
- return False
- except py.process.cmdexec.Error:
- e = sys.exc_info()[1]
- if e.err.find('is not a working copy')!=-1:
- return False
- if e.err.lower().find('not a versioned resource') != -1:
- return False
- raise
- else:
- return True
-
- def log(self, rev_start=None, rev_end=1, verbose=False):
- """ return a list of LogEntry instances for this path.
-rev_start is the starting revision (defaulting to the first one).
-rev_end is the last revision (defaulting to HEAD).
-if verbose is True, then the LogEntry instances also know which files changed.
-"""
- assert self.check() # make it simpler for the pipe
- rev_start = rev_start is None and "HEAD" or rev_start
- rev_end = rev_end is None and "HEAD" or rev_end
- if rev_start == "HEAD" and rev_end == 1:
- rev_opt = ""
- else:
- rev_opt = "-r %s:%s" % (rev_start, rev_end)
- verbose_opt = verbose and "-v" or ""
- locale_env = fixlocale()
- # some blather on stderr
- auth_opt = self._makeauthoptions()
- #stdin, stdout, stderr = os.popen3(locale_env +
- # 'svn log --xml %s %s %s "%s"' % (
- # rev_opt, verbose_opt, auth_opt,
- # self.strpath))
- cmd = locale_env + 'svn log --xml %s %s %s "%s"' % (
- rev_opt, verbose_opt, auth_opt, self.strpath)
-
- popen = subprocess.Popen(cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- shell=True,
- )
- stdout, stderr = popen.communicate()
- stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
- minidom,ExpatError = importxml()
- try:
- tree = minidom.parseString(stdout)
- except ExpatError:
- raise ValueError('no such revision')
- result = []
- for logentry in filter(None, tree.firstChild.childNodes):
- if logentry.nodeType == logentry.ELEMENT_NODE:
- result.append(LogEntry(logentry))
- return result
-
- def size(self):
- """ Return the size of the file content of the Path. """
- return self.info().size
-
- def mtime(self):
- """ Return the last modification time of the file. """
- return self.info().mtime
-
- def __hash__(self):
- return hash((self.strpath, self.__class__, self.auth))
-
-
-class WCStatus:
- attrnames = ('modified','added', 'conflict', 'unchanged', 'external',
- 'deleted', 'prop_modified', 'unknown', 'update_available',
- 'incomplete', 'kindmismatch', 'ignored', 'locked', 'replaced'
- )
-
- def __init__(self, wcpath, rev=None, modrev=None, author=None):
- self.wcpath = wcpath
- self.rev = rev
- self.modrev = modrev
- self.author = author
-
- for name in self.attrnames:
- setattr(self, name, [])
-
- def allpath(self, sort=True, **kw):
- d = {}
- for name in self.attrnames:
- if name not in kw or kw[name]:
- for path in getattr(self, name):
- d[path] = 1
- l = d.keys()
- if sort:
- l.sort()
- return l
-
- # XXX a bit scary to assume there's always 2 spaces between username and
- # path, however with win32 allowing spaces in user names there doesn't
- # seem to be a more solid approach :(
- _rex_status = re.compile(r'\s+(\d+|-)\s+(\S+)\s+(.+?)\s{2,}(.*)')
-
- def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
- """ return a new WCStatus object from data 's'
- """
- rootstatus = WCStatus(rootwcpath, rev, modrev, author)
- update_rev = None
- for line in data.split('\n'):
- if not line.strip():
- continue
- #print "processing %r" % line
- flags, rest = line[:8], line[8:]
- # first column
- c0,c1,c2,c3,c4,c5,x6,c7 = flags
- #if '*' in line:
- # print "flags", repr(flags), "rest", repr(rest)
-
- if c0 in '?XI':
- fn = line.split(None, 1)[1]
- if c0 == '?':
- wcpath = rootwcpath.join(fn, abs=1)
- rootstatus.unknown.append(wcpath)
- elif c0 == 'X':
- wcpath = rootwcpath.__class__(
- rootwcpath.localpath.join(fn, abs=1),
- auth=rootwcpath.auth)
- rootstatus.external.append(wcpath)
- elif c0 == 'I':
- wcpath = rootwcpath.join(fn, abs=1)
- rootstatus.ignored.append(wcpath)
-
- continue
-
- #elif c0 in '~!' or c4 == 'S':
- # raise NotImplementedError("received flag %r" % c0)
-
- m = WCStatus._rex_status.match(rest)
- if not m:
- if c7 == '*':
- fn = rest.strip()
- wcpath = rootwcpath.join(fn, abs=1)
- rootstatus.update_available.append(wcpath)
- continue
- if line.lower().find('against revision:')!=-1:
- update_rev = int(rest.split(':')[1].strip())
- continue
- if line.lower().find('status on external') > -1:
- # XXX not sure what to do here... perhaps we want to
- # store some state instead of just continuing, as right
- # now it makes the top-level external get added twice
- # (once as external, once as 'normal' unchanged item)
- # because of the way SVN presents external items
- continue
- # keep trying
- raise ValueError("could not parse line %r" % line)
- else:
- rev, modrev, author, fn = m.groups()
- wcpath = rootwcpath.join(fn, abs=1)
- #assert wcpath.check()
- if c0 == 'M':
- assert wcpath.check(file=1), "didn't expect a directory with changed content here"
- rootstatus.modified.append(wcpath)
- elif c0 == 'A' or c3 == '+' :
- rootstatus.added.append(wcpath)
- elif c0 == 'D':
- rootstatus.deleted.append(wcpath)
- elif c0 == 'C':
- rootstatus.conflict.append(wcpath)
- elif c0 == '~':
- rootstatus.kindmismatch.append(wcpath)
- elif c0 == '!':
- rootstatus.incomplete.append(wcpath)
- elif c0 == 'R':
- rootstatus.replaced.append(wcpath)
- elif not c0.strip():
- rootstatus.unchanged.append(wcpath)
- else:
- raise NotImplementedError("received flag %r" % c0)
-
- if c1 == 'M':
- rootstatus.prop_modified.append(wcpath)
- # XXX do we cover all client versions here?
- if c2 == 'L' or c5 == 'K':
- rootstatus.locked.append(wcpath)
- if c7 == '*':
- rootstatus.update_available.append(wcpath)
-
- if wcpath == rootwcpath:
- rootstatus.rev = rev
- rootstatus.modrev = modrev
- rootstatus.author = author
- if update_rev:
- rootstatus.update_rev = update_rev
- continue
- return rootstatus
- fromstring = staticmethod(fromstring)
-
-class XMLWCStatus(WCStatus):
- def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
- """ parse 'data' (XML string as outputted by svn st) into a status obj
- """
- # XXX for externals, the path is shown twice: once
- # with external information, and once with full info as if
- # the item was a normal non-external... the current way of
- # dealing with this issue is by ignoring it - this does make
- # externals appear as external items as well as 'normal',
- # unchanged ones in the status object so this is far from ideal
- rootstatus = WCStatus(rootwcpath, rev, modrev, author)
- update_rev = None
- minidom, ExpatError = importxml()
- try:
- doc = minidom.parseString(data)
- except ExpatError:
- e = sys.exc_info()[1]
- raise ValueError(str(e))
- urevels = doc.getElementsByTagName('against')
- if urevels:
- rootstatus.update_rev = urevels[-1].getAttribute('revision')
- for entryel in doc.getElementsByTagName('entry'):
- path = entryel.getAttribute('path')
- statusel = entryel.getElementsByTagName('wc-status')[0]
- itemstatus = statusel.getAttribute('item')
-
- if itemstatus == 'unversioned':
- wcpath = rootwcpath.join(path, abs=1)
- rootstatus.unknown.append(wcpath)
- continue
- elif itemstatus == 'external':
- wcpath = rootwcpath.__class__(
- rootwcpath.localpath.join(path, abs=1),
- auth=rootwcpath.auth)
- rootstatus.external.append(wcpath)
- continue
- elif itemstatus == 'ignored':
- wcpath = rootwcpath.join(path, abs=1)
- rootstatus.ignored.append(wcpath)
- continue
- elif itemstatus == 'incomplete':
- wcpath = rootwcpath.join(path, abs=1)
- rootstatus.incomplete.append(wcpath)
- continue
-
- rev = statusel.getAttribute('revision')
- if itemstatus == 'added' or itemstatus == 'none':
- rev = '0'
- modrev = '?'
- author = '?'
- date = ''
- elif itemstatus == "replaced":
- pass
- else:
- #print entryel.toxml()
- commitel = entryel.getElementsByTagName('commit')[0]
- if commitel:
- modrev = commitel.getAttribute('revision')
- author = ''
- author_els = commitel.getElementsByTagName('author')
- if author_els:
- for c in author_els[0].childNodes:
- author += c.nodeValue
- date = ''
- for c in commitel.getElementsByTagName('date')[0]\
- .childNodes:
- date += c.nodeValue
-
- wcpath = rootwcpath.join(path, abs=1)
-
- assert itemstatus != 'modified' or wcpath.check(file=1), (
-                'didn\'t expect a directory with changed content here')
-
- itemattrname = {
- 'normal': 'unchanged',
- 'unversioned': 'unknown',
- 'conflicted': 'conflict',
- 'none': 'added',
- }.get(itemstatus, itemstatus)
-
- attr = getattr(rootstatus, itemattrname)
- attr.append(wcpath)
-
- propsstatus = statusel.getAttribute('props')
- if propsstatus not in ('none', 'normal'):
- rootstatus.prop_modified.append(wcpath)
-
- if wcpath == rootwcpath:
- rootstatus.rev = rev
- rootstatus.modrev = modrev
- rootstatus.author = author
- rootstatus.date = date
-
- # handle repos-status element (remote info)
- rstatusels = entryel.getElementsByTagName('repos-status')
- if rstatusels:
- rstatusel = rstatusels[0]
- ritemstatus = rstatusel.getAttribute('item')
- if ritemstatus in ('added', 'modified'):
- rootstatus.update_available.append(wcpath)
-
- lockels = entryel.getElementsByTagName('lock')
- if len(lockels):
- rootstatus.locked.append(wcpath)
-
- return rootstatus
- fromstring = staticmethod(fromstring)
-
-class InfoSvnWCCommand:
- def __init__(self, output):
- # Path: test
- # URL: http://codespeak.net/svn/std.path/trunk/dist/std.path/test
- # Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
- # Revision: 2151
- # Node Kind: directory
- # Schedule: normal
- # Last Changed Author: hpk
- # Last Changed Rev: 2100
- # Last Changed Date: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
- # Properties Last Updated: 2003-11-03 14:47:48 +0100 (Mon, 03 Nov 2003)
-
- d = {}
- for line in output.split('\n'):
- if not line.strip():
- continue
- key, value = line.split(':', 1)
- key = key.lower().replace(' ', '')
- value = value.strip()
- d[key] = value
- try:
- self.url = d['url']
- except KeyError:
- raise ValueError("Not a versioned resource")
- #raise ValueError, "Not a versioned resource %r" % path
- self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
- try:
- self.rev = int(d['revision'])
- except KeyError:
- self.rev = None
-
- self.path = py.path.local(d['path'])
- self.size = self.path.size()
- if 'lastchangedrev' in d:
- self.created_rev = int(d['lastchangedrev'])
- if 'lastchangedauthor' in d:
- self.last_author = d['lastchangedauthor']
- if 'lastchangeddate' in d:
- self.mtime = parse_wcinfotime(d['lastchangeddate'])
- self.time = self.mtime * 1000000
-
- def __eq__(self, other):
- return self.__dict__ == other.__dict__
-
-def parse_wcinfotime(timestr):
- """ Returns seconds since epoch, UTC. """
- # example: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
- m = re.match(r'(\d+-\d+-\d+ \d+:\d+:\d+) ([+-]\d+) .*', timestr)
- if not m:
- raise ValueError("timestring %r does not match" % timestr)
- timestr, timezone = m.groups()
- # do not handle timezone specially, return value should be UTC
- parsedtime = time.strptime(timestr, "%Y-%m-%d %H:%M:%S")
- return calendar.timegm(parsedtime)
-
-def make_recursive_propdict(wcroot,
- output,
- rex = re.compile("Properties on '(.*)':")):
- """ Return a dictionary of path->PropListDict mappings. """
- lines = [x for x in output.split('\n') if x]
- pdict = {}
- while lines:
- line = lines.pop(0)
- m = rex.match(line)
- if not m:
- raise ValueError("could not parse propget-line: %r" % line)
- path = m.groups()[0]
- wcpath = wcroot.join(path, abs=1)
- propnames = []
- while lines and lines[0].startswith(' '):
- propname = lines.pop(0).strip()
- propnames.append(propname)
- assert propnames, "must have found properties!"
- pdict[wcpath] = PropListDict(wcpath, propnames)
- return pdict
-
-
-def importxml(cache=[]):
- if cache:
- return cache
- from xml.dom import minidom
- from xml.parsers.expat import ExpatError
- cache.extend([minidom, ExpatError])
- return cache
-
-class LogEntry:
- def __init__(self, logentry):
- self.rev = int(logentry.getAttribute('revision'))
- for lpart in filter(None, logentry.childNodes):
- if lpart.nodeType == lpart.ELEMENT_NODE:
- if lpart.nodeName == 'author':
- self.author = lpart.firstChild.nodeValue
- elif lpart.nodeName == 'msg':
- if lpart.firstChild:
- self.msg = lpart.firstChild.nodeValue
- else:
- self.msg = ''
- elif lpart.nodeName == 'date':
- #2003-07-29T20:05:11.598637Z
- timestr = lpart.firstChild.nodeValue
- self.date = parse_apr_time(timestr)
- elif lpart.nodeName == 'paths':
- self.strpaths = []
- for ppart in filter(None, lpart.childNodes):
- if ppart.nodeType == ppart.ELEMENT_NODE:
- self.strpaths.append(PathEntry(ppart))
- def __repr__(self):
- return '<Logentry rev=%d author=%s date=%s>' % (
- self.rev, self.author, self.date)
-
-
+ if info.path != self.localpath:
+ raise py.error.ENOENT(self, "not a versioned resource:" +
+ " %s != %s" % (info.path, self.localpath))
+ cache.info[self] = info
+ return info
+
+ def listdir(self, fil=None, sort=None):
+ """ return a sequence of Paths.
+
+ listdir will return either a tuple or a list of paths
+ depending on implementation choices.
+ """
+ if isinstance(fil, str):
+ fil = common.FNMatcher(fil)
+ # XXX unify argument naming with LocalPath.listdir
+ def notsvn(path):
+ return path.basename != '.svn'
+
+ paths = []
+ for localpath in self.localpath.listdir(notsvn):
+ p = self.__class__(localpath, auth=self.auth)
+ if notsvn(p) and (not fil or fil(p)):
+ paths.append(p)
+ self._sortlist(paths, sort)
+ return paths
+
+ def open(self, mode='r'):
+ """ return an opened file with the given mode. """
+ return open(self.strpath, mode)
+
+ def _getbyspec(self, spec):
+ return self.localpath._getbyspec(spec)
+
+ class Checkers(py.path.local.Checkers):
+ def __init__(self, path):
+ self.svnwcpath = path
+ self.path = path.localpath
+ def versioned(self):
+ try:
+ s = self.svnwcpath.info()
+ except (py.error.ENOENT, py.error.EEXIST):
+ return False
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('is not a working copy')!=-1:
+ return False
+ if e.err.lower().find('not a versioned resource') != -1:
+ return False
+ raise
+ else:
+ return True
+
+ def log(self, rev_start=None, rev_end=1, verbose=False):
+ """ return a list of LogEntry instances for this path.
+rev_start is the starting revision (defaulting to the first one).
+rev_end is the last revision (defaulting to HEAD).
+if verbose is True, then the LogEntry instances also know which files changed.
+"""
+ assert self.check() # make it simpler for the pipe
+ rev_start = rev_start is None and "HEAD" or rev_start
+ rev_end = rev_end is None and "HEAD" or rev_end
+ if rev_start == "HEAD" and rev_end == 1:
+ rev_opt = ""
+ else:
+ rev_opt = "-r %s:%s" % (rev_start, rev_end)
+ verbose_opt = verbose and "-v" or ""
+ locale_env = fixlocale()
+ # some blather on stderr
+ auth_opt = self._makeauthoptions()
+ #stdin, stdout, stderr = os.popen3(locale_env +
+ # 'svn log --xml %s %s %s "%s"' % (
+ # rev_opt, verbose_opt, auth_opt,
+ # self.strpath))
+ cmd = locale_env + 'svn log --xml %s %s %s "%s"' % (
+ rev_opt, verbose_opt, auth_opt, self.strpath)
+
+ popen = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ shell=True,
+ )
+ stdout, stderr = popen.communicate()
+ stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
+ minidom,ExpatError = importxml()
+ try:
+ tree = minidom.parseString(stdout)
+ except ExpatError:
+ raise ValueError('no such revision')
+ result = []
+ for logentry in filter(None, tree.firstChild.childNodes):
+ if logentry.nodeType == logentry.ELEMENT_NODE:
+ result.append(LogEntry(logentry))
+ return result
+
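+    # Usage sketch (hypothetical revisions): wc.log(rev_start=10, rev_end=1)
+    # runs 'svn log --xml -r 10:1 ... "<wcpath>"'; the defaults (None, 1)
+    # request the whole HEAD:1 history and omit the -r option entirely.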
+ def size(self):
+ """ Return the size of the file content of the Path. """
+ return self.info().size
+
+ def mtime(self):
+ """ Return the last modification time of the file. """
+ return self.info().mtime
+
+ def __hash__(self):
+ return hash((self.strpath, self.__class__, self.auth))
+
+
+class WCStatus:
+ attrnames = ('modified','added', 'conflict', 'unchanged', 'external',
+ 'deleted', 'prop_modified', 'unknown', 'update_available',
+ 'incomplete', 'kindmismatch', 'ignored', 'locked', 'replaced'
+ )
+
+ def __init__(self, wcpath, rev=None, modrev=None, author=None):
+ self.wcpath = wcpath
+ self.rev = rev
+ self.modrev = modrev
+ self.author = author
+
+ for name in self.attrnames:
+ setattr(self, name, [])
+
+ def allpath(self, sort=True, **kw):
+ d = {}
+ for name in self.attrnames:
+ if name not in kw or kw[name]:
+ for path in getattr(self, name):
+ d[path] = 1
+ l = d.keys()
+ if sort:
+ l.sort()
+ return l
+
+ # XXX a bit scary to assume there's always 2 spaces between username and
+ # path, however with win32 allowing spaces in user names there doesn't
+ # seem to be a more solid approach :(
+ _rex_status = re.compile(r'\s+(\d+|-)\s+(\S+)\s+(.+?)\s{2,}(.*)')
+
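+    # Illustrative sketch: for a plain-format status line whose first
+    # column is 'M', _rex_status pulls (rev, modrev, author, fn) out of the
+    # remainder (author and path separated by two or more spaces) and the
+    # joined wcpath is filed under rootstatus.modified below.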
+ def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
+ """ return a new WCStatus object from data 's'
+ """
+ rootstatus = WCStatus(rootwcpath, rev, modrev, author)
+ update_rev = None
+ for line in data.split('\n'):
+ if not line.strip():
+ continue
+ #print "processing %r" % line
+ flags, rest = line[:8], line[8:]
+ # first column
+ c0,c1,c2,c3,c4,c5,x6,c7 = flags
+ #if '*' in line:
+ # print "flags", repr(flags), "rest", repr(rest)
+
+ if c0 in '?XI':
+ fn = line.split(None, 1)[1]
+ if c0 == '?':
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.unknown.append(wcpath)
+ elif c0 == 'X':
+ wcpath = rootwcpath.__class__(
+ rootwcpath.localpath.join(fn, abs=1),
+ auth=rootwcpath.auth)
+ rootstatus.external.append(wcpath)
+ elif c0 == 'I':
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.ignored.append(wcpath)
+
+ continue
+
+ #elif c0 in '~!' or c4 == 'S':
+ # raise NotImplementedError("received flag %r" % c0)
+
+ m = WCStatus._rex_status.match(rest)
+ if not m:
+ if c7 == '*':
+ fn = rest.strip()
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.update_available.append(wcpath)
+ continue
+ if line.lower().find('against revision:')!=-1:
+ update_rev = int(rest.split(':')[1].strip())
+ continue
+ if line.lower().find('status on external') > -1:
+ # XXX not sure what to do here... perhaps we want to
+ # store some state instead of just continuing, as right
+ # now it makes the top-level external get added twice
+ # (once as external, once as 'normal' unchanged item)
+ # because of the way SVN presents external items
+ continue
+ # keep trying
+ raise ValueError("could not parse line %r" % line)
+ else:
+ rev, modrev, author, fn = m.groups()
+ wcpath = rootwcpath.join(fn, abs=1)
+ #assert wcpath.check()
+ if c0 == 'M':
+ assert wcpath.check(file=1), "didn't expect a directory with changed content here"
+ rootstatus.modified.append(wcpath)
+ elif c0 == 'A' or c3 == '+' :
+ rootstatus.added.append(wcpath)
+ elif c0 == 'D':
+ rootstatus.deleted.append(wcpath)
+ elif c0 == 'C':
+ rootstatus.conflict.append(wcpath)
+ elif c0 == '~':
+ rootstatus.kindmismatch.append(wcpath)
+ elif c0 == '!':
+ rootstatus.incomplete.append(wcpath)
+ elif c0 == 'R':
+ rootstatus.replaced.append(wcpath)
+ elif not c0.strip():
+ rootstatus.unchanged.append(wcpath)
+ else:
+ raise NotImplementedError("received flag %r" % c0)
+
+ if c1 == 'M':
+ rootstatus.prop_modified.append(wcpath)
+ # XXX do we cover all client versions here?
+ if c2 == 'L' or c5 == 'K':
+ rootstatus.locked.append(wcpath)
+ if c7 == '*':
+ rootstatus.update_available.append(wcpath)
+
+ if wcpath == rootwcpath:
+ rootstatus.rev = rev
+ rootstatus.modrev = modrev
+ rootstatus.author = author
+ if update_rev:
+ rootstatus.update_rev = update_rev
+ continue
+ return rootstatus
+ fromstring = staticmethod(fromstring)
+
+class XMLWCStatus(WCStatus):
+ def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
+ """ parse 'data' (XML string as outputted by svn st) into a status obj
+ """
+ # XXX for externals, the path is shown twice: once
+ # with external information, and once with full info as if
+ # the item was a normal non-external... the current way of
+ # dealing with this issue is by ignoring it - this does make
+ # externals appear as external items as well as 'normal',
+ # unchanged ones in the status object so this is far from ideal
+ rootstatus = WCStatus(rootwcpath, rev, modrev, author)
+ update_rev = None
+ minidom, ExpatError = importxml()
+ try:
+ doc = minidom.parseString(data)
+ except ExpatError:
+ e = sys.exc_info()[1]
+ raise ValueError(str(e))
+ urevels = doc.getElementsByTagName('against')
+ if urevels:
+ rootstatus.update_rev = urevels[-1].getAttribute('revision')
+ for entryel in doc.getElementsByTagName('entry'):
+ path = entryel.getAttribute('path')
+ statusel = entryel.getElementsByTagName('wc-status')[0]
+ itemstatus = statusel.getAttribute('item')
+
+ if itemstatus == 'unversioned':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.unknown.append(wcpath)
+ continue
+ elif itemstatus == 'external':
+ wcpath = rootwcpath.__class__(
+ rootwcpath.localpath.join(path, abs=1),
+ auth=rootwcpath.auth)
+ rootstatus.external.append(wcpath)
+ continue
+ elif itemstatus == 'ignored':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.ignored.append(wcpath)
+ continue
+ elif itemstatus == 'incomplete':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.incomplete.append(wcpath)
+ continue
+
+ rev = statusel.getAttribute('revision')
+ if itemstatus == 'added' or itemstatus == 'none':
+ rev = '0'
+ modrev = '?'
+ author = '?'
+ date = ''
+ elif itemstatus == "replaced":
+ pass
+ else:
+ #print entryel.toxml()
+ commitel = entryel.getElementsByTagName('commit')[0]
+ if commitel:
+ modrev = commitel.getAttribute('revision')
+ author = ''
+ author_els = commitel.getElementsByTagName('author')
+ if author_els:
+ for c in author_els[0].childNodes:
+ author += c.nodeValue
+ date = ''
+ for c in commitel.getElementsByTagName('date')[0]\
+ .childNodes:
+ date += c.nodeValue
+
+ wcpath = rootwcpath.join(path, abs=1)
+
+ assert itemstatus != 'modified' or wcpath.check(file=1), (
+                'didn\'t expect a directory with changed content here')
+
+ itemattrname = {
+ 'normal': 'unchanged',
+ 'unversioned': 'unknown',
+ 'conflicted': 'conflict',
+ 'none': 'added',
+ }.get(itemstatus, itemstatus)
+
+ attr = getattr(rootstatus, itemattrname)
+ attr.append(wcpath)
+
+ propsstatus = statusel.getAttribute('props')
+ if propsstatus not in ('none', 'normal'):
+ rootstatus.prop_modified.append(wcpath)
+
+ if wcpath == rootwcpath:
+ rootstatus.rev = rev
+ rootstatus.modrev = modrev
+ rootstatus.author = author
+ rootstatus.date = date
+
+ # handle repos-status element (remote info)
+ rstatusels = entryel.getElementsByTagName('repos-status')
+ if rstatusels:
+ rstatusel = rstatusels[0]
+ ritemstatus = rstatusel.getAttribute('item')
+ if ritemstatus in ('added', 'modified'):
+ rootstatus.update_available.append(wcpath)
+
+ lockels = entryel.getElementsByTagName('lock')
+ if len(lockels):
+ rootstatus.locked.append(wcpath)
+
+ return rootstatus
+ fromstring = staticmethod(fromstring)
+
+class InfoSvnWCCommand:
+ def __init__(self, output):
+ # Path: test
+ # URL: http://codespeak.net/svn/std.path/trunk/dist/std.path/test
+ # Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
+ # Revision: 2151
+ # Node Kind: directory
+ # Schedule: normal
+ # Last Changed Author: hpk
+ # Last Changed Rev: 2100
+ # Last Changed Date: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
+ # Properties Last Updated: 2003-11-03 14:47:48 +0100 (Mon, 03 Nov 2003)
+
+ d = {}
+ for line in output.split('\n'):
+ if not line.strip():
+ continue
+ key, value = line.split(':', 1)
+ key = key.lower().replace(' ', '')
+ value = value.strip()
+ d[key] = value
+ try:
+ self.url = d['url']
+ except KeyError:
+ raise ValueError("Not a versioned resource")
+ #raise ValueError, "Not a versioned resource %r" % path
+ self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
+ try:
+ self.rev = int(d['revision'])
+ except KeyError:
+ self.rev = None
+
+ self.path = py.path.local(d['path'])
+ self.size = self.path.size()
+ if 'lastchangedrev' in d:
+ self.created_rev = int(d['lastchangedrev'])
+ if 'lastchangedauthor' in d:
+ self.last_author = d['lastchangedauthor']
+ if 'lastchangeddate' in d:
+ self.mtime = parse_wcinfotime(d['lastchangeddate'])
+ self.time = self.mtime * 1000000
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+def parse_wcinfotime(timestr):
+ """ Returns seconds since epoch, UTC. """
+ # example: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
+ m = re.match(r'(\d+-\d+-\d+ \d+:\d+:\d+) ([+-]\d+) .*', timestr)
+ if not m:
+ raise ValueError("timestring %r does not match" % timestr)
+ timestr, timezone = m.groups()
+ # do not handle timezone specially, return value should be UTC
+ parsedtime = time.strptime(timestr, "%Y-%m-%d %H:%M:%S")
+ return calendar.timegm(parsedtime)
+
+def make_recursive_propdict(wcroot,
+ output,
+ rex = re.compile("Properties on '(.*)':")):
+ """ Return a dictionary of path->PropListDict mappings. """
+ lines = [x for x in output.split('\n') if x]
+ pdict = {}
+ while lines:
+ line = lines.pop(0)
+ m = rex.match(line)
+ if not m:
+ raise ValueError("could not parse propget-line: %r" % line)
+ path = m.groups()[0]
+ wcpath = wcroot.join(path, abs=1)
+ propnames = []
+ while lines and lines[0].startswith(' '):
+ propname = lines.pop(0).strip()
+ propnames.append(propname)
+ assert propnames, "must have found properties!"
+ pdict[wcpath] = PropListDict(wcpath, propnames)
+ return pdict
+
+
+def importxml(cache=[]):
+ if cache:
+ return cache
+ from xml.dom import minidom
+ from xml.parsers.expat import ExpatError
+ cache.extend([minidom, ExpatError])
+ return cache
+
+class LogEntry:
+ def __init__(self, logentry):
+ self.rev = int(logentry.getAttribute('revision'))
+ for lpart in filter(None, logentry.childNodes):
+ if lpart.nodeType == lpart.ELEMENT_NODE:
+ if lpart.nodeName == 'author':
+ self.author = lpart.firstChild.nodeValue
+ elif lpart.nodeName == 'msg':
+ if lpart.firstChild:
+ self.msg = lpart.firstChild.nodeValue
+ else:
+ self.msg = ''
+ elif lpart.nodeName == 'date':
+ #2003-07-29T20:05:11.598637Z
+ timestr = lpart.firstChild.nodeValue
+ self.date = parse_apr_time(timestr)
+ elif lpart.nodeName == 'paths':
+ self.strpaths = []
+ for ppart in filter(None, lpart.childNodes):
+ if ppart.nodeType == ppart.ELEMENT_NODE:
+ self.strpaths.append(PathEntry(ppart))
+ def __repr__(self):
+        return '<LogEntry rev=%d author=%s date=%s>' % (
+ self.rev, self.author, self.date)
+
+
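A minimal sketch of feeding the LogEntry class above, assuming it runs inside
py/_path/svnwc.py (where parse_apr_time is also defined) and that `svn log --xml`
output is available; the sample XML below is illustrative:

    from xml.dom import minidom

    sample = '''<log>
      <logentry revision="2100">
        <author>hpk</author>
        <date>2003-07-29T20:05:11.598637Z</date>
        <msg>fix path handling</msg>
      </logentry>
    </log>'''

    doc = minidom.parseString(sample)
    entries = [LogEntry(el) for el in doc.getElementsByTagName('logentry')]
    print(entries[0].rev, entries[0].author, entries[0].msg)  # 2100 hpk fix path handling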
diff --git a/contrib/python/py/py/_process/__init__.py b/contrib/python/py/py/_process/__init__.py
index 26cabaa74a..86c714ad1a 100644
--- a/contrib/python/py/py/_process/__init__.py
+++ b/contrib/python/py/py/_process/__init__.py
@@ -1 +1 @@
-""" high-level sub-process handling """
+""" high-level sub-process handling """
diff --git a/contrib/python/py/py/_process/cmdexec.py b/contrib/python/py/py/_process/cmdexec.py
index b0c74b1e89..f83a249402 100644
--- a/contrib/python/py/py/_process/cmdexec.py
+++ b/contrib/python/py/py/_process/cmdexec.py
@@ -1,49 +1,49 @@
-import sys
-import subprocess
-import py
-from subprocess import Popen, PIPE
-
-def cmdexec(cmd):
- """ return unicode output of executing 'cmd' in a separate process.
-
-    raise a cmdexec.Error exception if the command failed.
-    the exception will provide an 'err' attribute containing
-    the error output from the command.
-    if the subprocess module does not provide proper encoding/unicode strings,
-    sys.getdefaultencoding() will be used; if that does not exist, 'UTF-8'.
- """
- process = subprocess.Popen(cmd, shell=True,
- universal_newlines=True,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = process.communicate()
- if sys.version_info[0] < 3: # on py3 we get unicode strings, on py2 not
- try:
- default_encoding = sys.getdefaultencoding() # jython may not have it
- except AttributeError:
- default_encoding = sys.stdout.encoding or 'UTF-8'
- out = unicode(out, process.stdout.encoding or default_encoding)
- err = unicode(err, process.stderr.encoding or default_encoding)
- status = process.poll()
- if status:
- raise ExecutionFailed(status, status, cmd, out, err)
- return out
-
-class ExecutionFailed(py.error.Error):
- def __init__(self, status, systemstatus, cmd, out, err):
- Exception.__init__(self)
- self.status = status
- self.systemstatus = systemstatus
- self.cmd = cmd
- self.err = err
- self.out = out
-
- def __str__(self):
- return "ExecutionFailed: %d %s\n%s" %(self.status, self.cmd, self.err)
-
-# export the exception under the name 'py.process.cmdexec.Error'
-cmdexec.Error = ExecutionFailed
-try:
- ExecutionFailed.__module__ = 'py.process.cmdexec'
- ExecutionFailed.__name__ = 'Error'
-except (AttributeError, TypeError):
- pass
+import sys
+import subprocess
+import py
+from subprocess import Popen, PIPE
+
+def cmdexec(cmd):
+ """ return unicode output of executing 'cmd' in a separate process.
+
+    raise a cmdexec.Error exception if the command failed.
+    the exception will provide an 'err' attribute containing
+    the error output from the command.
+    if the subprocess module does not provide proper encoding/unicode strings,
+    sys.getdefaultencoding() will be used; if that does not exist, 'UTF-8'.
+ """
+ process = subprocess.Popen(cmd, shell=True,
+ universal_newlines=True,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = process.communicate()
+ if sys.version_info[0] < 3: # on py3 we get unicode strings, on py2 not
+ try:
+ default_encoding = sys.getdefaultencoding() # jython may not have it
+ except AttributeError:
+ default_encoding = sys.stdout.encoding or 'UTF-8'
+ out = unicode(out, process.stdout.encoding or default_encoding)
+ err = unicode(err, process.stderr.encoding or default_encoding)
+ status = process.poll()
+ if status:
+ raise ExecutionFailed(status, status, cmd, out, err)
+ return out
+
+class ExecutionFailed(py.error.Error):
+ def __init__(self, status, systemstatus, cmd, out, err):
+ Exception.__init__(self)
+ self.status = status
+ self.systemstatus = systemstatus
+ self.cmd = cmd
+ self.err = err
+ self.out = out
+
+ def __str__(self):
+ return "ExecutionFailed: %d %s\n%s" %(self.status, self.cmd, self.err)
+
+# export the exception under the name 'py.process.cmdexec.Error'
+cmdexec.Error = ExecutionFailed
+try:
+ ExecutionFailed.__module__ = 'py.process.cmdexec'
+ ExecutionFailed.__name__ = 'Error'
+except (AttributeError, TypeError):
+ pass
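A usage sketch for cmdexec above, assuming the py package is importable; note
that the command string is passed to a shell, so quoting matters:

    import py

    try:
        out = py.process.cmdexec('echo hello')    # unicode stdout on success
        print(out.strip())
    except py.process.cmdexec.Error as err:       # alias for ExecutionFailed
        print('exit status %d, stderr: %s' % (err.status, err.err))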
diff --git a/contrib/python/py/py/_process/forkedfunc.py b/contrib/python/py/py/_process/forkedfunc.py
index d5b94751fa..1c28530688 100644
--- a/contrib/python/py/py/_process/forkedfunc.py
+++ b/contrib/python/py/py/_process/forkedfunc.py
@@ -1,120 +1,120 @@
-
-"""
- ForkedFunc provides a way to run a function in a forked process
- and get at its return value, stdout and stderr output as well
-    as signals and exit statuses.
-"""
-
-import py
-import os
-import sys
-import marshal
-
-
-def get_unbuffered_io(fd, filename):
- f = open(str(filename), "w")
- if fd != f.fileno():
- os.dup2(f.fileno(), fd)
- class AutoFlush:
- def write(self, data):
- f.write(data)
- f.flush()
- def __getattr__(self, name):
- return getattr(f, name)
- return AutoFlush()
-
-
-class ForkedFunc:
- EXITSTATUS_EXCEPTION = 3
-
-
- def __init__(self, fun, args=None, kwargs=None, nice_level=0,
- child_on_start=None, child_on_exit=None):
- if args is None:
- args = []
- if kwargs is None:
- kwargs = {}
- self.fun = fun
- self.args = args
- self.kwargs = kwargs
- self.tempdir = tempdir = py.path.local.mkdtemp()
- self.RETVAL = tempdir.ensure('retval')
- self.STDOUT = tempdir.ensure('stdout')
- self.STDERR = tempdir.ensure('stderr')
-
- pid = os.fork()
- if pid: # in parent process
- self.pid = pid
- else: # in child process
- self.pid = None
- self._child(nice_level, child_on_start, child_on_exit)
-
- def _child(self, nice_level, child_on_start, child_on_exit):
- # right now we need to call a function, but first we need to
- # map all IO that might happen
- sys.stdout = stdout = get_unbuffered_io(1, self.STDOUT)
- sys.stderr = stderr = get_unbuffered_io(2, self.STDERR)
- retvalf = self.RETVAL.open("wb")
- EXITSTATUS = 0
- try:
- if nice_level:
- os.nice(nice_level)
- try:
- if child_on_start is not None:
- child_on_start()
- retval = self.fun(*self.args, **self.kwargs)
- retvalf.write(marshal.dumps(retval))
- if child_on_exit is not None:
- child_on_exit()
- except:
- excinfo = py.code.ExceptionInfo()
- stderr.write(str(excinfo._getreprcrash()))
- EXITSTATUS = self.EXITSTATUS_EXCEPTION
- finally:
- stdout.close()
- stderr.close()
- retvalf.close()
- os.close(1)
- os.close(2)
- os._exit(EXITSTATUS)
-
- def waitfinish(self, waiter=os.waitpid):
- pid, systemstatus = waiter(self.pid, 0)
- if systemstatus:
- if os.WIFSIGNALED(systemstatus):
- exitstatus = os.WTERMSIG(systemstatus) + 128
- else:
- exitstatus = os.WEXITSTATUS(systemstatus)
- else:
- exitstatus = 0
- signal = systemstatus & 0x7f
- if not exitstatus and not signal:
- retval = self.RETVAL.open('rb')
- try:
- retval_data = retval.read()
- finally:
- retval.close()
- retval = marshal.loads(retval_data)
- else:
- retval = None
- stdout = self.STDOUT.read()
- stderr = self.STDERR.read()
- self._removetemp()
- return Result(exitstatus, signal, retval, stdout, stderr)
-
- def _removetemp(self):
- if self.tempdir.check():
- self.tempdir.remove()
-
- def __del__(self):
- if self.pid is not None: # only clean up in main process
- self._removetemp()
-
-
-class Result(object):
- def __init__(self, exitstatus, signal, retval, stdout, stderr):
- self.exitstatus = exitstatus
- self.signal = signal
- self.retval = retval
- self.out = stdout
- self.err = stderr
+
+"""
+ ForkedFunc provides a way to run a function in a forked process
+ and get at its return value, stdout and stderr output as well
+    as signals and exit statuses.
+"""
+
+import py
+import os
+import sys
+import marshal
+
+
+def get_unbuffered_io(fd, filename):
+ f = open(str(filename), "w")
+ if fd != f.fileno():
+ os.dup2(f.fileno(), fd)
+ class AutoFlush:
+ def write(self, data):
+ f.write(data)
+ f.flush()
+ def __getattr__(self, name):
+ return getattr(f, name)
+ return AutoFlush()
+
+
+class ForkedFunc:
+ EXITSTATUS_EXCEPTION = 3
+
+
+ def __init__(self, fun, args=None, kwargs=None, nice_level=0,
+ child_on_start=None, child_on_exit=None):
+ if args is None:
+ args = []
+ if kwargs is None:
+ kwargs = {}
+ self.fun = fun
+ self.args = args
+ self.kwargs = kwargs
+ self.tempdir = tempdir = py.path.local.mkdtemp()
+ self.RETVAL = tempdir.ensure('retval')
+ self.STDOUT = tempdir.ensure('stdout')
+ self.STDERR = tempdir.ensure('stderr')
+
+ pid = os.fork()
+ if pid: # in parent process
+ self.pid = pid
+ else: # in child process
+ self.pid = None
+ self._child(nice_level, child_on_start, child_on_exit)
+
+ def _child(self, nice_level, child_on_start, child_on_exit):
+ # right now we need to call a function, but first we need to
+ # map all IO that might happen
+ sys.stdout = stdout = get_unbuffered_io(1, self.STDOUT)
+ sys.stderr = stderr = get_unbuffered_io(2, self.STDERR)
+ retvalf = self.RETVAL.open("wb")
+ EXITSTATUS = 0
+ try:
+ if nice_level:
+ os.nice(nice_level)
+ try:
+ if child_on_start is not None:
+ child_on_start()
+ retval = self.fun(*self.args, **self.kwargs)
+ retvalf.write(marshal.dumps(retval))
+ if child_on_exit is not None:
+ child_on_exit()
+ except:
+ excinfo = py.code.ExceptionInfo()
+ stderr.write(str(excinfo._getreprcrash()))
+ EXITSTATUS = self.EXITSTATUS_EXCEPTION
+ finally:
+ stdout.close()
+ stderr.close()
+ retvalf.close()
+ os.close(1)
+ os.close(2)
+ os._exit(EXITSTATUS)
+
+ def waitfinish(self, waiter=os.waitpid):
+ pid, systemstatus = waiter(self.pid, 0)
+ if systemstatus:
+ if os.WIFSIGNALED(systemstatus):
+ exitstatus = os.WTERMSIG(systemstatus) + 128
+ else:
+ exitstatus = os.WEXITSTATUS(systemstatus)
+ else:
+ exitstatus = 0
+ signal = systemstatus & 0x7f
+ if not exitstatus and not signal:
+ retval = self.RETVAL.open('rb')
+ try:
+ retval_data = retval.read()
+ finally:
+ retval.close()
+ retval = marshal.loads(retval_data)
+ else:
+ retval = None
+ stdout = self.STDOUT.read()
+ stderr = self.STDERR.read()
+ self._removetemp()
+ return Result(exitstatus, signal, retval, stdout, stderr)
+
+ def _removetemp(self):
+ if self.tempdir.check():
+ self.tempdir.remove()
+
+ def __del__(self):
+ if self.pid is not None: # only clean up in main process
+ self._removetemp()
+
+
+class Result(object):
+ def __init__(self, exitstatus, signal, retval, stdout, stderr):
+ self.exitstatus = exitstatus
+ self.signal = signal
+ self.retval = retval
+ self.out = stdout
+ self.err = stderr
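A usage sketch for ForkedFunc above (also exposed as py.process.ForkedFunc;
POSIX only, since it relies on os.fork). The function `double` is illustrative,
and the return value must be marshal-serializable:

    def double(x):
        print('working')            # captured into the child's stdout tempfile
        return x * 2

    ff = ForkedFunc(double, args=[21])
    result = ff.waitfinish()
    assert result.exitstatus == 0 and result.signal == 0
    print(result.retval, repr(result.out))   # 42 'working\n'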
diff --git a/contrib/python/py/py/_process/killproc.py b/contrib/python/py/py/_process/killproc.py
index e153b99b2f..18e8310b5f 100644
--- a/contrib/python/py/py/_process/killproc.py
+++ b/contrib/python/py/py/_process/killproc.py
@@ -1,23 +1,23 @@
-import py
-import os, sys
-
-if sys.platform == "win32" or getattr(os, '_name', '') == 'nt':
- try:
- import ctypes
- except ImportError:
- def dokill(pid):
- py.process.cmdexec("taskkill /F /PID %d" %(pid,))
- else:
- def dokill(pid):
- PROCESS_TERMINATE = 1
- handle = ctypes.windll.kernel32.OpenProcess(
- PROCESS_TERMINATE, False, pid)
- ctypes.windll.kernel32.TerminateProcess(handle, -1)
- ctypes.windll.kernel32.CloseHandle(handle)
-else:
- def dokill(pid):
- os.kill(pid, 15)
-
-def kill(pid):
- """ kill process by id. """
- dokill(pid)
+import py
+import os, sys
+
+if sys.platform == "win32" or getattr(os, '_name', '') == 'nt':
+ try:
+ import ctypes
+ except ImportError:
+ def dokill(pid):
+ py.process.cmdexec("taskkill /F /PID %d" %(pid,))
+ else:
+ def dokill(pid):
+ PROCESS_TERMINATE = 1
+ handle = ctypes.windll.kernel32.OpenProcess(
+ PROCESS_TERMINATE, False, pid)
+ ctypes.windll.kernel32.TerminateProcess(handle, -1)
+ ctypes.windll.kernel32.CloseHandle(handle)
+else:
+ def dokill(pid):
+ os.kill(pid, 15)
+
+def kill(pid):
+ """ kill process by id. """
+ dokill(pid)
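A usage sketch for kill above (also exposed as py.process.kill), assuming a
POSIX system where a `sleep` executable exists; on Windows the ctypes/taskkill
branch is taken instead:

    import subprocess

    proc = subprocess.Popen(['sleep', '60'])
    kill(proc.pid)      # sends SIGTERM via dokill on POSIX
    proc.wait()         # reap the terminated child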
diff --git a/contrib/python/py/py/_std.py b/contrib/python/py/py/_std.py
index 7cdc9b8610..66adb7b023 100644
--- a/contrib/python/py/py/_std.py
+++ b/contrib/python/py/py/_std.py
@@ -1,27 +1,27 @@
-import sys
+import sys
import warnings
-
+
class PyStdIsDeprecatedWarning(DeprecationWarning):
pass
-class Std(object):
-    """ makes top-level python modules available as attributes,
- importing them on first access.
- """
-
- def __init__(self):
- self.__dict__ = sys.modules
-
- def __getattr__(self, name):
+class Std(object):
+    """ makes top-level python modules available as attributes,
+ importing them on first access.
+ """
+
+ def __init__(self):
+ self.__dict__ = sys.modules
+
+ def __getattr__(self, name):
warnings.warn("py.std is deprecated, please import %s directly" % name,
category=PyStdIsDeprecatedWarning,
stacklevel=2)
- try:
- m = __import__(name)
- except ImportError:
- raise AttributeError("py.std: could not import %s" % name)
- return m
-
-std = Std()
+ try:
+ m = __import__(name)
+ except ImportError:
+ raise AttributeError("py.std: could not import %s" % name)
+ return m
+
+std = Std()
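A usage sketch for the deprecated std facade above (importable as
`from py._std import std, PyStdIsDeprecatedWarning`): attribute access lazily
imports the named top-level module and emits a deprecation warning, which the
sketch silences:

    import warnings

    with warnings.catch_warnings():
        warnings.simplefilter('ignore', PyStdIsDeprecatedWarning)
        print(std.textwrap.fill('lazy import demo', width=20))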
diff --git a/contrib/python/py/py/_xmlgen.py b/contrib/python/py/py/_xmlgen.py
index 7cf5ceaeed..1c83545884 100644
--- a/contrib/python/py/py/_xmlgen.py
+++ b/contrib/python/py/py/_xmlgen.py
@@ -1,79 +1,79 @@
-"""
-module for generating and serializing xml and html structures
-by using simple python objects.
-
-(c) holger krekel, holger at merlinux eu. 2009
-"""
-import sys, re
-
-if sys.version_info >= (3,0):
- def u(s):
- return s
- def unicode(x, errors=None):
- if hasattr(x, '__unicode__'):
- return x.__unicode__()
- return str(x)
-else:
- def u(s):
- return unicode(s)
- unicode = unicode
-
-
-class NamespaceMetaclass(type):
- def __getattr__(self, name):
- if name[:1] == '_':
- raise AttributeError(name)
- if self == Namespace:
- raise ValueError("Namespace class is abstract")
- tagspec = self.__tagspec__
- if tagspec is not None and name not in tagspec:
- raise AttributeError(name)
- classattr = {}
- if self.__stickyname__:
- classattr['xmlname'] = name
- cls = type(name, (self.__tagclass__,), classattr)
- setattr(self, name, cls)
- return cls
-
-class Tag(list):
- class Attr(object):
- def __init__(self, **kwargs):
- self.__dict__.update(kwargs)
-
- def __init__(self, *args, **kwargs):
- super(Tag, self).__init__(args)
- self.attr = self.Attr(**kwargs)
-
- def __unicode__(self):
- return self.unicode(indent=0)
- __str__ = __unicode__
-
- def unicode(self, indent=2):
- l = []
- SimpleUnicodeVisitor(l.append, indent).visit(self)
- return u("").join(l)
-
- def __repr__(self):
- name = self.__class__.__name__
- return "<%r tag object %d>" % (name, id(self))
-
-Namespace = NamespaceMetaclass('Namespace', (object, ), {
- '__tagspec__': None,
- '__tagclass__': Tag,
- '__stickyname__': False,
-})
-
-class HtmlTag(Tag):
- def unicode(self, indent=2):
- l = []
- HtmlVisitor(l.append, indent, shortempty=False).visit(self)
- return u("").join(l)
-
-# exported plain html namespace
-class html(Namespace):
- __tagclass__ = HtmlTag
- __stickyname__ = True
- __tagspec__ = dict([(x,1) for x in (
+"""
+module for generating and serializing xml and html structures
+by using simple python objects.
+
+(c) holger krekel, holger at merlinux eu. 2009
+"""
+import sys, re
+
+if sys.version_info >= (3,0):
+ def u(s):
+ return s
+ def unicode(x, errors=None):
+ if hasattr(x, '__unicode__'):
+ return x.__unicode__()
+ return str(x)
+else:
+ def u(s):
+ return unicode(s)
+ unicode = unicode
+
+
+class NamespaceMetaclass(type):
+ def __getattr__(self, name):
+ if name[:1] == '_':
+ raise AttributeError(name)
+ if self == Namespace:
+ raise ValueError("Namespace class is abstract")
+ tagspec = self.__tagspec__
+ if tagspec is not None and name not in tagspec:
+ raise AttributeError(name)
+ classattr = {}
+ if self.__stickyname__:
+ classattr['xmlname'] = name
+ cls = type(name, (self.__tagclass__,), classattr)
+ setattr(self, name, cls)
+ return cls
+
+class Tag(list):
+ class Attr(object):
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+ def __init__(self, *args, **kwargs):
+ super(Tag, self).__init__(args)
+ self.attr = self.Attr(**kwargs)
+
+ def __unicode__(self):
+ return self.unicode(indent=0)
+ __str__ = __unicode__
+
+ def unicode(self, indent=2):
+ l = []
+ SimpleUnicodeVisitor(l.append, indent).visit(self)
+ return u("").join(l)
+
+ def __repr__(self):
+ name = self.__class__.__name__
+ return "<%r tag object %d>" % (name, id(self))
+
+Namespace = NamespaceMetaclass('Namespace', (object, ), {
+ '__tagspec__': None,
+ '__tagclass__': Tag,
+ '__stickyname__': False,
+})
+
+class HtmlTag(Tag):
+ def unicode(self, indent=2):
+ l = []
+ HtmlVisitor(l.append, indent, shortempty=False).visit(self)
+ return u("").join(l)
+
+# exported plain html namespace
+class html(Namespace):
+ __tagclass__ = HtmlTag
+ __stickyname__ = True
+ __tagspec__ = dict([(x,1) for x in (
'a,abbr,acronym,address,applet,area,article,aside,audio,b,'
'base,basefont,bdi,bdo,big,blink,blockquote,body,br,button,'
'canvas,caption,center,cite,code,col,colgroup,command,comment,'
@@ -86,170 +86,170 @@ class html(Namespace):
'q,rp,rt,ruby,s,samp,script,section,select,small,source,span,'
'strike,strong,style,sub,summary,sup,table,tbody,td,textarea,'
'tfoot,th,thead,time,title,tr,track,tt,u,ul,xmp,var,video,wbr'
- ).split(',') if x])
-
- class Style(object):
- def __init__(self, **kw):
- for x, y in kw.items():
- x = x.replace('_', '-')
- setattr(self, x, y)
-
-
-class raw(object):
- """just a box that can contain a unicode string that will be
- included directly in the output"""
- def __init__(self, uniobj):
- self.uniobj = uniobj
-
-class SimpleUnicodeVisitor(object):
- """ recursive visitor to write unicode. """
- def __init__(self, write, indent=0, curindent=0, shortempty=True):
- self.write = write
- self.cache = {}
- self.visited = {} # for detection of recursion
- self.indent = indent
- self.curindent = curindent
- self.parents = []
- self.shortempty = shortempty # short empty tags or not
-
- def visit(self, node):
- """ dispatcher on node's class/bases name. """
- cls = node.__class__
- try:
- visitmethod = self.cache[cls]
- except KeyError:
- for subclass in cls.__mro__:
- visitmethod = getattr(self, subclass.__name__, None)
- if visitmethod is not None:
- break
- else:
- visitmethod = self.__object
- self.cache[cls] = visitmethod
- visitmethod(node)
-
- # the default fallback handler is marked private
- # to avoid clashes with the tag name object
- def __object(self, obj):
- #self.write(obj)
- self.write(escape(unicode(obj)))
-
- def raw(self, obj):
- self.write(obj.uniobj)
-
- def list(self, obj):
- assert id(obj) not in self.visited
- self.visited[id(obj)] = 1
- for elem in obj:
- self.visit(elem)
-
- def Tag(self, tag):
- assert id(tag) not in self.visited
- try:
- tag.parent = self.parents[-1]
- except IndexError:
- tag.parent = None
- self.visited[id(tag)] = 1
- tagname = getattr(tag, 'xmlname', tag.__class__.__name__)
- if self.curindent and not self._isinline(tagname):
- self.write("\n" + u(' ') * self.curindent)
- if tag:
- self.curindent += self.indent
- self.write(u('<%s%s>') % (tagname, self.attributes(tag)))
- self.parents.append(tag)
- for x in tag:
- self.visit(x)
- self.parents.pop()
- self.write(u('</%s>') % tagname)
- self.curindent -= self.indent
- else:
- nameattr = tagname+self.attributes(tag)
- if self._issingleton(tagname):
- self.write(u('<%s/>') % (nameattr,))
- else:
- self.write(u('<%s></%s>') % (nameattr, tagname))
-
- def attributes(self, tag):
- # serialize attributes
- attrlist = dir(tag.attr)
- attrlist.sort()
- l = []
- for name in attrlist:
- res = self.repr_attribute(tag.attr, name)
- if res is not None:
- l.append(res)
- l.extend(self.getstyle(tag))
- return u("").join(l)
-
- def repr_attribute(self, attrs, name):
- if name[:2] != '__':
- value = getattr(attrs, name)
- if name.endswith('_'):
- name = name[:-1]
- if isinstance(value, raw):
- insert = value.uniobj
- else:
- insert = escape(unicode(value))
- return ' %s="%s"' % (name, insert)
-
- def getstyle(self, tag):
- """ return attribute list suitable for styling. """
- try:
- styledict = tag.style.__dict__
- except AttributeError:
- return []
- else:
- stylelist = [x+': ' + y for x,y in styledict.items()]
- return [u(' style="%s"') % u('; ').join(stylelist)]
-
- def _issingleton(self, tagname):
- """can (and will) be overridden in subclasses"""
- return self.shortempty
-
- def _isinline(self, tagname):
- """can (and will) be overridden in subclasses"""
- return False
-
-class HtmlVisitor(SimpleUnicodeVisitor):
-
- single = dict([(x, 1) for x in
- ('br,img,area,param,col,hr,meta,link,base,'
- 'input,frame').split(',')])
- inline = dict([(x, 1) for x in
- ('a abbr acronym b basefont bdo big br cite code dfn em font '
- 'i img input kbd label q s samp select small span strike '
- 'strong sub sup textarea tt u var'.split(' '))])
-
- def repr_attribute(self, attrs, name):
- if name == 'class_':
- value = getattr(attrs, name)
- if value is None:
- return
- return super(HtmlVisitor, self).repr_attribute(attrs, name)
-
- def _issingleton(self, tagname):
- return tagname in self.single
-
- def _isinline(self, tagname):
- return tagname in self.inline
-
-
-class _escape:
- def __init__(self):
- self.escape = {
- u('"') : u('&quot;'), u('<') : u('&lt;'), u('>') : u('&gt;'),
- u('&') : u('&amp;'), u("'") : u('&apos;'),
- }
- self.charef_rex = re.compile(u("|").join(self.escape.keys()))
-
- def _replacer(self, match):
- return self.escape[match.group(0)]
-
- def __call__(self, ustring):
- """ xml-escape the given unicode string. """
- try:
- ustring = unicode(ustring)
- except UnicodeDecodeError:
- ustring = unicode(ustring, 'utf-8', errors='replace')
- return self.charef_rex.sub(self._replacer, ustring)
-
-escape = _escape()
+ ).split(',') if x])
+
+ class Style(object):
+ def __init__(self, **kw):
+ for x, y in kw.items():
+ x = x.replace('_', '-')
+ setattr(self, x, y)
+
+
+class raw(object):
+ """just a box that can contain a unicode string that will be
+ included directly in the output"""
+ def __init__(self, uniobj):
+ self.uniobj = uniobj
+
+class SimpleUnicodeVisitor(object):
+ """ recursive visitor to write unicode. """
+ def __init__(self, write, indent=0, curindent=0, shortempty=True):
+ self.write = write
+ self.cache = {}
+ self.visited = {} # for detection of recursion
+ self.indent = indent
+ self.curindent = curindent
+ self.parents = []
+ self.shortempty = shortempty # short empty tags or not
+
+ def visit(self, node):
+ """ dispatcher on node's class/bases name. """
+ cls = node.__class__
+ try:
+ visitmethod = self.cache[cls]
+ except KeyError:
+ for subclass in cls.__mro__:
+ visitmethod = getattr(self, subclass.__name__, None)
+ if visitmethod is not None:
+ break
+ else:
+ visitmethod = self.__object
+ self.cache[cls] = visitmethod
+ visitmethod(node)
+
+ # the default fallback handler is marked private
+ # to avoid clashes with the tag name object
+ def __object(self, obj):
+ #self.write(obj)
+ self.write(escape(unicode(obj)))
+
+ def raw(self, obj):
+ self.write(obj.uniobj)
+
+ def list(self, obj):
+ assert id(obj) not in self.visited
+ self.visited[id(obj)] = 1
+ for elem in obj:
+ self.visit(elem)
+
+ def Tag(self, tag):
+ assert id(tag) not in self.visited
+ try:
+ tag.parent = self.parents[-1]
+ except IndexError:
+ tag.parent = None
+ self.visited[id(tag)] = 1
+ tagname = getattr(tag, 'xmlname', tag.__class__.__name__)
+ if self.curindent and not self._isinline(tagname):
+ self.write("\n" + u(' ') * self.curindent)
+ if tag:
+ self.curindent += self.indent
+ self.write(u('<%s%s>') % (tagname, self.attributes(tag)))
+ self.parents.append(tag)
+ for x in tag:
+ self.visit(x)
+ self.parents.pop()
+ self.write(u('</%s>') % tagname)
+ self.curindent -= self.indent
+ else:
+ nameattr = tagname+self.attributes(tag)
+ if self._issingleton(tagname):
+ self.write(u('<%s/>') % (nameattr,))
+ else:
+ self.write(u('<%s></%s>') % (nameattr, tagname))
+
+ def attributes(self, tag):
+ # serialize attributes
+ attrlist = dir(tag.attr)
+ attrlist.sort()
+ l = []
+ for name in attrlist:
+ res = self.repr_attribute(tag.attr, name)
+ if res is not None:
+ l.append(res)
+ l.extend(self.getstyle(tag))
+ return u("").join(l)
+
+ def repr_attribute(self, attrs, name):
+ if name[:2] != '__':
+ value = getattr(attrs, name)
+ if name.endswith('_'):
+ name = name[:-1]
+ if isinstance(value, raw):
+ insert = value.uniobj
+ else:
+ insert = escape(unicode(value))
+ return ' %s="%s"' % (name, insert)
+
+ def getstyle(self, tag):
+ """ return attribute list suitable for styling. """
+ try:
+ styledict = tag.style.__dict__
+ except AttributeError:
+ return []
+ else:
+ stylelist = [x+': ' + y for x,y in styledict.items()]
+ return [u(' style="%s"') % u('; ').join(stylelist)]
+
+ def _issingleton(self, tagname):
+ """can (and will) be overridden in subclasses"""
+ return self.shortempty
+
+ def _isinline(self, tagname):
+ """can (and will) be overridden in subclasses"""
+ return False
+
+class HtmlVisitor(SimpleUnicodeVisitor):
+
+ single = dict([(x, 1) for x in
+ ('br,img,area,param,col,hr,meta,link,base,'
+ 'input,frame').split(',')])
+ inline = dict([(x, 1) for x in
+ ('a abbr acronym b basefont bdo big br cite code dfn em font '
+ 'i img input kbd label q s samp select small span strike '
+ 'strong sub sup textarea tt u var'.split(' '))])
+
+ def repr_attribute(self, attrs, name):
+ if name == 'class_':
+ value = getattr(attrs, name)
+ if value is None:
+ return
+ return super(HtmlVisitor, self).repr_attribute(attrs, name)
+
+ def _issingleton(self, tagname):
+ return tagname in self.single
+
+ def _isinline(self, tagname):
+ return tagname in self.inline
+
+
+class _escape:
+ def __init__(self):
+ self.escape = {
+ u('"') : u('&quot;'), u('<') : u('&lt;'), u('>') : u('&gt;'),
+ u('&') : u('&amp;'), u("'") : u('&apos;'),
+ }
+ self.charef_rex = re.compile(u("|").join(self.escape.keys()))
+
+ def _replacer(self, match):
+ return self.escape[match.group(0)]
+
+ def __call__(self, ustring):
+ """ xml-escape the given unicode string. """
+ try:
+ ustring = unicode(ustring)
+ except UnicodeDecodeError:
+ ustring = unicode(ustring, 'utf-8', errors='replace')
+ return self.charef_rex.sub(self._replacer, ustring)
+
+escape = _escape()
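A usage sketch for the html namespace above (importable as
`from py._xmlgen import html`): tag classes are created on attribute access,
keyword arguments become XML attributes (a trailing underscore, as in class_,
is stripped), and unicode() serializes the tree:

    page = html.html(
        html.head(html.title('demo')),
        html.body(html.p('hello', class_='greeting')),
    )
    print(page.unicode(indent=2))   # nested markup; the <p> gains class="greeting"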
diff --git a/contrib/python/py/py/test.py b/contrib/python/py/py/test.py
index ee3de1adf2..aa5beb1789 100644
--- a/contrib/python/py/py/test.py
+++ b/contrib/python/py/py/test.py
@@ -1,10 +1,10 @@
-import sys
-if __name__ == '__main__':
- import pytest
- sys.exit(pytest.main())
-else:
- import sys, pytest
- sys.modules['py.test'] = pytest
-
-# for more API entry points see the 'tests' definition
-# in __init__.py
+import sys
+if __name__ == '__main__':
+ import pytest
+ sys.exit(pytest.main())
+else:
+ import sys, pytest
+ sys.modules['py.test'] = pytest
+
+# for more API entry points see the 'tests' definition
+# in __init__.py
diff --git a/contrib/python/requests/requests/__init__.py b/contrib/python/requests/requests/__init__.py
index ae59f2d6ae..53a5b42af6 100644
--- a/contrib/python/requests/requests/__init__.py
+++ b/contrib/python/requests/requests/__init__.py
@@ -1,49 +1,49 @@
-# -*- coding: utf-8 -*-
-
-# __
-# /__) _ _ _ _ _/ _
-# / ( (- (/ (/ (- _) / _)
-# /
-
-"""
+# -*- coding: utf-8 -*-
+
+# __
+# /__) _ _ _ _ _/ _
+# / ( (- (/ (/ (- _) / _)
+# /
+
+"""
Requests HTTP Library
-~~~~~~~~~~~~~~~~~~~~~
-
+~~~~~~~~~~~~~~~~~~~~~
+
Requests is an HTTP library, written in Python, for human beings.
Basic GET usage:
-
- >>> import requests
- >>> r = requests.get('https://www.python.org')
- >>> r.status_code
- 200
+
+ >>> import requests
+ >>> r = requests.get('https://www.python.org')
+ >>> r.status_code
+ 200
>>> b'Python is a programming language' in r.content
- True
-
-... or POST:
-
- >>> payload = dict(key1='value1', key2='value2')
+ True
+
+... or POST:
+
+ >>> payload = dict(key1='value1', key2='value2')
>>> r = requests.post('https://httpbin.org/post', data=payload)
- >>> print(r.text)
- {
- ...
- "form": {
+ >>> print(r.text)
+ {
+ ...
+ "form": {
"key1": "value1",
"key2": "value2"
- },
- ...
- }
-
-The other HTTP methods are supported - see `requests.api`. Full documentation
+ },
+ ...
+ }
+
+The other HTTP methods are supported - see `requests.api`. Full documentation
is at <https://requests.readthedocs.io>.
-
+
:copyright: (c) 2017 by Kenneth Reitz.
-:license: Apache 2.0, see LICENSE for more details.
-"""
-
+:license: Apache 2.0, see LICENSE for more details.
+"""
+
import urllib3
import warnings
from .exceptions import RequestsDependencyWarning
-
+
try:
from charset_normalizer import __version__ as charset_normalizer_version
except ImportError:
@@ -106,7 +106,7 @@ except (AssertionError, ValueError):
# Attempt to enable urllib3's fallback for SNI support
# if the standard library doesn't support SNI or the
# 'ssl' library isn't available.
-try:
+try:
try:
import ssl
except ImportError:
@@ -119,9 +119,9 @@ try:
# Check cryptography version
from cryptography import __version__ as cryptography_version
_check_cryptography(cryptography_version)
-except ImportError:
- pass
-
+except ImportError:
+ pass
+
# urllib3's DependencyWarnings should be silenced.
from urllib3.exceptions import DependencyWarning
warnings.simplefilter('ignore', DependencyWarning)
@@ -130,23 +130,23 @@ from .__version__ import __title__, __description__, __url__, __version__
from .__version__ import __build__, __author__, __author_email__, __license__
from .__version__ import __copyright__, __cake__
-from . import utils
+from . import utils
from . import packages
-from .models import Request, Response, PreparedRequest
-from .api import request, get, head, post, patch, put, delete, options
-from .sessions import session, Session
-from .status_codes import codes
-from .exceptions import (
- RequestException, Timeout, URLRequired,
+from .models import Request, Response, PreparedRequest
+from .api import request, get, head, post, patch, put, delete, options
+from .sessions import session, Session
+from .status_codes import codes
+from .exceptions import (
+ RequestException, Timeout, URLRequired,
TooManyRedirects, HTTPError, ConnectionError,
FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError
-)
-
-# Set default logging handler to avoid "No handler found" warnings.
-import logging
+)
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
from logging import NullHandler
-
-logging.getLogger(__name__).addHandler(NullHandler())
+
+logging.getLogger(__name__).addHandler(NullHandler())
# FileModeWarnings go off per the default.
warnings.simplefilter('default', FileModeWarning, append=True)
diff --git a/contrib/python/requests/requests/adapters.py b/contrib/python/requests/requests/adapters.py
index 99a8c82fc5..3a8463b7db 100644
--- a/contrib/python/requests/requests/adapters.py
+++ b/contrib/python/requests/requests/adapters.py
@@ -1,16 +1,16 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.adapters
-~~~~~~~~~~~~~~~~~
-
-This module contains the transport adapters that Requests uses to define
-and maintain connections.
-"""
-
+# -*- coding: utf-8 -*-
+
+"""
+requests.adapters
+~~~~~~~~~~~~~~~~~
+
+This module contains the transport adapters that Requests uses to define
+and maintain connections.
+"""
+
import os.path
-import socket
-
+import socket
+
from urllib3.poolmanager import PoolManager, proxy_from_url
from urllib3.response import HTTPResponse
from urllib3.util import parse_url
@@ -29,36 +29,36 @@ from urllib3.exceptions import SSLError as _SSLError
from urllib3.exceptions import ResponseError
from urllib3.exceptions import LocationValueError
-from .models import Response
-from .compat import urlparse, basestring
+from .models import Response
+from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,
get_encoding_from_headers, prepend_scheme_if_needed,
get_auth_from_url, urldefragauth, select_proxy)
-from .structures import CaseInsensitiveDict
-from .cookies import extract_cookies_to_jar
-from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
+from .structures import CaseInsensitiveDict
+from .cookies import extract_cookies_to_jar
+from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
ProxyError, RetryError, InvalidSchema, InvalidProxyURL,
InvalidURL, InvalidHeader)
-from .auth import _basic_auth_str
-
+from .auth import _basic_auth_str
+
try:
from urllib3.contrib.socks import SOCKSProxyManager
except ImportError:
def SOCKSProxyManager(*args, **kwargs):
raise InvalidSchema("Missing dependencies for SOCKS support.")
-DEFAULT_POOLBLOCK = False
-DEFAULT_POOLSIZE = 10
-DEFAULT_RETRIES = 0
+DEFAULT_POOLBLOCK = False
+DEFAULT_POOLSIZE = 10
+DEFAULT_RETRIES = 0
DEFAULT_POOL_TIMEOUT = None
-
-
-class BaseAdapter(object):
- """The Base Transport Adapter"""
-
- def __init__(self):
- super(BaseAdapter, self).__init__()
-
+
+
+class BaseAdapter(object):
+ """The Base Transport Adapter"""
+
+ def __init__(self):
+ super(BaseAdapter, self).__init__()
+
def send(self, request, stream=False, timeout=None, verify=True,
cert=None, proxies=None):
"""Sends PreparedRequest object. Returns Response object.
@@ -75,107 +75,107 @@ class BaseAdapter(object):
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
"""
- raise NotImplementedError
-
- def close(self):
+ raise NotImplementedError
+
+ def close(self):
"""Cleans up adapter specific items."""
- raise NotImplementedError
-
-
-class HTTPAdapter(BaseAdapter):
- """The built-in HTTP Adapter for urllib3.
-
- Provides a general-case interface for Requests sessions to contact HTTP and
- HTTPS urls by implementing the Transport Adapter interface. This class will
- usually be created by the :class:`Session <Session>` class under the
- covers.
-
- :param pool_connections: The number of urllib3 connection pools to cache.
- :param pool_maxsize: The maximum number of connections to save in the pool.
+ raise NotImplementedError
+
+
+class HTTPAdapter(BaseAdapter):
+ """The built-in HTTP Adapter for urllib3.
+
+ Provides a general-case interface for Requests sessions to contact HTTP and
+ HTTPS urls by implementing the Transport Adapter interface. This class will
+ usually be created by the :class:`Session <Session>` class under the
+ covers.
+
+ :param pool_connections: The number of urllib3 connection pools to cache.
+ :param pool_maxsize: The maximum number of connections to save in the pool.
:param max_retries: The maximum number of retries each connection
- should attempt. Note, this applies only to failed DNS lookups, socket
- connections and connection timeouts, never to requests where data has
- made it to the server. By default, Requests does not retry failed
- connections. If you need granular control over the conditions under
- which we retry a request, import urllib3's ``Retry`` class and pass
- that instead.
- :param pool_block: Whether the connection pool should block for connections.
-
- Usage::
-
- >>> import requests
- >>> s = requests.Session()
- >>> a = requests.adapters.HTTPAdapter(max_retries=3)
- >>> s.mount('http://', a)
- """
- __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
- '_pool_block']
-
- def __init__(self, pool_connections=DEFAULT_POOLSIZE,
- pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
- pool_block=DEFAULT_POOLBLOCK):
- if max_retries == DEFAULT_RETRIES:
- self.max_retries = Retry(0, read=False)
- else:
- self.max_retries = Retry.from_int(max_retries)
- self.config = {}
- self.proxy_manager = {}
-
- super(HTTPAdapter, self).__init__()
-
- self._pool_connections = pool_connections
- self._pool_maxsize = pool_maxsize
- self._pool_block = pool_block
-
- self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
-
- def __getstate__(self):
+ should attempt. Note, this applies only to failed DNS lookups, socket
+ connections and connection timeouts, never to requests where data has
+ made it to the server. By default, Requests does not retry failed
+ connections. If you need granular control over the conditions under
+ which we retry a request, import urllib3's ``Retry`` class and pass
+ that instead.
+ :param pool_block: Whether the connection pool should block for connections.
+
+ Usage::
+
+ >>> import requests
+ >>> s = requests.Session()
+ >>> a = requests.adapters.HTTPAdapter(max_retries=3)
+ >>> s.mount('http://', a)
+ """
+ __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
+ '_pool_block']
+
+ def __init__(self, pool_connections=DEFAULT_POOLSIZE,
+ pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
+ pool_block=DEFAULT_POOLBLOCK):
+ if max_retries == DEFAULT_RETRIES:
+ self.max_retries = Retry(0, read=False)
+ else:
+ self.max_retries = Retry.from_int(max_retries)
+ self.config = {}
+ self.proxy_manager = {}
+
+ super(HTTPAdapter, self).__init__()
+
+ self._pool_connections = pool_connections
+ self._pool_maxsize = pool_maxsize
+ self._pool_block = pool_block
+
+ self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
+
+ def __getstate__(self):
return {attr: getattr(self, attr, None) for attr in self.__attrs__}
-
- def __setstate__(self, state):
- # Can't handle by adding 'proxy_manager' to self.__attrs__ because
+
+ def __setstate__(self, state):
+ # Can't handle by adding 'proxy_manager' to self.__attrs__ because
# self.poolmanager uses a lambda function, which isn't pickleable.
- self.proxy_manager = {}
- self.config = {}
-
- for attr, value in state.items():
- setattr(self, attr, value)
-
- self.init_poolmanager(self._pool_connections, self._pool_maxsize,
- block=self._pool_block)
-
- def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
- """Initializes a urllib3 PoolManager.
-
- This method should not be called from user code, and is only
- exposed for use when subclassing the
- :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
- :param connections: The number of urllib3 connection pools to cache.
- :param maxsize: The maximum number of connections to save in the pool.
- :param block: Block when no free connections are available.
- :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
- """
- # save these values for pickling
- self._pool_connections = connections
- self._pool_maxsize = maxsize
- self._pool_block = block
-
- self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
- block=block, strict=True, **pool_kwargs)
-
- def proxy_manager_for(self, proxy, **proxy_kwargs):
- """Return urllib3 ProxyManager for the given proxy.
-
- This method should not be called from user code, and is only
- exposed for use when subclassing the
- :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
- :param proxy: The proxy to return a urllib3 ProxyManager for.
- :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
- :returns: ProxyManager
+ self.proxy_manager = {}
+ self.config = {}
+
+ for attr, value in state.items():
+ setattr(self, attr, value)
+
+ self.init_poolmanager(self._pool_connections, self._pool_maxsize,
+ block=self._pool_block)
+
+ def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
+ """Initializes a urllib3 PoolManager.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param connections: The number of urllib3 connection pools to cache.
+ :param maxsize: The maximum number of connections to save in the pool.
+ :param block: Block when no free connections are available.
+ :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
+ """
+ # save these values for pickling
+ self._pool_connections = connections
+ self._pool_maxsize = maxsize
+ self._pool_block = block
+
+ self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
+ block=block, strict=True, **pool_kwargs)
+
+ def proxy_manager_for(self, proxy, **proxy_kwargs):
+ """Return urllib3 ProxyManager for the given proxy.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param proxy: The proxy to return a urllib3 ProxyManager for.
+ :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
+ :returns: ProxyManager
:rtype: urllib3.ProxyManager
- """
+ """
if proxy in self.proxy_manager:
manager = self.proxy_manager[proxy]
elif proxy.lower().startswith('socks'):
@@ -190,61 +190,61 @@ class HTTPAdapter(BaseAdapter):
**proxy_kwargs
)
else:
- proxy_headers = self.proxy_headers(proxy)
+ proxy_headers = self.proxy_headers(proxy)
manager = self.proxy_manager[proxy] = proxy_from_url(
- proxy,
- proxy_headers=proxy_headers,
- num_pools=self._pool_connections,
- maxsize=self._pool_maxsize,
- block=self._pool_block,
- **proxy_kwargs)
-
+ proxy,
+ proxy_headers=proxy_headers,
+ num_pools=self._pool_connections,
+ maxsize=self._pool_maxsize,
+ block=self._pool_block,
+ **proxy_kwargs)
+
return manager
-
- def cert_verify(self, conn, url, verify, cert):
-        """Verify an SSL certificate. This method should not be called from user
- code, and is only exposed for use when subclassing the
- :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
- :param conn: The urllib3 connection object associated with the cert.
- :param url: The requested URL.
+
+ def cert_verify(self, conn, url, verify, cert):
+        """Verify an SSL certificate. This method should not be called from user
+ code, and is only exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param conn: The urllib3 connection object associated with the cert.
+ :param url: The requested URL.
:param verify: Either a boolean, in which case it controls whether we verify
the server's TLS certificate, or a string, in which case it must be a path
to a CA bundle to use
- :param cert: The SSL certificate to verify.
- """
- if url.lower().startswith('https') and verify:
-
- cert_loc = None
-
- # Allow self-specified cert location.
- if verify is not True:
- cert_loc = verify
-
- if not cert_loc:
+ :param cert: The SSL certificate to verify.
+ """
+ if url.lower().startswith('https') and verify:
+
+ cert_loc = None
+
+ # Allow self-specified cert location.
+ if verify is not True:
+ cert_loc = verify
+
+ if not cert_loc:
cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
-
+
if not cert_loc or isinstance(cert_loc, basestring) and not os.path.exists(cert_loc):
raise IOError("Could not find a suitable TLS CA certificate bundle, "
"invalid path: {}".format(cert_loc))
-
- conn.cert_reqs = 'CERT_REQUIRED'
+
+ conn.cert_reqs = 'CERT_REQUIRED'
if not isinstance(cert_loc, basestring) or not os.path.isdir(cert_loc):
conn.ca_certs = cert_loc
else:
conn.ca_cert_dir = cert_loc
- else:
- conn.cert_reqs = 'CERT_NONE'
- conn.ca_certs = None
+ else:
+ conn.cert_reqs = 'CERT_NONE'
+ conn.ca_certs = None
conn.ca_cert_dir = None
-
- if cert:
- if not isinstance(cert, basestring):
- conn.cert_file = cert[0]
- conn.key_file = cert[1]
- else:
- conn.cert_file = cert
+
+ if cert:
+ if not isinstance(cert, basestring):
+ conn.cert_file = cert[0]
+ conn.key_file = cert[1]
+ else:
+ conn.cert_file = cert
conn.key_file = None
if conn.cert_file and not os.path.exists(conn.cert_file):
raise IOError("Could not find the TLS certificate file, "
@@ -252,98 +252,98 @@ class HTTPAdapter(BaseAdapter):
if conn.key_file and not os.path.exists(conn.key_file):
raise IOError("Could not find the TLS key file, "
"invalid path: {}".format(conn.key_file))
-
- def build_response(self, req, resp):
- """Builds a :class:`Response <requests.Response>` object from a urllib3
- response. This should not be called from user code, and is only exposed
- for use when subclassing the
- :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
-
- :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
- :param resp: The urllib3 response object.
+
+ def build_response(self, req, resp):
+ """Builds a :class:`Response <requests.Response>` object from a urllib3
+ response. This should not be called from user code, and is only exposed
+ for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
+
+ :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
+ :param resp: The urllib3 response object.
:rtype: requests.Response
- """
- response = Response()
-
- # Fallback to None if there's no status_code, for whatever reason.
- response.status_code = getattr(resp, 'status', None)
-
- # Make headers case-insensitive.
- response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
-
- # Set encoding.
- response.encoding = get_encoding_from_headers(response.headers)
- response.raw = resp
- response.reason = response.raw.reason
-
- if isinstance(req.url, bytes):
- response.url = req.url.decode('utf-8')
- else:
- response.url = req.url
-
- # Add new cookies from the server.
- extract_cookies_to_jar(response.cookies, req, resp)
-
- # Give the Response some context.
- response.request = req
- response.connection = self
-
- return response
-
- def get_connection(self, url, proxies=None):
- """Returns a urllib3 connection for the given URL. This should not be
- called from user code, and is only exposed for use when subclassing the
- :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
- :param url: The URL to connect to.
- :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
+ """
+ response = Response()
+
+ # Fallback to None if there's no status_code, for whatever reason.
+ response.status_code = getattr(resp, 'status', None)
+
+ # Make headers case-insensitive.
+ response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
+
+ # Set encoding.
+ response.encoding = get_encoding_from_headers(response.headers)
+ response.raw = resp
+ response.reason = response.raw.reason
+
+ if isinstance(req.url, bytes):
+ response.url = req.url.decode('utf-8')
+ else:
+ response.url = req.url
+
+ # Add new cookies from the server.
+ extract_cookies_to_jar(response.cookies, req, resp)
+
+ # Give the Response some context.
+ response.request = req
+ response.connection = self
+
+ return response
+
+ def get_connection(self, url, proxies=None):
+ """Returns a urllib3 connection for the given URL. This should not be
+ called from user code, and is only exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param url: The URL to connect to.
+ :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
:rtype: urllib3.ConnectionPool
- """
+ """
proxy = select_proxy(url, proxies)
-
- if proxy:
- proxy = prepend_scheme_if_needed(proxy, 'http')
+
+ if proxy:
+ proxy = prepend_scheme_if_needed(proxy, 'http')
proxy_url = parse_url(proxy)
if not proxy_url.host:
raise InvalidProxyURL("Please check proxy URL. It is malformed"
" and could be missing the host.")
- proxy_manager = self.proxy_manager_for(proxy)
- conn = proxy_manager.connection_from_url(url)
- else:
- # Only scheme should be lower case
- parsed = urlparse(url)
- url = parsed.geturl()
- conn = self.poolmanager.connection_from_url(url)
-
- return conn
-
- def close(self):
- """Disposes of any internal state.
-
+ proxy_manager = self.proxy_manager_for(proxy)
+ conn = proxy_manager.connection_from_url(url)
+ else:
+ # Only scheme should be lower case
+ parsed = urlparse(url)
+ url = parsed.geturl()
+ conn = self.poolmanager.connection_from_url(url)
+
+ return conn
+
+ def close(self):
+ """Disposes of any internal state.
+
Currently, this closes the PoolManager and any active ProxyManager,
which closes any pooled connections.
- """
- self.poolmanager.clear()
+ """
+ self.poolmanager.clear()
for proxy in self.proxy_manager.values():
proxy.clear()
-
- def request_url(self, request, proxies):
- """Obtain the url to use when making the final request.
-
-        If the message is being sent through an HTTP proxy, the full URL has to
- be used. Otherwise, we should only use the path portion of the URL.
-
- This should not be called from user code, and is only exposed for use
- when subclassing the
- :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
- :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+
+ def request_url(self, request, proxies):
+ """Obtain the url to use when making the final request.
+
+        If the message is being sent through an HTTP proxy, the full URL has to
+ be used. Otherwise, we should only use the path portion of the URL.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
:rtype: str
- """
+ """
proxy = select_proxy(request.url, proxies)
- scheme = urlparse(request.url).scheme
-
+ scheme = urlparse(request.url).scheme
+
is_proxied_http_request = (proxy and scheme != 'https')
using_socks_proxy = False
if proxy:
@@ -352,130 +352,130 @@ class HTTPAdapter(BaseAdapter):
url = request.path_url
if is_proxied_http_request and not using_socks_proxy:
- url = urldefragauth(request.url)
-
- return url
-
- def add_headers(self, request, **kwargs):
- """Add any headers needed by the connection. As of v2.0 this does
- nothing by default, but is left for overriding by users that subclass
- the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
- This should not be called from user code, and is only exposed for use
- when subclassing the
- :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
- :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
- :param kwargs: The keyword arguments from the call to send().
- """
- pass
-
- def proxy_headers(self, proxy):
- """Returns a dictionary of the headers to add to any request sent
- through a proxy. This works with urllib3 magic to ensure that they are
- correctly sent to the proxy, rather than in a tunnelled request if
- CONNECT is being used.
-
- This should not be called from user code, and is only exposed for use
- when subclassing the
- :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
+ url = urldefragauth(request.url)
+
+ return url
+
+ def add_headers(self, request, **kwargs):
+ """Add any headers needed by the connection. As of v2.0 this does
+ nothing by default, but is left for overriding by users that subclass
+ the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
+ :param kwargs: The keyword arguments from the call to send().
+ """
+ pass
+
+ def proxy_headers(self, proxy):
+ """Returns a dictionary of the headers to add to any request sent
+ through a proxy. This works with urllib3 magic to ensure that they are
+ correctly sent to the proxy, rather than in a tunnelled request if
+ CONNECT is being used.
+
+ This should not be called from user code, and is only exposed for use
+ when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
:param proxy: The url of the proxy being used for this request.
:rtype: dict
- """
- headers = {}
- username, password = get_auth_from_url(proxy)
-
+ """
+ headers = {}
+ username, password = get_auth_from_url(proxy)
+
if username:
- headers['Proxy-Authorization'] = _basic_auth_str(username,
- password)
-
- return headers
-
- def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
- """Sends PreparedRequest object. Returns Response object.
-
- :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
- :param stream: (optional) Whether to stream the request content.
- :param timeout: (optional) How long to wait for the server to send
+ headers['Proxy-Authorization'] = _basic_auth_str(username,
+ password)
+
+ return headers
+
+ def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
+ """Sends PreparedRequest object. Returns Response object.
+
+ :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
+ :param stream: (optional) Whether to stream the request content.
+ :param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple or urllib3 Timeout object
:param verify: (optional) Either a boolean, in which case it controls whether
we verify the server's TLS certificate, or a string, in which case it
must be a path to a CA bundle to use
- :param cert: (optional) Any user-provided SSL certificate to be trusted.
- :param proxies: (optional) The proxies dictionary to apply to the request.
+ :param cert: (optional) Any user-provided SSL certificate to be trusted.
+ :param proxies: (optional) The proxies dictionary to apply to the request.
:rtype: requests.Response
- """
-
+ """
+
try:
conn = self.get_connection(request.url, proxies)
except LocationValueError as e:
raise InvalidURL(e, request=request)
-
- self.cert_verify(conn, request.url, verify, cert)
- url = self.request_url(request, proxies)
+
+ self.cert_verify(conn, request.url, verify, cert)
+ url = self.request_url(request, proxies)
self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
-
- chunked = not (request.body is None or 'Content-Length' in request.headers)
-
- if isinstance(timeout, tuple):
- try:
- connect, read = timeout
- timeout = TimeoutSauce(connect=connect, read=read)
- except ValueError as e:
- # this may raise a string formatting error.
+
+ chunked = not (request.body is None or 'Content-Length' in request.headers)
+
+ if isinstance(timeout, tuple):
+ try:
+ connect, read = timeout
+ timeout = TimeoutSauce(connect=connect, read=read)
+ except ValueError as e:
+ # this may raise a string formatting error.
err = ("Invalid timeout {}. Pass a (connect, read) "
- "timeout tuple, or a single float to set "
- "both timeouts to the same value".format(timeout))
- raise ValueError(err)
+ "timeout tuple, or a single float to set "
+ "both timeouts to the same value".format(timeout))
+ raise ValueError(err)
elif isinstance(timeout, TimeoutSauce):
pass
- else:
- timeout = TimeoutSauce(connect=timeout, read=timeout)
-
- try:
- if not chunked:
- resp = conn.urlopen(
- method=request.method,
- url=url,
- body=request.body,
- headers=request.headers,
- redirect=False,
- assert_same_host=False,
- preload_content=False,
- decode_content=False,
- retries=self.max_retries,
- timeout=timeout
- )
-
- # Send the request.
- else:
- if hasattr(conn, 'proxy_pool'):
- conn = conn.proxy_pool
-
+ else:
+ timeout = TimeoutSauce(connect=timeout, read=timeout)
+
+ try:
+ if not chunked:
+ resp = conn.urlopen(
+ method=request.method,
+ url=url,
+ body=request.body,
+ headers=request.headers,
+ redirect=False,
+ assert_same_host=False,
+ preload_content=False,
+ decode_content=False,
+ retries=self.max_retries,
+ timeout=timeout
+ )
+
+ # Send the request.
+ else:
+ if hasattr(conn, 'proxy_pool'):
+ conn = conn.proxy_pool
+
low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
-
- try:
+
+ try:
skip_host = 'Host' in request.headers
- low_conn.putrequest(request.method,
- url,
+ low_conn.putrequest(request.method,
+ url,
skip_accept_encoding=True,
skip_host=skip_host)
-
- for header, value in request.headers.items():
- low_conn.putheader(header, value)
-
- low_conn.endheaders()
-
- for i in request.body:
- low_conn.send(hex(len(i))[2:].encode('utf-8'))
- low_conn.send(b'\r\n')
- low_conn.send(i)
- low_conn.send(b'\r\n')
- low_conn.send(b'0\r\n\r\n')
-
+
+ for header, value in request.headers.items():
+ low_conn.putheader(header, value)
+
+ low_conn.endheaders()
+
+ for i in request.body:
+ low_conn.send(hex(len(i))[2:].encode('utf-8'))
+ low_conn.send(b'\r\n')
+ low_conn.send(i)
+ low_conn.send(b'\r\n')
+ low_conn.send(b'0\r\n\r\n')
+
# Receive the response from the server
try:
# For Python 2.7, use buffering of HTTP responses
@@ -484,31 +484,31 @@ class HTTPAdapter(BaseAdapter):
# For compatibility with Python 3.3+
r = low_conn.getresponse()
- resp = HTTPResponse.from_httplib(
- r,
- pool=conn,
- connection=low_conn,
- preload_content=False,
- decode_content=False
- )
- except:
- # If we hit any problems here, clean up the connection.
- # Then, reraise so that we can handle the actual exception.
- low_conn.close()
- raise
-
- except (ProtocolError, socket.error) as err:
- raise ConnectionError(err, request=request)
-
- except MaxRetryError as e:
- if isinstance(e.reason, ConnectTimeoutError):
+ resp = HTTPResponse.from_httplib(
+ r,
+ pool=conn,
+ connection=low_conn,
+ preload_content=False,
+ decode_content=False
+ )
+ except:
+ # If we hit any problems here, clean up the connection.
+ # Then, reraise so that we can handle the actual exception.
+ low_conn.close()
+ raise
+
+ except (ProtocolError, socket.error) as err:
+ raise ConnectionError(err, request=request)
+
+ except MaxRetryError as e:
+ if isinstance(e.reason, ConnectTimeoutError):
# TODO: Remove this in 3.0.0: see #2811
if not isinstance(e.reason, NewConnectionError):
raise ConnectTimeout(e, request=request)
-
- if isinstance(e.reason, ResponseError):
- raise RetryError(e, request=request)
-
+
+ if isinstance(e.reason, ResponseError):
+ raise RetryError(e, request=request)
+
if isinstance(e.reason, _ProxyError):
raise ProxyError(e, request=request)
@@ -516,23 +516,23 @@ class HTTPAdapter(BaseAdapter):
# This branch is for urllib3 v1.22 and later.
raise SSLError(e, request=request)
- raise ConnectionError(e, request=request)
-
+ raise ConnectionError(e, request=request)
+
except ClosedPoolError as e:
raise ConnectionError(e, request=request)
- except _ProxyError as e:
- raise ProxyError(e)
-
- except (_SSLError, _HTTPError) as e:
- if isinstance(e, _SSLError):
+ except _ProxyError as e:
+ raise ProxyError(e)
+
+ except (_SSLError, _HTTPError) as e:
+ if isinstance(e, _SSLError):
# This branch is for urllib3 versions earlier than v1.22
- raise SSLError(e, request=request)
- elif isinstance(e, ReadTimeoutError):
- raise ReadTimeout(e, request=request)
+ raise SSLError(e, request=request)
+ elif isinstance(e, ReadTimeoutError):
+ raise ReadTimeout(e, request=request)
elif isinstance(e, _InvalidHeader):
raise InvalidHeader(e, request=request)
- else:
- raise
-
- return self.build_response(request, resp)
+ else:
+ raise
+
+ return self.build_response(request, resp)
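Editor's sketch (not part of this commit) of the two interesting paths in HTTPAdapter.send() restored above: timeout normalization into a urllib3 TimeoutSauce, and the low-level chunked upload taken when the body has no Content-Length. Endpoints below are httpbin demo URLs used purely for illustration:

    import requests

    # Timeout forms accepted by send(): a single float sets both the connect
    # and read timeouts; a (connect, read) tuple sets them independently.
    requests.get('https://httpbin.org/get', timeout=5)
    requests.get('https://httpbin.org/get', timeout=(3.05, 27))

    # A generator body carries no Content-Length header, so send() takes the
    # chunked branch above: each chunk is framed as "<hex size>\r\n<data>\r\n"
    # and the stream is terminated with "0\r\n\r\n".
    def gen_body():
        yield b'part-1'
        yield b'part-2'

    requests.post('https://httpbin.org/post', data=gen_body())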
diff --git a/contrib/python/requests/requests/api.py b/contrib/python/requests/requests/api.py
index 3599cd68dd..4cba90eefe 100644
--- a/contrib/python/requests/requests/api.py
+++ b/contrib/python/requests/requests/api.py
@@ -1,159 +1,159 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.api
-~~~~~~~~~~~~
-
-This module implements the Requests API.
-
-:copyright: (c) 2012 by Kenneth Reitz.
-:license: Apache2, see LICENSE for more details.
-"""
-
-from . import sessions
-
-
-def request(method, url, **kwargs):
- """Constructs and sends a :class:`Request <Request>`.
-
+# -*- coding: utf-8 -*-
+
+"""
+requests.api
+~~~~~~~~~~~~
+
+This module implements the Requests API.
+
+:copyright: (c) 2012 by Kenneth Reitz.
+:license: Apache2, see LICENSE for more details.
+"""
+
+from . import sessions
+
+
+def request(method, url, **kwargs):
+ """Constructs and sends a :class:`Request <Request>`.
+
:param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
- :param url: URL for the new :class:`Request` object.
+ :param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary, list of tuples or bytes to send
in the query string for the :class:`Request`.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
- :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
- :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
+ :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
+ :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
to add for the file.
- :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
+ :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) How many seconds to wait for the server to send data
before giving up, as a float, or a :ref:`(connect timeout, read
timeout) <timeouts>` tuple.
- :type timeout: float or tuple
+ :type timeout: float or tuple
:param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
- :type allow_redirects: bool
- :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
+ :type allow_redirects: bool
+ :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param verify: (optional) Either a boolean, in which case it controls whether we verify
the server's TLS certificate, or a string, in which case it must be a path
to a CA bundle to use. Defaults to ``True``.
- :param stream: (optional) if ``False``, the response content will be immediately downloaded.
- :param cert: (optional) if string, path to an SSL client cert file (.pem); if tuple, a ('cert', 'key') pair.
+ :param stream: (optional) if ``False``, the response content will be immediately downloaded.
+ :param cert: (optional) if string, path to an SSL client cert file (.pem); if tuple, a ('cert', 'key') pair.
:return: :class:`Response <Response>` object
:rtype: requests.Response
-
- Usage::
-
- >>> import requests
+
+ Usage::
+
+ >>> import requests
>>> req = requests.request('GET', 'https://httpbin.org/get')
>>> req
- <Response [200]>
- """
-
+ <Response [200]>
+ """
+
# By using the 'with' statement we are sure the session is closed, thus we
# avoid leaving sockets open which can trigger a ResourceWarning in some
# cases, and look like a memory leak in others.
with sessions.Session() as session:
return session.request(method=method, url=url, **kwargs)
-
-
+
+
def get(url, params=None, **kwargs):
r"""Sends a GET request.
-
- :param url: URL for the new :class:`Request` object.
+
+ :param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary, list of tuples or bytes to send
in the query string for the :class:`Request`.
- :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
- """
-
+ """
+
return request('get', url, params=params, **kwargs)
-
-
-def options(url, **kwargs):
+
+
+def options(url, **kwargs):
r"""Sends an OPTIONS request.
-
- :param url: URL for the new :class:`Request` object.
- :param \*\*kwargs: Optional arguments that ``request`` takes.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
- """
-
- return request('options', url, **kwargs)
-
-
-def head(url, **kwargs):
+ """
+
+ return request('options', url, **kwargs)
+
+
+def head(url, **kwargs):
r"""Sends a HEAD request.
-
- :param url: URL for the new :class:`Request` object.
+
+ :param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes. If
`allow_redirects` is not provided, it will be set to `False` (as
opposed to the default :meth:`request` behavior).
:return: :class:`Response <Response>` object
:rtype: requests.Response
- """
-
- kwargs.setdefault('allow_redirects', False)
- return request('head', url, **kwargs)
-
-
-def post(url, data=None, json=None, **kwargs):
+ """
+
+ kwargs.setdefault('allow_redirects', False)
+ return request('head', url, **kwargs)
+
+
+def post(url, data=None, json=None, **kwargs):
r"""Sends a POST request.
-
- :param url: URL for the new :class:`Request` object.
+
+ :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
- :param json: (optional) json data to send in the body of the :class:`Request`.
- :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :param json: (optional) json data to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
- """
-
- return request('post', url, data=data, json=json, **kwargs)
-
-
-def put(url, data=None, **kwargs):
+ """
+
+ return request('post', url, data=data, json=json, **kwargs)
+
+
+def put(url, data=None, **kwargs):
r"""Sends a PUT request.
-
- :param url: URL for the new :class:`Request` object.
+
+ :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
- :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
- """
-
- return request('put', url, data=data, **kwargs)
-
-
-def patch(url, data=None, **kwargs):
+ """
+
+ return request('put', url, data=data, **kwargs)
+
+
+def patch(url, data=None, **kwargs):
r"""Sends a PATCH request.
-
- :param url: URL for the new :class:`Request` object.
+
+ :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
- :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
- """
-
+ """
+
return request('patch', url, data=data, **kwargs)
-
-
-def delete(url, **kwargs):
+
+
+def delete(url, **kwargs):
r"""Sends a DELETE request.
-
- :param url: URL for the new :class:`Request` object.
- :param \*\*kwargs: Optional arguments that ``request`` takes.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
- """
-
- return request('delete', url, **kwargs)
+ """
+
+ return request('delete', url, **kwargs)
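A short sketch of the api.py layer restored above (assumes network access; not part of this commit). Every verb helper forwards to request(), which runs each call in a throwaway Session so sockets are closed deterministically:

    import requests

    # These two calls are equivalent; get() simply forwards to request('get', ...).
    r1 = requests.get('https://httpbin.org/get', params={'q': 'ydb'})
    r2 = requests.request('get', 'https://httpbin.org/get', params={'q': 'ydb'})

    # Each api.py call builds and closes its own Session; for repeated calls
    # to one host, hold a Session yourself to reuse the underlying connections.
    with requests.Session() as s:
        for _ in range(3):
            s.get('https://httpbin.org/get')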
diff --git a/contrib/python/requests/requests/auth.py b/contrib/python/requests/requests/auth.py
index a9021f10b4..eeface39ae 100644
--- a/contrib/python/requests/requests/auth.py
+++ b/contrib/python/requests/requests/auth.py
@@ -1,33 +1,33 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.auth
-~~~~~~~~~~~~~
-
-This module contains the authentication handlers for Requests.
-"""
-
-import os
-import re
-import time
-import hashlib
+# -*- coding: utf-8 -*-
+
+"""
+requests.auth
+~~~~~~~~~~~~~
+
+This module contains the authentication handlers for Requests.
+"""
+
+import os
+import re
+import time
+import hashlib
import threading
import warnings
-
-from base64 import b64encode
-
+
+from base64 import b64encode
+
from .compat import urlparse, str, basestring
-from .cookies import extract_cookies_to_jar
+from .cookies import extract_cookies_to_jar
from ._internal_utils import to_native_string
from .utils import parse_dict_header
-
-CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
-CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
-
-
-def _basic_auth_str(username, password):
- """Returns a Basic Auth string."""
-
+
+CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
+CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
+
+
+def _basic_auth_str(username, password):
+ """Returns a Basic Auth string."""
+
# "I want us to put a big-ol' comment on top of it that
# says that this behaviour is dumb but we need to preserve
# it because people are relying on it."
@@ -62,27 +62,27 @@ def _basic_auth_str(username, password):
if isinstance(password, str):
password = password.encode('latin1')
- authstr = 'Basic ' + to_native_string(
+ authstr = 'Basic ' + to_native_string(
b64encode(b':'.join((username, password))).strip()
- )
-
- return authstr
-
-
-class AuthBase(object):
- """Base class that all auth implementations derive from"""
-
- def __call__(self, r):
- raise NotImplementedError('Auth hooks must be callable.')
-
-
-class HTTPBasicAuth(AuthBase):
- """Attaches HTTP Basic Authentication to the given Request object."""
-
- def __init__(self, username, password):
- self.username = username
- self.password = password
-
+ )
+
+ return authstr
+
+
+class AuthBase(object):
+ """Base class that all auth implementations derive from"""
+
+ def __call__(self, r):
+ raise NotImplementedError('Auth hooks must be callable.')
+
+
+class HTTPBasicAuth(AuthBase):
+ """Attaches HTTP Basic Authentication to the given Request object."""
+
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
+
def __eq__(self, other):
return all([
self.username == getattr(other, 'username', None),
@@ -92,28 +92,28 @@ class HTTPBasicAuth(AuthBase):
def __ne__(self, other):
return not self == other
- def __call__(self, r):
- r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
- return r
-
-
-class HTTPProxyAuth(HTTPBasicAuth):
- """Attaches HTTP Proxy Authentication to a given Request object."""
-
- def __call__(self, r):
- r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
- return r
-
-
-class HTTPDigestAuth(AuthBase):
- """Attaches HTTP Digest Authentication to the given Request object."""
-
- def __init__(self, username, password):
- self.username = username
- self.password = password
+ def __call__(self, r):
+ r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
+ return r
+
+
+class HTTPProxyAuth(HTTPBasicAuth):
+ """Attaches HTTP Proxy Authentication to a given Request object."""
+
+ def __call__(self, r):
+ r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
+ return r
+
+
+class HTTPDigestAuth(AuthBase):
+ """Attaches HTTP Digest Authentication to the given Request object."""
+
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
# Keep state in per-thread local storage
self._thread_local = threading.local()
-
+
def init_per_thread_state(self):
# Ensure state is initialized just once per-thread
if not hasattr(self._thread_local, 'init'):
@@ -124,35 +124,35 @@ class HTTPDigestAuth(AuthBase):
self._thread_local.pos = None
self._thread_local.num_401_calls = None
- def build_digest_header(self, method, url):
+ def build_digest_header(self, method, url):
"""
:rtype: str
"""
-
+
realm = self._thread_local.chal['realm']
nonce = self._thread_local.chal['nonce']
qop = self._thread_local.chal.get('qop')
algorithm = self._thread_local.chal.get('algorithm')
opaque = self._thread_local.chal.get('opaque')
hash_utf8 = None
-
- if algorithm is None:
- _algorithm = 'MD5'
- else:
- _algorithm = algorithm.upper()
- # lambdas assume digest modules are imported at the top level
- if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
- def md5_utf8(x):
- if isinstance(x, str):
- x = x.encode('utf-8')
- return hashlib.md5(x).hexdigest()
- hash_utf8 = md5_utf8
- elif _algorithm == 'SHA':
- def sha_utf8(x):
- if isinstance(x, str):
- x = x.encode('utf-8')
- return hashlib.sha1(x).hexdigest()
- hash_utf8 = sha_utf8
+
+ if algorithm is None:
+ _algorithm = 'MD5'
+ else:
+ _algorithm = algorithm.upper()
+ # lambdas assume digest modules are imported at the top level
+ if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
+ def md5_utf8(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.md5(x).hexdigest()
+ hash_utf8 = md5_utf8
+ elif _algorithm == 'SHA':
+ def sha_utf8(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.sha1(x).hexdigest()
+ hash_utf8 = sha_utf8
elif _algorithm == 'SHA-256':
def sha256_utf8(x):
if isinstance(x, str):
@@ -165,76 +165,76 @@ class HTTPDigestAuth(AuthBase):
x = x.encode('utf-8')
return hashlib.sha512(x).hexdigest()
hash_utf8 = sha512_utf8
-
- KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
-
- if hash_utf8 is None:
- return None
-
- # XXX not implemented yet
- entdig = None
- p_parsed = urlparse(url)
+
+ KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
+
+ if hash_utf8 is None:
+ return None
+
+ # XXX not implemented yet
+ entdig = None
+ p_parsed = urlparse(url)
#: path is request-uri defined in RFC 2616 which should not be empty
path = p_parsed.path or "/"
- if p_parsed.query:
- path += '?' + p_parsed.query
-
- A1 = '%s:%s:%s' % (self.username, realm, self.password)
- A2 = '%s:%s' % (method, path)
-
- HA1 = hash_utf8(A1)
- HA2 = hash_utf8(A2)
-
+ if p_parsed.query:
+ path += '?' + p_parsed.query
+
+ A1 = '%s:%s:%s' % (self.username, realm, self.password)
+ A2 = '%s:%s' % (method, path)
+
+ HA1 = hash_utf8(A1)
+ HA2 = hash_utf8(A2)
+
if nonce == self._thread_local.last_nonce:
self._thread_local.nonce_count += 1
- else:
+ else:
self._thread_local.nonce_count = 1
ncvalue = '%08x' % self._thread_local.nonce_count
s = str(self._thread_local.nonce_count).encode('utf-8')
- s += nonce.encode('utf-8')
- s += time.ctime().encode('utf-8')
- s += os.urandom(8)
-
- cnonce = (hashlib.sha1(s).hexdigest()[:16])
- if _algorithm == 'MD5-SESS':
- HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
-
+ s += nonce.encode('utf-8')
+ s += time.ctime().encode('utf-8')
+ s += os.urandom(8)
+
+ cnonce = (hashlib.sha1(s).hexdigest()[:16])
+ if _algorithm == 'MD5-SESS':
+ HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
+
if not qop:
- respdig = KD(HA1, "%s:%s" % (nonce, HA2))
- elif qop == 'auth' or 'auth' in qop.split(','):
+ respdig = KD(HA1, "%s:%s" % (nonce, HA2))
+ elif qop == 'auth' or 'auth' in qop.split(','):
noncebit = "%s:%s:%s:%s:%s" % (
nonce, ncvalue, cnonce, 'auth', HA2
)
- respdig = KD(HA1, noncebit)
- else:
- # XXX handle auth-int.
- return None
-
+ respdig = KD(HA1, noncebit)
+ else:
+ # XXX handle auth-int.
+ return None
+
self._thread_local.last_nonce = nonce
-
- # XXX should the partial digests be encoded too?
- base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
- 'response="%s"' % (self.username, realm, nonce, path, respdig)
- if opaque:
- base += ', opaque="%s"' % opaque
- if algorithm:
- base += ', algorithm="%s"' % algorithm
- if entdig:
- base += ', digest="%s"' % entdig
- if qop:
- base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
-
- return 'Digest %s' % (base)
-
- def handle_redirect(self, r, **kwargs):
- """Reset num_401_calls counter on redirects."""
- if r.is_redirect:
+
+ # XXX should the partial digests be encoded too?
+ base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
+ 'response="%s"' % (self.username, realm, nonce, path, respdig)
+ if opaque:
+ base += ', opaque="%s"' % opaque
+ if algorithm:
+ base += ', algorithm="%s"' % algorithm
+ if entdig:
+ base += ', digest="%s"' % entdig
+ if qop:
+ base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
+
+ return 'Digest %s' % (base)
+
+ def handle_redirect(self, r, **kwargs):
+ """Reset num_401_calls counter on redirects."""
+ if r.is_redirect:
self._thread_local.num_401_calls = 1
-
- def handle_401(self, r, **kwargs):
+
+ def handle_401(self, r, **kwargs):
"""
Takes the given response and tries digest-auth, if needed.
-
+
:rtype: requests.Response
"""
@@ -245,55 +245,55 @@ class HTTPDigestAuth(AuthBase):
return r
if self._thread_local.pos is not None:
- # Rewind the file position indicator of the body to where
- # it was to resend the request.
+ # Rewind the file position indicator of the body to where
+ # it was to resend the request.
r.request.body.seek(self._thread_local.pos)
- s_auth = r.headers.get('www-authenticate', '')
-
+ s_auth = r.headers.get('www-authenticate', '')
+
if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:
-
+
self._thread_local.num_401_calls += 1
- pat = re.compile(r'digest ', flags=re.IGNORECASE)
+ pat = re.compile(r'digest ', flags=re.IGNORECASE)
self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
-
- # Consume content and release the original connection
- # to allow our new request to reuse the same one.
- r.content
+
+ # Consume content and release the original connection
+ # to allow our new request to reuse the same one.
+ r.content
r.close()
- prep = r.request.copy()
- extract_cookies_to_jar(prep._cookies, r.request, r.raw)
- prep.prepare_cookies(prep._cookies)
-
- prep.headers['Authorization'] = self.build_digest_header(
- prep.method, prep.url)
- _r = r.connection.send(prep, **kwargs)
- _r.history.append(r)
- _r.request = prep
-
- return _r
-
+ prep = r.request.copy()
+ extract_cookies_to_jar(prep._cookies, r.request, r.raw)
+ prep.prepare_cookies(prep._cookies)
+
+ prep.headers['Authorization'] = self.build_digest_header(
+ prep.method, prep.url)
+ _r = r.connection.send(prep, **kwargs)
+ _r.history.append(r)
+ _r.request = prep
+
+ return _r
+
self._thread_local.num_401_calls = 1
- return r
-
- def __call__(self, r):
+ return r
+
+ def __call__(self, r):
# Initialize per-thread state, if needed
self.init_per_thread_state()
- # If we have a saved nonce, skip the 401
+ # If we have a saved nonce, skip the 401
if self._thread_local.last_nonce:
- r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
- try:
+ r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
+ try:
self._thread_local.pos = r.body.tell()
- except AttributeError:
- # In the case of HTTPDigestAuth being reused and the body of
- # the previous request was a file-like object, pos has the
- # file position of the previous body. Ensure it's set to
- # None.
+ except AttributeError:
+ # In the case of HTTPDigestAuth being reused and the body of
+ # the previous request was a file-like object, pos has the
+ # file position of the previous body. Ensure it's set to
+ # None.
self._thread_local.pos = None
- r.register_hook('response', self.handle_401)
- r.register_hook('response', self.handle_redirect)
+ r.register_hook('response', self.handle_401)
+ r.register_hook('response', self.handle_redirect)
self._thread_local.num_401_calls = 1
- return r
+ return r
def __eq__(self, other):
return all([
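Usage sketch for the two handlers restored above (httpbin's demo auth endpoints, purely illustrative; not part of this commit). HTTPBasicAuth computes its header once via _basic_auth_str(); HTTPDigestAuth registers handle_401/handle_redirect as response hooks and answers the server's challenge on a second attempt:

    import requests
    from requests.auth import HTTPBasicAuth, HTTPDigestAuth

    # Basic: the Authorization header is computed up front; note that str
    # credentials are latin1-encoded inside _basic_auth_str(), as shown above.
    r = requests.get('https://httpbin.org/basic-auth/user/pass',
                     auth=HTTPBasicAuth('user', 'pass'))

    # Digest: the first response is a 401 challenge; handle_401() parses the
    # WWW-Authenticate header, builds the Digest header and resends once.
    r = requests.get('https://httpbin.org/digest-auth/auth/user/pass',
                     auth=HTTPDigestAuth('user', 'pass'))
    print(r.status_code)  # 200 on success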
diff --git a/contrib/python/requests/requests/certs.py b/contrib/python/requests/requests/certs.py
index 67765b467f..d1a378d787 100644
--- a/contrib/python/requests/requests/certs.py
+++ b/contrib/python/requests/requests/certs.py
@@ -1,18 +1,18 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-"""
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
requests.certs
~~~~~~~~~~~~~~
-
+
This module returns the preferred default CA certificate bundle. There is
only one — the one from the certifi package.
-
-If you are packaging Requests, e.g., for a Linux distribution or a managed
-environment, you can change the definition of where() to return a separately
-packaged CA bundle.
-"""
+
+If you are packaging Requests, e.g., for a Linux distribution or a managed
+environment, you can change the definition of where() to return a separately
+packaged CA bundle.
+"""
from certifi import where
-
+
if __name__ == '__main__':
print(where())
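Since the module is a one-line re-export, its behaviour is easy to check (assumes certifi is importable, which it is as a dependency of this package; not part of this commit):

    import certifi
    from requests import certs

    # where() is certifi.where() under a stable import path.
    assert certs.where() == certifi.where()
    print(certs.where())  # .../certifi/cacert.pem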
diff --git a/contrib/python/requests/requests/compat.py b/contrib/python/requests/requests/compat.py
index 8ea5934b9b..029ae62ac3 100644
--- a/contrib/python/requests/requests/compat.py
+++ b/contrib/python/requests/requests/compat.py
@@ -1,81 +1,81 @@
-# -*- coding: utf-8 -*-
-
-"""
+# -*- coding: utf-8 -*-
+
+"""
requests.compat
~~~~~~~~~~~~~~~
This module handles import compatibility issues between Python 2 and
Python 3.
-"""
-
+"""
+
try:
import chardet
except ImportError:
import charset_normalizer as chardet
-
-import sys
-
-# -------
-# Pythons
-# -------
-
-# Syntax sugar.
-_ver = sys.version_info
-
-#: Python 2.x?
-is_py2 = (_ver[0] == 2)
-
-#: Python 3.x?
-is_py3 = (_ver[0] == 3)
-
+
+import sys
+
+# -------
+# Pythons
+# -------
+
+# Syntax sugar.
+_ver = sys.version_info
+
+#: Python 2.x?
+is_py2 = (_ver[0] == 2)
+
+#: Python 3.x?
+is_py3 = (_ver[0] == 3)
+
has_simplejson = False
-try:
- import simplejson as json
+try:
+ import simplejson as json
has_simplejson = True
except ImportError:
- import json
-
-# ---------
-# Specifics
-# ---------
-
-if is_py2:
+ import json
+
+# ---------
+# Specifics
+# ---------
+
+if is_py2:
from urllib import (
quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,
proxy_bypass, proxy_bypass_environment, getproxies_environment)
- from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
- from urllib2 import parse_http_list
- import cookielib
- from Cookie import Morsel
- from StringIO import StringIO
+ from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
+ from urllib2 import parse_http_list
+ import cookielib
+ from Cookie import Morsel
+ from StringIO import StringIO
# Keep OrderedDict for backwards compatibility.
from collections import Callable, Mapping, MutableMapping, OrderedDict
-
- builtin_str = str
- bytes = str
- str = unicode
- basestring = basestring
- numeric_types = (int, long, float)
+
+ builtin_str = str
+ bytes = str
+ str = unicode
+ basestring = basestring
+ numeric_types = (int, long, float)
integer_types = (int, long)
JSONDecodeError = ValueError
-
-elif is_py3:
- from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
+
+elif is_py3:
+ from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment
- from http import cookiejar as cookielib
- from http.cookies import Morsel
- from io import StringIO
+ from http import cookiejar as cookielib
+ from http.cookies import Morsel
+ from io import StringIO
# Keep OrderedDict for backwards compatibility.
- from collections import OrderedDict
+ from collections import OrderedDict
from collections.abc import Callable, Mapping, MutableMapping
if has_simplejson:
from simplejson import JSONDecodeError
else:
from json import JSONDecodeError
-
- builtin_str = str
- str = str
- bytes = bytes
- basestring = (str, bytes)
- numeric_types = (int, float)
+
+ builtin_str = str
+ str = str
+ bytes = bytes
+ basestring = (str, bytes)
+ numeric_types = (int, float)
integer_types = (int,)
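Downstream code imports these shims instead of branching on the interpreter version itself. A small sketch of what the names resolve to on Python 3 (not part of this commit):

    from requests.compat import urlparse, basestring, is_py2, is_py3

    parts = urlparse('https://example.com/path?q=1')
    print(parts.netloc, parts.query)      # example.com q=1

    # On Python 3: is_py2 is False and basestring is the (str, bytes) tuple,
    # so isinstance checks written against the Python 2 names keep working.
    print(is_py3, isinstance(b'raw', basestring))  # True True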
diff --git a/contrib/python/requests/requests/cookies.py b/contrib/python/requests/requests/cookies.py
index 31362433b7..56fccd9c25 100644
--- a/contrib/python/requests/requests/cookies.py
+++ b/contrib/python/requests/requests/cookies.py
@@ -1,155 +1,155 @@
-# -*- coding: utf-8 -*-
-
-"""
+# -*- coding: utf-8 -*-
+
+"""
requests.cookies
~~~~~~~~~~~~~~~~
-Compatibility code to be able to use `cookielib.CookieJar` with requests.
-
-requests.utils imports from here, so be careful with imports.
-"""
-
+Compatibility code to be able to use `cookielib.CookieJar` with requests.
+
+requests.utils imports from here, so be careful with imports.
+"""
+
import copy
-import time
+import time
import calendar
from ._internal_utils import to_native_string
from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping
-
-try:
- import threading
-except ImportError:
- import dummy_threading as threading
-
-
-class MockRequest(object):
- """Wraps a `requests.Request` to mimic a `urllib2.Request`.
-
- The code in `cookielib.CookieJar` expects this interface in order to correctly
- manage cookie policies, i.e., determine whether a cookie can be set, given the
- domains of the request and the cookie.
-
- The original request object is read-only. The client is responsible for collecting
- the new headers via `get_new_headers()` and interpreting them appropriately. You
- probably want `get_cookie_header`, defined below.
- """
-
- def __init__(self, request):
- self._r = request
- self._new_headers = {}
- self.type = urlparse(self._r.url).scheme
-
- def get_type(self):
- return self.type
-
- def get_host(self):
- return urlparse(self._r.url).netloc
-
- def get_origin_req_host(self):
- return self.get_host()
-
- def get_full_url(self):
- # Only return the response's URL if the user hadn't set the Host
- # header
- if not self._r.headers.get('Host'):
- return self._r.url
- # If they did set it, retrieve it and reconstruct the expected domain
+
+try:
+ import threading
+except ImportError:
+ import dummy_threading as threading
+
+
+class MockRequest(object):
+ """Wraps a `requests.Request` to mimic a `urllib2.Request`.
+
+ The code in `cookielib.CookieJar` expects this interface in order to correctly
+ manage cookie policies, i.e., determine whether a cookie can be set, given the
+ domains of the request and the cookie.
+
+ The original request object is read-only. The client is responsible for collecting
+ the new headers via `get_new_headers()` and interpreting them appropriately. You
+ probably want `get_cookie_header`, defined below.
+ """
+
+ def __init__(self, request):
+ self._r = request
+ self._new_headers = {}
+ self.type = urlparse(self._r.url).scheme
+
+ def get_type(self):
+ return self.type
+
+ def get_host(self):
+ return urlparse(self._r.url).netloc
+
+ def get_origin_req_host(self):
+ return self.get_host()
+
+ def get_full_url(self):
+ # Only return the response's URL if the user hadn't set the Host
+ # header
+ if not self._r.headers.get('Host'):
+ return self._r.url
+ # If they did set it, retrieve it and reconstruct the expected domain
host = to_native_string(self._r.headers['Host'], encoding='utf-8')
- parsed = urlparse(self._r.url)
- # Reconstruct the URL as we expect it
- return urlunparse([
- parsed.scheme, host, parsed.path, parsed.params, parsed.query,
- parsed.fragment
- ])
-
- def is_unverifiable(self):
- return True
-
- def has_header(self, name):
- return name in self._r.headers or name in self._new_headers
-
- def get_header(self, name, default=None):
- return self._r.headers.get(name, self._new_headers.get(name, default))
-
- def add_header(self, key, val):
- """cookielib has no legitimate use for this method; add it back if you find one."""
- raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
-
- def add_unredirected_header(self, name, value):
- self._new_headers[name] = value
-
- def get_new_headers(self):
- return self._new_headers
-
- @property
- def unverifiable(self):
- return self.is_unverifiable()
-
- @property
- def origin_req_host(self):
- return self.get_origin_req_host()
-
- @property
- def host(self):
- return self.get_host()
-
-
-class MockResponse(object):
- """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
-
- ...what? Basically, expose the parsed HTTP headers from the server response
- the way `cookielib` expects to see them.
- """
-
- def __init__(self, headers):
- """Make a MockResponse for `cookielib` to read.
-
- :param headers: a httplib.HTTPMessage or analogous carrying the headers
- """
- self._headers = headers
-
- def info(self):
- return self._headers
-
- def getheaders(self, name):
- self._headers.getheaders(name)
-
-
-def extract_cookies_to_jar(jar, request, response):
- """Extract the cookies from the response into a CookieJar.
-
- :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
- :param request: our own requests.Request object
- :param response: urllib3.HTTPResponse object
- """
- if not (hasattr(response, '_original_response') and
- response._original_response):
- return
- # the _original_response field is the wrapped httplib.HTTPResponse object,
- req = MockRequest(request)
- # pull out the HTTPMessage with the headers and put it in the mock:
- res = MockResponse(response._original_response.msg)
- jar.extract_cookies(res, req)
-
-
-def get_cookie_header(jar, request):
+ parsed = urlparse(self._r.url)
+ # Reconstruct the URL as we expect it
+ return urlunparse([
+ parsed.scheme, host, parsed.path, parsed.params, parsed.query,
+ parsed.fragment
+ ])
+
+ def is_unverifiable(self):
+ return True
+
+ def has_header(self, name):
+ return name in self._r.headers or name in self._new_headers
+
+ def get_header(self, name, default=None):
+ return self._r.headers.get(name, self._new_headers.get(name, default))
+
+ def add_header(self, key, val):
+ """cookielib has no legitimate use for this method; add it back if you find one."""
+ raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
+
+ def add_unredirected_header(self, name, value):
+ self._new_headers[name] = value
+
+ def get_new_headers(self):
+ return self._new_headers
+
+ @property
+ def unverifiable(self):
+ return self.is_unverifiable()
+
+ @property
+ def origin_req_host(self):
+ return self.get_origin_req_host()
+
+ @property
+ def host(self):
+ return self.get_host()
+
+
+class MockResponse(object):
+ """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
+
+ ...what? Basically, expose the parsed HTTP headers from the server response
+ the way `cookielib` expects to see them.
+ """
+
+ def __init__(self, headers):
+ """Make a MockResponse for `cookielib` to read.
+
+ :param headers: a httplib.HTTPMessage or analogous carrying the headers
+ """
+ self._headers = headers
+
+ def info(self):
+ return self._headers
+
+ def getheaders(self, name):
+ self._headers.getheaders(name)
+
+
+def extract_cookies_to_jar(jar, request, response):
+ """Extract the cookies from the response into a CookieJar.
+
+ :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
+ :param request: our own requests.Request object
+ :param response: urllib3.HTTPResponse object
+ """
+ if not (hasattr(response, '_original_response') and
+ response._original_response):
+ return
+ # the _original_response field is the wrapped httplib.HTTPResponse object,
+ req = MockRequest(request)
+ # pull out the HTTPMessage with the headers and put it in the mock:
+ res = MockResponse(response._original_response.msg)
+ jar.extract_cookies(res, req)
+
+
+def get_cookie_header(jar, request):
"""
Produce an appropriate Cookie header string to be sent with `request`, or None.
:rtype: str
"""
- r = MockRequest(request)
- jar.add_cookie_header(r)
- return r.get_new_headers().get('Cookie')
-
-
-def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
- """Unsets a cookie by name, by default over all domains and paths.
-
- Wraps CookieJar.clear(); the operation is O(n).
- """
- clearables = []
- for cookie in cookiejar:
+ r = MockRequest(request)
+ jar.add_cookie_header(r)
+ return r.get_new_headers().get('Cookie')
+
+
+def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
+ """Unsets a cookie by name, by default over all domains and paths.
+
+ Wraps CookieJar.clear(); the operation is O(n).
+ """
+ clearables = []
+ for cookie in cookiejar:
if cookie.name != name:
continue
if domain is not None and domain != cookie.domain:
@@ -157,203 +157,203 @@ def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
if path is not None and path != cookie.path:
continue
clearables.append((cookie.domain, cookie.path, cookie.name))
-
- for domain, path, name in clearables:
- cookiejar.clear(domain, path, name)
-
-
-class CookieConflictError(RuntimeError):
- """There are two cookies that meet the criteria specified in the cookie jar.
+
+ for domain, path, name in clearables:
+ cookiejar.clear(domain, path, name)
+
+
+class CookieConflictError(RuntimeError):
+ """There are two cookies that meet the criteria specified in the cookie jar.
Use .get and .set and include domain and path args in order to be more specific.
"""
-
-
+
+
class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
"""Compatibility class; is a cookielib.CookieJar, but exposes a dict
interface.
-
- This is the CookieJar we create by default for requests and sessions that
- don't specify one, since some clients may expect response.cookies and
- session.cookies to support dict operations.
-
+
+ This is the CookieJar we create by default for requests and sessions that
+ don't specify one, since some clients may expect response.cookies and
+ session.cookies to support dict operations.
+
Requests does not use the dict interface internally; it's just for
compatibility with external client code. All requests code should work
out of the box with externally provided instances of ``CookieJar``, e.g.
``LWPCookieJar`` and ``FileCookieJar``.
-
+
Unlike a regular CookieJar, this class is pickleable.
-
+
.. warning:: dictionary operations that are normally O(1) may be O(n).
- """
-
- def get(self, name, default=None, domain=None, path=None):
- """Dict-like get() that also supports optional domain and path args in
- order to resolve naming collisions from using one cookie jar over
+ """
+
+ def get(self, name, default=None, domain=None, path=None):
+ """Dict-like get() that also supports optional domain and path args in
+ order to resolve naming collisions from using one cookie jar over
multiple domains.
.. warning:: operation is O(n), not O(1).
"""
- try:
- return self._find_no_duplicates(name, domain, path)
- except KeyError:
- return default
-
- def set(self, name, value, **kwargs):
- """Dict-like set() that also supports optional domain and path args in
- order to resolve naming collisions from using one cookie jar over
+ try:
+ return self._find_no_duplicates(name, domain, path)
+ except KeyError:
+ return default
+
+ def set(self, name, value, **kwargs):
+ """Dict-like set() that also supports optional domain and path args in
+ order to resolve naming collisions from using one cookie jar over
multiple domains.
"""
- # support client code that unsets cookies by assignment of a None value:
- if value is None:
- remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
- return
-
- if isinstance(value, Morsel):
- c = morsel_to_cookie(value)
- else:
- c = create_cookie(name, value, **kwargs)
- self.set_cookie(c)
- return c
-
- def iterkeys(self):
+ # support client code that unsets cookies by assignment of a None value:
+ if value is None:
+ remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
+ return
+
+ if isinstance(value, Morsel):
+ c = morsel_to_cookie(value)
+ else:
+ c = create_cookie(name, value, **kwargs)
+ self.set_cookie(c)
+ return c
+
+ def iterkeys(self):
"""Dict-like iterkeys() that returns an iterator of names of cookies
from the jar.
.. seealso:: itervalues() and iteritems().
"""
- for cookie in iter(self):
- yield cookie.name
-
- def keys(self):
+ for cookie in iter(self):
+ yield cookie.name
+
+ def keys(self):
"""Dict-like keys() that returns a list of names of cookies from the
jar.
.. seealso:: values() and items().
"""
- return list(self.iterkeys())
-
- def itervalues(self):
+ return list(self.iterkeys())
+
+ def itervalues(self):
"""Dict-like itervalues() that returns an iterator of values of cookies
from the jar.
.. seealso:: iterkeys() and iteritems().
"""
- for cookie in iter(self):
- yield cookie.value
-
- def values(self):
+ for cookie in iter(self):
+ yield cookie.value
+
+ def values(self):
"""Dict-like values() that returns a list of values of cookies from the
jar.
.. seealso:: keys() and items().
"""
- return list(self.itervalues())
-
- def iteritems(self):
+ return list(self.itervalues())
+
+ def iteritems(self):
"""Dict-like iteritems() that returns an iterator of name-value tuples
from the jar.
.. seealso:: iterkeys() and itervalues().
"""
- for cookie in iter(self):
- yield cookie.name, cookie.value
-
- def items(self):
+ for cookie in iter(self):
+ yield cookie.name, cookie.value
+
+ def items(self):
"""Dict-like items() that returns a list of name-value tuples from the
jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
vanilla python dict of key value pairs.
.. seealso:: keys() and values().
"""
- return list(self.iteritems())
-
- def list_domains(self):
- """Utility method to list all the domains in the jar."""
- domains = []
- for cookie in iter(self):
- if cookie.domain not in domains:
- domains.append(cookie.domain)
- return domains
-
- def list_paths(self):
- """Utility method to list all the paths in the jar."""
- paths = []
- for cookie in iter(self):
- if cookie.path not in paths:
- paths.append(cookie.path)
- return paths
-
- def multiple_domains(self):
- """Returns True if there are multiple domains in the jar.
+ return list(self.iteritems())
+
+ def list_domains(self):
+ """Utility method to list all the domains in the jar."""
+ domains = []
+ for cookie in iter(self):
+ if cookie.domain not in domains:
+ domains.append(cookie.domain)
+ return domains
+
+ def list_paths(self):
+ """Utility method to list all the paths in the jar."""
+ paths = []
+ for cookie in iter(self):
+ if cookie.path not in paths:
+ paths.append(cookie.path)
+ return paths
+
+ def multiple_domains(self):
+ """Returns True if there are multiple domains in the jar.
Returns False otherwise.
:rtype: bool
"""
- domains = []
- for cookie in iter(self):
- if cookie.domain is not None and cookie.domain in domains:
- return True
- domains.append(cookie.domain)
- return False # there is only one domain in jar
-
- def get_dict(self, domain=None, path=None):
+ domains = []
+ for cookie in iter(self):
+ if cookie.domain is not None and cookie.domain in domains:
+ return True
+ domains.append(cookie.domain)
+ return False # there is only one domain in jar
+
+ def get_dict(self, domain=None, path=None):
"""Takes as an argument an optional domain and path and returns a plain
old Python dict of name-value pairs of cookies that meet the
requirements.
:rtype: dict
"""
- dictionary = {}
- for cookie in iter(self):
+ dictionary = {}
+ for cookie in iter(self):
if (
(domain is None or cookie.domain == domain) and
(path is None or cookie.path == path)
):
- dictionary[cookie.name] = cookie.value
- return dictionary
-
+ dictionary[cookie.name] = cookie.value
+ return dictionary
+
def __contains__(self, name):
try:
return super(RequestsCookieJar, self).__contains__(name)
except CookieConflictError:
return True
- def __getitem__(self, name):
+ def __getitem__(self, name):
"""Dict-like __getitem__() for compatibility with client code. Throws
exception if there is more than one cookie with that name. In that case,
use the more explicit get() method instead.
-
+
.. warning:: operation is O(n), not O(1).
"""
- return self._find_no_duplicates(name)
-
- def __setitem__(self, name, value):
+ return self._find_no_duplicates(name)
+
+ def __setitem__(self, name, value):
"""Dict-like __setitem__ for compatibility with client code. Throws
exception if there is already a cookie of that name in the jar. In that
case, use the more explicit set() method instead.
"""
- self.set(name, value)
-
- def __delitem__(self, name):
+ self.set(name, value)
+
+ def __delitem__(self, name):
"""Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
``remove_cookie_by_name()``.
"""
- remove_cookie_by_name(self, name)
-
- def set_cookie(self, cookie, *args, **kwargs):
- if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
- cookie.value = cookie.value.replace('\\"', '')
- return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
-
- def update(self, other):
- """Updates this jar with cookies from another CookieJar or dict-like"""
- if isinstance(other, cookielib.CookieJar):
- for cookie in other:
+ remove_cookie_by_name(self, name)
+
+ def set_cookie(self, cookie, *args, **kwargs):
+ if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
+ cookie.value = cookie.value.replace('\\"', '')
+ return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
+
+ def update(self, other):
+ """Updates this jar with cookies from another CookieJar or dict-like"""
+ if isinstance(other, cookielib.CookieJar):
+ for cookie in other:
self.set_cookie(copy.copy(cookie))
- else:
- super(RequestsCookieJar, self).update(other)
-
- def _find(self, name, domain=None, path=None):
+ else:
+ super(RequestsCookieJar, self).update(other)
+
+ def _find(self, name, domain=None, path=None):
"""Requests uses this method internally to get cookie values.
If there are conflicting cookies, _find arbitrarily chooses one.
@@ -365,15 +365,15 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
:param path: (optional) string containing path of cookie
:return: cookie.value
"""
- for cookie in iter(self):
- if cookie.name == name:
- if domain is None or cookie.domain == domain:
- if path is None or cookie.path == path:
- return cookie.value
-
- raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
-
- def _find_no_duplicates(self, name, domain=None, path=None):
+ for cookie in iter(self):
+ if cookie.name == name:
+ if domain is None or cookie.domain == domain:
+ if path is None or cookie.path == path:
+ return cookie.value
+
+ raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
+
+ def _find_no_duplicates(self, name, domain=None, path=None):
"""Both ``__get_item__`` and ``get`` call this function: it's never
used elsewhere in Requests.
@@ -385,43 +385,43 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
that match name and optionally domain and path
:return: cookie.value
"""
- toReturn = None
- for cookie in iter(self):
- if cookie.name == name:
- if domain is None or cookie.domain == domain:
- if path is None or cookie.path == path:
- if toReturn is not None: # if there are multiple cookies that meet passed in criteria
- raise CookieConflictError('There are multiple cookies with name, %r' % (name))
- toReturn = cookie.value # we will eventually return this as long as no cookie conflict
-
- if toReturn:
- return toReturn
- raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
-
- def __getstate__(self):
- """Unlike a normal CookieJar, this class is pickleable."""
- state = self.__dict__.copy()
- # remove the unpickleable RLock object
- state.pop('_cookies_lock')
- return state
-
- def __setstate__(self, state):
- """Unlike a normal CookieJar, this class is pickleable."""
- self.__dict__.update(state)
- if '_cookies_lock' not in self.__dict__:
- self._cookies_lock = threading.RLock()
-
- def copy(self):
- """Return a copy of this RequestsCookieJar."""
- new_cj = RequestsCookieJar()
+ toReturn = None
+ for cookie in iter(self):
+ if cookie.name == name:
+ if domain is None or cookie.domain == domain:
+ if path is None or cookie.path == path:
+ if toReturn is not None: # if there are multiple cookies that meet passed in criteria
+ raise CookieConflictError('There are multiple cookies with name, %r' % (name))
+ toReturn = cookie.value # we will eventually return this as long as no cookie conflict
+
+ if toReturn:
+ return toReturn
+ raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
+
+ def __getstate__(self):
+ """Unlike a normal CookieJar, this class is pickleable."""
+ state = self.__dict__.copy()
+ # remove the unpickleable RLock object
+ state.pop('_cookies_lock')
+ return state
+
+ def __setstate__(self, state):
+ """Unlike a normal CookieJar, this class is pickleable."""
+ self.__dict__.update(state)
+ if '_cookies_lock' not in self.__dict__:
+ self._cookies_lock = threading.RLock()
+
+ def copy(self):
+ """Return a copy of this RequestsCookieJar."""
+ new_cj = RequestsCookieJar()
new_cj.set_policy(self.get_policy())
- new_cj.update(self)
- return new_cj
-
+ new_cj.update(self)
+ return new_cj
+
def get_policy(self):
"""Return the CookiePolicy instance used."""
return self._policy
-
+
def _copy_cookie_jar(jar):
if jar is None:
@@ -438,12 +438,12 @@ def _copy_cookie_jar(jar):
return new_jar
-def create_cookie(name, value, **kwargs):
- """Make a cookie from underspecified parameters.
-
- By default, the pair of `name` and `value` will be set for the domain ''
- and sent on every request (this is sometimes called a "supercookie").
- """
+def create_cookie(name, value, **kwargs):
+ """Make a cookie from underspecified parameters.
+
+ By default, the pair of `name` and `value` will be set for the domain ''
+ and sent on every request (this is sometimes called a "supercookie").
+ """
result = {
'version': 0,
'name': name,
@@ -459,91 +459,91 @@ def create_cookie(name, value, **kwargs):
'rest': {'HttpOnly': None},
'rfc2109': False,
}
-
- badargs = set(kwargs) - set(result)
- if badargs:
- err = 'create_cookie() got unexpected keyword arguments: %s'
- raise TypeError(err % list(badargs))
-
- result.update(kwargs)
- result['port_specified'] = bool(result['port'])
- result['domain_specified'] = bool(result['domain'])
- result['domain_initial_dot'] = result['domain'].startswith('.')
- result['path_specified'] = bool(result['path'])
-
- return cookielib.Cookie(**result)
-
-
-def morsel_to_cookie(morsel):
- """Convert a Morsel object into a Cookie containing the one k/v pair."""
-
- expires = None
- if morsel['max-age']:
+
+ badargs = set(kwargs) - set(result)
+ if badargs:
+ err = 'create_cookie() got unexpected keyword arguments: %s'
+ raise TypeError(err % list(badargs))
+
+ result.update(kwargs)
+ result['port_specified'] = bool(result['port'])
+ result['domain_specified'] = bool(result['domain'])
+ result['domain_initial_dot'] = result['domain'].startswith('.')
+ result['path_specified'] = bool(result['path'])
+
+ return cookielib.Cookie(**result)
+
+
+def morsel_to_cookie(morsel):
+ """Convert a Morsel object into a Cookie containing the one k/v pair."""
+
+ expires = None
+ if morsel['max-age']:
try:
expires = int(time.time() + int(morsel['max-age']))
except ValueError:
raise TypeError('max-age: %s must be integer' % morsel['max-age'])
- elif morsel['expires']:
- time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
+ elif morsel['expires']:
+ time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
expires = calendar.timegm(
time.strptime(morsel['expires'], time_template)
)
- return create_cookie(
- comment=morsel['comment'],
- comment_url=bool(morsel['comment']),
- discard=False,
- domain=morsel['domain'],
- expires=expires,
- name=morsel.key,
- path=morsel['path'],
- port=None,
- rest={'HttpOnly': morsel['httponly']},
- rfc2109=False,
- secure=bool(morsel['secure']),
- value=morsel.value,
- version=morsel['version'] or 0,
- )
-
-
-def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
- """Returns a CookieJar from a key/value dictionary.
-
- :param cookie_dict: Dict of key/values to insert into CookieJar.
- :param cookiejar: (optional) A cookiejar to add the cookies to.
- :param overwrite: (optional) If False, will not replace cookies
- already in the jar with new ones.
+ return create_cookie(
+ comment=morsel['comment'],
+ comment_url=bool(morsel['comment']),
+ discard=False,
+ domain=morsel['domain'],
+ expires=expires,
+ name=morsel.key,
+ path=morsel['path'],
+ port=None,
+ rest={'HttpOnly': morsel['httponly']},
+ rfc2109=False,
+ secure=bool(morsel['secure']),
+ value=morsel.value,
+ version=morsel['version'] or 0,
+ )
+
+
+def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
+ """Returns a CookieJar from a key/value dictionary.
+
+ :param cookie_dict: Dict of key/values to insert into CookieJar.
+ :param cookiejar: (optional) A cookiejar to add the cookies to.
+ :param overwrite: (optional) If False, will not replace cookies
+ already in the jar with new ones.
:rtype: CookieJar
- """
- if cookiejar is None:
- cookiejar = RequestsCookieJar()
-
- if cookie_dict is not None:
- names_from_jar = [cookie.name for cookie in cookiejar]
- for name in cookie_dict:
- if overwrite or (name not in names_from_jar):
- cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
-
- return cookiejar
-
-
-def merge_cookies(cookiejar, cookies):
- """Add cookies to cookiejar and returns a merged CookieJar.
-
- :param cookiejar: CookieJar object to add the cookies to.
- :param cookies: Dictionary or CookieJar object to be added.
+ """
+ if cookiejar is None:
+ cookiejar = RequestsCookieJar()
+
+ if cookie_dict is not None:
+ names_from_jar = [cookie.name for cookie in cookiejar]
+ for name in cookie_dict:
+ if overwrite or (name not in names_from_jar):
+ cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
+
+ return cookiejar
+
+
+def merge_cookies(cookiejar, cookies):
+ """Add cookies to cookiejar and returns a merged CookieJar.
+
+ :param cookiejar: CookieJar object to add the cookies to.
+ :param cookies: Dictionary or CookieJar object to be added.
:rtype: CookieJar
- """
- if not isinstance(cookiejar, cookielib.CookieJar):
- raise ValueError('You can only merge into CookieJar')
-
- if isinstance(cookies, dict):
- cookiejar = cookiejar_from_dict(
- cookies, cookiejar=cookiejar, overwrite=False)
- elif isinstance(cookies, cookielib.CookieJar):
- try:
- cookiejar.update(cookies)
- except AttributeError:
- for cookie_in_jar in cookies:
- cookiejar.set_cookie(cookie_in_jar)
-
- return cookiejar
+ """
+ if not isinstance(cookiejar, cookielib.CookieJar):
+ raise ValueError('You can only merge into CookieJar')
+
+ if isinstance(cookies, dict):
+ cookiejar = cookiejar_from_dict(
+ cookies, cookiejar=cookiejar, overwrite=False)
+ elif isinstance(cookies, cookielib.CookieJar):
+ try:
+ cookiejar.update(cookies)
+ except AttributeError:
+ for cookie_in_jar in cookies:
+ cookiejar.set_cookie(cookie_in_jar)
+
+ return cookiejar
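A sketch of the dict-like jar restored above (names and domains are illustrative; not part of this commit). Plain indexing raises CookieConflictError when two domains carry the same cookie name, which is exactly what the domain-aware get()/set() resolve:

    from requests.cookies import RequestsCookieJar, cookiejar_from_dict

    jar = RequestsCookieJar()
    jar.set('token', 'abc', domain='a.example.com', path='/')
    jar.set('token', 'xyz', domain='b.example.com', path='/')

    print(jar.get('token', domain='b.example.com'))  # 'xyz'
    print(jar.get_dict(domain='a.example.com'))      # {'token': 'abc'}
    # jar['token'] would raise CookieConflictError here: two matching cookies.

    # cookiejar_from_dict() lifts a plain dict into a jar (this is what a
    # Session does with a `cookies=` dict argument).
    jar2 = cookiejar_from_dict({'session': '123'})
    print(jar2['session'])                           # '123'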
diff --git a/contrib/python/requests/requests/exceptions.py b/contrib/python/requests/requests/exceptions.py
index a373d1c435..79697635a5 100644
--- a/contrib/python/requests/requests/exceptions.py
+++ b/contrib/python/requests/requests/exceptions.py
@@ -1,32 +1,32 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.exceptions
-~~~~~~~~~~~~~~~~~~~
-
-This module contains the set of Requests' exceptions.
-"""
+# -*- coding: utf-8 -*-
+
+"""
+requests.exceptions
+~~~~~~~~~~~~~~~~~~~
+
+This module contains the set of Requests' exceptions.
+"""
from urllib3.exceptions import HTTPError as BaseHTTPError
-
+
from .compat import JSONDecodeError as CompatJSONDecodeError
-
-class RequestException(IOError):
- """There was an ambiguous exception that occurred while handling your
+
+class RequestException(IOError):
+ """There was an ambiguous exception that occurred while handling your
request.
"""
-
- def __init__(self, *args, **kwargs):
+
+ def __init__(self, *args, **kwargs):
"""Initialize RequestException with `request` and `response` objects."""
- response = kwargs.pop('response', None)
- self.response = response
- self.request = kwargs.pop('request', None)
- if (response is not None and not self.request and
- hasattr(response, 'request')):
- self.request = self.response.request
- super(RequestException, self).__init__(*args, **kwargs)
-
-
+ response = kwargs.pop('response', None)
+ self.response = response
+ self.request = kwargs.pop('request', None)
+ if (response is not None and not self.request and
+ hasattr(response, 'request')):
+ self.request = self.response.request
+ super(RequestException, self).__init__(*args, **kwargs)
+
+
class InvalidJSONError(RequestException):
"""A JSON error occurred."""
@@ -35,62 +35,62 @@ class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
"""Couldn't decode the text into json"""
-class HTTPError(RequestException):
- """An HTTP error occurred."""
-
-
-class ConnectionError(RequestException):
- """A Connection error occurred."""
-
-
-class ProxyError(ConnectionError):
- """A proxy error occurred."""
-
-
-class SSLError(ConnectionError):
- """An SSL error occurred."""
-
-
-class Timeout(RequestException):
- """The request timed out.
-
- Catching this error will catch both
- :exc:`~requests.exceptions.ConnectTimeout` and
- :exc:`~requests.exceptions.ReadTimeout` errors.
- """
-
-
-class ConnectTimeout(ConnectionError, Timeout):
- """The request timed out while trying to connect to the remote server.
-
- Requests that produced this error are safe to retry.
- """
-
-
-class ReadTimeout(Timeout):
- """The server did not send any data in the allotted amount of time."""
-
-
-class URLRequired(RequestException):
- """A valid URL is required to make a request."""
-
-
-class TooManyRedirects(RequestException):
- """Too many redirects."""
-
-
-class MissingSchema(RequestException, ValueError):
+class HTTPError(RequestException):
+ """An HTTP error occurred."""
+
+
+class ConnectionError(RequestException):
+ """A Connection error occurred."""
+
+
+class ProxyError(ConnectionError):
+ """A proxy error occurred."""
+
+
+class SSLError(ConnectionError):
+ """An SSL error occurred."""
+
+
+class Timeout(RequestException):
+ """The request timed out.
+
+ Catching this error will catch both
+ :exc:`~requests.exceptions.ConnectTimeout` and
+ :exc:`~requests.exceptions.ReadTimeout` errors.
+ """
+
+
+class ConnectTimeout(ConnectionError, Timeout):
+ """The request timed out while trying to connect to the remote server.
+
+ Requests that produced this error are safe to retry.
+ """
+
+
+class ReadTimeout(Timeout):
+ """The server did not send any data in the allotted amount of time."""
+
+
+class URLRequired(RequestException):
+ """A valid URL is required to make a request."""
+
+
+class TooManyRedirects(RequestException):
+ """Too many redirects."""
+
+
+class MissingSchema(RequestException, ValueError):
"""The URL scheme (e.g. http or https) is missing."""
-
-
-class InvalidSchema(RequestException, ValueError):
+
+
+class InvalidSchema(RequestException, ValueError):
"""The URL scheme provided is either invalid or unsupported."""
-
-
-class InvalidURL(RequestException, ValueError):
+
+
+class InvalidURL(RequestException, ValueError):
"""The URL provided was somehow invalid."""
-
-
+
+
class InvalidHeader(RequestException, ValueError):
"""The header value provided was somehow invalid."""
@@ -99,20 +99,20 @@ class InvalidProxyURL(InvalidURL):
"""The proxy URL provided is invalid."""
-class ChunkedEncodingError(RequestException):
- """The server declared chunked encoding but sent an invalid chunk."""
-
-
-class ContentDecodingError(RequestException, BaseHTTPError):
+class ChunkedEncodingError(RequestException):
+ """The server declared chunked encoding but sent an invalid chunk."""
+
+
+class ContentDecodingError(RequestException, BaseHTTPError):
"""Failed to decode response content."""
-
-
-class StreamConsumedError(RequestException, TypeError):
+
+
+class StreamConsumedError(RequestException, TypeError):
"""The content for this response was already consumed."""
-
-
-class RetryError(RequestException):
- """Custom retries logic failed"""
+
+
+class RetryError(RequestException):
+ """Custom retries logic failed"""
class UnrewindableBodyError(RequestException):
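Because the hierarchy being restored here is what callers key their error handling on, a small sketch of the broad-to-narrow catch order may help; the URL and timeout values are placeholders:

    # Sketch: ConnectTimeout derives from both ConnectionError and Timeout,
    # so it must be caught before either; RequestException is the base class.
    import requests

    try:
        requests.get('https://example.invalid/', timeout=(3.05, 10))
    except requests.exceptions.ConnectTimeout:
        pass  # never reached the server; safe to retry, per the docstring above
    except requests.exceptions.Timeout:
        pass  # ReadTimeout: connected, but the server stopped sending data
    except requests.exceptions.ConnectionError:
        pass  # DNS failure, refused connection, ProxyError, SSLError, ...
    except requests.exceptions.RequestException:
        pass  # anything else requests raised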
diff --git a/contrib/python/requests/requests/hooks.py b/contrib/python/requests/requests/hooks.py
index 1005cb7c51..7a51f212c8 100644
--- a/contrib/python/requests/requests/hooks.py
+++ b/contrib/python/requests/requests/hooks.py
@@ -1,34 +1,34 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.hooks
-~~~~~~~~~~~~~~
-
-This module provides the capabilities for the Requests hooks system.
-
-Available hooks:
-
-``response``:
- The response generated from a Request.
-"""
-HOOKS = ['response']
-
-
-def default_hooks():
+# -*- coding: utf-8 -*-
+
+"""
+requests.hooks
+~~~~~~~~~~~~~~
+
+This module provides the capabilities for the Requests hooks system.
+
+Available hooks:
+
+``response``:
+ The response generated from a Request.
+"""
+HOOKS = ['response']
+
+
+def default_hooks():
return {event: [] for event in HOOKS}
-
-# TODO: response is the only one
-
-
-def dispatch_hook(key, hooks, hook_data, **kwargs):
- """Dispatches a hook dictionary on a given piece of data."""
+
+# TODO: response is the only one
+
+
+def dispatch_hook(key, hooks, hook_data, **kwargs):
+ """Dispatches a hook dictionary on a given piece of data."""
hooks = hooks or {}
hooks = hooks.get(key)
if hooks:
- if hasattr(hooks, '__call__'):
- hooks = [hooks]
- for hook in hooks:
- _hook_data = hook(hook_data, **kwargs)
- if _hook_data is not None:
- hook_data = _hook_data
- return hook_data
+ if hasattr(hooks, '__call__'):
+ hooks = [hooks]
+ for hook in hooks:
+ _hook_data = hook(hook_data, **kwargs)
+ if _hook_data is not None:
+ hook_data = _hook_data
+ return hook_data
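The dispatch contract above (a bare callable is wrapped in a list; a non-None return value replaces hook_data) is what makes response hooks composable. A minimal sketch, assuming a live Session; the URL is reused from the usage examples elsewhere in this diff:

    # Sketch: registering a 'response' hook on a Session. dispatch_hook()
    # invokes it with the Response plus the send() keyword arguments.
    import requests

    def log_status(response, *args, **kwargs):
        print(response.status_code, response.url)
        # returning None keeps the original Response object

    session = requests.Session()
    session.hooks['response'].append(log_status)
    session.get('https://httpbin.org/get')  # hook fires once the response arrives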
diff --git a/contrib/python/requests/requests/models.py b/contrib/python/requests/requests/models.py
index 30a27cda71..dfbea854f9 100644
--- a/contrib/python/requests/requests/models.py
+++ b/contrib/python/requests/requests/models.py
@@ -1,15 +1,15 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.models
-~~~~~~~~~~~~~~~
-
-This module contains the primary objects that power Requests.
-"""
-
-import datetime
+# -*- coding: utf-8 -*-
+
+"""
+requests.models
+~~~~~~~~~~~~~~~
+
+This module contains the primary objects that power Requests.
+"""
+
+import datetime
import sys
-
+
# Import encoding now, to avoid implicit import later.
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
@@ -22,139 +22,139 @@ from urllib3.exceptions import (
DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
from io import UnsupportedOperation
-from .hooks import default_hooks
-from .structures import CaseInsensitiveDict
-
-from .auth import HTTPBasicAuth
+from .hooks import default_hooks
+from .structures import CaseInsensitiveDict
+
+from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
-from .exceptions import (
- HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
+from .exceptions import (
+ HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError,
InvalidJSONError)
from .exceptions import JSONDecodeError as RequestsJSONDecodeError
from ._internal_utils import to_native_string, unicode_is_ascii
-from .utils import (
- guess_filename, get_auth_from_url, requote_uri,
- stream_decode_response_unicode, to_key_val_list, parse_header_links,
+from .utils import (
+ guess_filename, get_auth_from_url, requote_uri,
+ stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, check_header_validity)
-from .compat import (
+from .compat import (
Callable, Mapping,
cookielib, urlunparse, urlsplit, urlencode, str, bytes,
is_py2, chardet, builtin_str, basestring, JSONDecodeError)
from .compat import json as complexjson
-from .status_codes import codes
-
-#: The set of HTTP status codes that indicate an automatically
-#: processable redirect.
-REDIRECT_STATI = (
+from .status_codes import codes
+
+#: The set of HTTP status codes that indicate an automatically
+#: processable redirect.
+REDIRECT_STATI = (
codes.moved, # 301
codes.found, # 302
codes.other, # 303
codes.temporary_redirect, # 307
codes.permanent_redirect, # 308
-)
-
-DEFAULT_REDIRECT_LIMIT = 30
-CONTENT_CHUNK_SIZE = 10 * 1024
-ITER_CHUNK_SIZE = 512
-
-
-class RequestEncodingMixin(object):
- @property
- def path_url(self):
- """Build the path URL to use."""
-
- url = []
-
- p = urlsplit(self.url)
-
- path = p.path
- if not path:
- path = '/'
-
- url.append(path)
-
- query = p.query
- if query:
- url.append('?')
- url.append(query)
-
- return ''.join(url)
-
- @staticmethod
- def _encode_params(data):
- """Encode parameters in a piece of data.
-
- Will successfully encode parameters when passed as a dict or a list of
- 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
- if parameters are supplied as a dict.
- """
-
- if isinstance(data, (str, bytes)):
- return data
- elif hasattr(data, 'read'):
- return data
- elif hasattr(data, '__iter__'):
- result = []
- for k, vs in to_key_val_list(data):
- if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
- vs = [vs]
- for v in vs:
- if v is not None:
- result.append(
- (k.encode('utf-8') if isinstance(k, str) else k,
- v.encode('utf-8') if isinstance(v, str) else v))
- return urlencode(result, doseq=True)
- else:
- return data
-
- @staticmethod
- def _encode_files(files, data):
- """Build the body for a multipart/form-data request.
-
- Will successfully encode files when passed as a dict or a list of
+)
+
+DEFAULT_REDIRECT_LIMIT = 30
+CONTENT_CHUNK_SIZE = 10 * 1024
+ITER_CHUNK_SIZE = 512
+
+
+class RequestEncodingMixin(object):
+ @property
+ def path_url(self):
+ """Build the path URL to use."""
+
+ url = []
+
+ p = urlsplit(self.url)
+
+ path = p.path
+ if not path:
+ path = '/'
+
+ url.append(path)
+
+ query = p.query
+ if query:
+ url.append('?')
+ url.append(query)
+
+ return ''.join(url)
+
+ @staticmethod
+ def _encode_params(data):
+ """Encode parameters in a piece of data.
+
+ Will successfully encode parameters when passed as a dict or a list of
+ 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
+ if parameters are supplied as a dict.
+ """
+
+ if isinstance(data, (str, bytes)):
+ return data
+ elif hasattr(data, 'read'):
+ return data
+ elif hasattr(data, '__iter__'):
+ result = []
+ for k, vs in to_key_val_list(data):
+ if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
+ vs = [vs]
+ for v in vs:
+ if v is not None:
+ result.append(
+ (k.encode('utf-8') if isinstance(k, str) else k,
+ v.encode('utf-8') if isinstance(v, str) else v))
+ return urlencode(result, doseq=True)
+ else:
+ return data
+
+ @staticmethod
+ def _encode_files(files, data):
+ """Build the body for a multipart/form-data request.
+
+ Will successfully encode files when passed as a dict or a list of
tuples. Order is retained if data is a list of tuples but arbitrary
- if parameters are supplied as a dict.
+ if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, content_type)
        or 4-tuples (filename, fileobj, content_type, custom_headers).
- """
- if (not files):
- raise ValueError("Files must be provided.")
- elif isinstance(data, basestring):
- raise ValueError("Data must not be a string.")
-
- new_fields = []
- fields = to_key_val_list(data or {})
- files = to_key_val_list(files or {})
-
- for field, val in fields:
- if isinstance(val, basestring) or not hasattr(val, '__iter__'):
- val = [val]
- for v in val:
- if v is not None:
- # Don't call str() on bytestrings: in Py3 it all goes wrong.
- if not isinstance(v, bytes):
- v = str(v)
-
- new_fields.append(
- (field.decode('utf-8') if isinstance(field, bytes) else field,
- v.encode('utf-8') if isinstance(v, str) else v))
-
- for (k, v) in files:
- # support for explicit filename
- ft = None
- fh = None
- if isinstance(v, (tuple, list)):
- if len(v) == 2:
- fn, fp = v
- elif len(v) == 3:
- fn, fp, ft = v
- else:
- fn, fp, ft, fh = v
- else:
- fn = guess_filename(v) or k
- fp = v
-
+ """
+ if (not files):
+ raise ValueError("Files must be provided.")
+ elif isinstance(data, basestring):
+ raise ValueError("Data must not be a string.")
+
+ new_fields = []
+ fields = to_key_val_list(data or {})
+ files = to_key_val_list(files or {})
+
+ for field, val in fields:
+ if isinstance(val, basestring) or not hasattr(val, '__iter__'):
+ val = [val]
+ for v in val:
+ if v is not None:
+ # Don't call str() on bytestrings: in Py3 it all goes wrong.
+ if not isinstance(v, bytes):
+ v = str(v)
+
+ new_fields.append(
+ (field.decode('utf-8') if isinstance(field, bytes) else field,
+ v.encode('utf-8') if isinstance(v, str) else v))
+
+ for (k, v) in files:
+ # support for explicit filename
+ ft = None
+ fh = None
+ if isinstance(v, (tuple, list)):
+ if len(v) == 2:
+ fn, fp = v
+ elif len(v) == 3:
+ fn, fp, ft = v
+ else:
+ fn, fp, ft, fh = v
+ else:
+ fn = guess_filename(v) or k
+ fp = v
+
if isinstance(fp, (str, bytes, bytearray)):
fdata = fp
elif hasattr(fp, 'read'):
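The encoding rules spelled out in _encode_params above can be exercised directly, since it is a staticmethod. A minimal sketch; the keys and values are illustrative:

    # Sketch: _encode_params on the common input shapes.
    from requests.models import RequestEncodingMixin

    enc = RequestEncodingMixin._encode_params
    assert enc('a=1&b=2') == 'a=1&b=2'                     # strings pass through
    assert enc([('k', 'v1'), ('k', 'v2')]) == 'k=v1&k=v2'  # order kept for 2-tuples
    assert enc({'q': 'cats'}) == 'q=cats'                  # dicts are urlencoded
    assert enc({'k': ['a', 'b']}) == 'k=a&k=b'             # iterable values fan out
    assert enc({'k': None}) == ''                          # None values are dropped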
@@ -165,47 +165,47 @@ class RequestEncodingMixin(object):
fdata = fp
rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
- rf.make_multipart(content_type=ft)
- new_fields.append(rf)
-
- body, content_type = encode_multipart_formdata(new_fields)
-
- return body, content_type
-
-
-class RequestHooksMixin(object):
- def register_hook(self, event, hook):
- """Properly register a hook."""
-
- if event not in self.hooks:
- raise ValueError('Unsupported event specified, with event name "%s"' % (event))
-
+ rf.make_multipart(content_type=ft)
+ new_fields.append(rf)
+
+ body, content_type = encode_multipart_formdata(new_fields)
+
+ return body, content_type
+
+
+class RequestHooksMixin(object):
+ def register_hook(self, event, hook):
+ """Properly register a hook."""
+
+ if event not in self.hooks:
+ raise ValueError('Unsupported event specified, with event name "%s"' % (event))
+
if isinstance(hook, Callable):
- self.hooks[event].append(hook)
- elif hasattr(hook, '__iter__'):
+ self.hooks[event].append(hook)
+ elif hasattr(hook, '__iter__'):
self.hooks[event].extend(h for h in hook if isinstance(h, Callable))
-
- def deregister_hook(self, event, hook):
- """Deregister a previously registered hook.
- Returns True if the hook existed, False if not.
- """
-
- try:
- self.hooks[event].remove(hook)
- return True
- except ValueError:
- return False
-
-
-class Request(RequestHooksMixin):
- """A user-created :class:`Request <Request>` object.
-
- Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
-
- :param method: HTTP method to use.
- :param url: URL to send.
- :param headers: dictionary of headers to send.
- :param files: dictionary of {filename: fileobject} files to multipart upload.
+
+ def deregister_hook(self, event, hook):
+ """Deregister a previously registered hook.
+ Returns True if the hook existed, False if not.
+ """
+
+ try:
+ self.hooks[event].remove(hook)
+ return True
+ except ValueError:
+ return False
+
+
+class Request(RequestHooksMixin):
+ """A user-created :class:`Request <Request>` object.
+
+ Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
+
+ :param method: HTTP method to use.
+ :param url: URL to send.
+ :param headers: dictionary of headers to send.
+ :param files: dictionary of {filename: fileobject} files to multipart upload.
:param data: the body to attach to the request. If a dictionary or
list of tuples ``[(key, value)]`` is provided, form-encoding will
take place.
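The Request/PreparedRequest split documented above runs end to end as in this minimal sketch, reusing the httpbin URL from the docstring's own example; the header and query parameter are illustrative:

    # Sketch: user-level Request -> PreparedRequest -> Session.send().
    import requests

    req = requests.Request('GET', 'https://httpbin.org/get',
                           params={'q': 'cats'},
                           headers={'X-Sketch': 'demo'})
    prepped = req.prepare()                  # freeze into the exact bytes to send
    assert prepped.url.endswith('/get?q=cats')

    with requests.Session() as s:
        resp = s.send(prepped)
        print(resp.status_code)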
@@ -213,140 +213,140 @@ class Request(RequestHooksMixin):
:param params: URL parameters to append to the URL. If a dictionary or
list of tuples ``[(key, value)]`` is provided, form-encoding will
take place.
- :param auth: Auth handler or (user, pass) tuple.
- :param cookies: dictionary or CookieJar of cookies to attach to this request.
- :param hooks: dictionary of callback hooks, for internal usage.
-
- Usage::
-
- >>> import requests
+ :param auth: Auth handler or (user, pass) tuple.
+ :param cookies: dictionary or CookieJar of cookies to attach to this request.
+ :param hooks: dictionary of callback hooks, for internal usage.
+
+ Usage::
+
+ >>> import requests
>>> req = requests.Request('GET', 'https://httpbin.org/get')
- >>> req.prepare()
- <PreparedRequest [GET]>
+ >>> req.prepare()
+ <PreparedRequest [GET]>
"""
-
- def __init__(self,
+
+ def __init__(self,
method=None, url=None, headers=None, files=None, data=None,
params=None, auth=None, cookies=None, hooks=None, json=None):
-
- # Default empty dicts for dict params.
- data = [] if data is None else data
- files = [] if files is None else files
- headers = {} if headers is None else headers
- params = {} if params is None else params
- hooks = {} if hooks is None else hooks
-
- self.hooks = default_hooks()
- for (k, v) in list(hooks.items()):
- self.register_hook(event=k, hook=v)
-
- self.method = method
- self.url = url
- self.headers = headers
- self.files = files
- self.data = data
- self.json = json
- self.params = params
- self.auth = auth
- self.cookies = cookies
-
- def __repr__(self):
- return '<Request [%s]>' % (self.method)
-
- def prepare(self):
- """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
- p = PreparedRequest()
- p.prepare(
- method=self.method,
- url=self.url,
- headers=self.headers,
- files=self.files,
- data=self.data,
- json=self.json,
- params=self.params,
- auth=self.auth,
- cookies=self.cookies,
- hooks=self.hooks,
- )
- return p
-
-
-class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
- """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
- containing the exact bytes that will be sent to the server.
-
+
+ # Default empty dicts for dict params.
+ data = [] if data is None else data
+ files = [] if files is None else files
+ headers = {} if headers is None else headers
+ params = {} if params is None else params
+ hooks = {} if hooks is None else hooks
+
+ self.hooks = default_hooks()
+ for (k, v) in list(hooks.items()):
+ self.register_hook(event=k, hook=v)
+
+ self.method = method
+ self.url = url
+ self.headers = headers
+ self.files = files
+ self.data = data
+ self.json = json
+ self.params = params
+ self.auth = auth
+ self.cookies = cookies
+
+ def __repr__(self):
+ return '<Request [%s]>' % (self.method)
+
+ def prepare(self):
+ """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
+ p = PreparedRequest()
+ p.prepare(
+ method=self.method,
+ url=self.url,
+ headers=self.headers,
+ files=self.files,
+ data=self.data,
+ json=self.json,
+ params=self.params,
+ auth=self.auth,
+ cookies=self.cookies,
+ hooks=self.hooks,
+ )
+ return p
+
+
+class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
+ """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
+ containing the exact bytes that will be sent to the server.
+
Instances are generated from a :class:`Request <Request>` object, and
should not be instantiated manually; doing so may produce undesirable
effects.
-
- Usage::
-
- >>> import requests
+
+ Usage::
+
+ >>> import requests
>>> req = requests.Request('GET', 'https://httpbin.org/get')
- >>> r = req.prepare()
+ >>> r = req.prepare()
>>> r
- <PreparedRequest [GET]>
-
- >>> s = requests.Session()
- >>> s.send(r)
- <Response [200]>
- """
-
- def __init__(self):
- #: HTTP verb to send to the server.
- self.method = None
- #: HTTP URL to send the request to.
- self.url = None
- #: dictionary of HTTP headers.
- self.headers = None
- # The `CookieJar` used to create the Cookie header will be stored here
- # after prepare_cookies is called
- self._cookies = None
- #: request body to send to the server.
- self.body = None
- #: dictionary of callback hooks, for internal usage.
- self.hooks = default_hooks()
+ <PreparedRequest [GET]>
+
+ >>> s = requests.Session()
+ >>> s.send(r)
+ <Response [200]>
+ """
+
+ def __init__(self):
+ #: HTTP verb to send to the server.
+ self.method = None
+ #: HTTP URL to send the request to.
+ self.url = None
+ #: dictionary of HTTP headers.
+ self.headers = None
+ # The `CookieJar` used to create the Cookie header will be stored here
+ # after prepare_cookies is called
+ self._cookies = None
+ #: request body to send to the server.
+ self.body = None
+ #: dictionary of callback hooks, for internal usage.
+ self.hooks = default_hooks()
#: integer denoting starting position of a readable file-like body.
self._body_position = None
-
+
def prepare(self,
method=None, url=None, headers=None, files=None, data=None,
params=None, auth=None, cookies=None, hooks=None, json=None):
- """Prepares the entire request with the given parameters."""
-
- self.prepare_method(method)
- self.prepare_url(url, params)
- self.prepare_headers(headers)
- self.prepare_cookies(cookies)
- self.prepare_body(data, files, json)
- self.prepare_auth(auth, url)
-
- # Note that prepare_auth must be last to enable authentication schemes
- # such as OAuth to work on a fully prepared request.
-
- # This MUST go after prepare_auth. Authenticators could add a hook
- self.prepare_hooks(hooks)
-
- def __repr__(self):
- return '<PreparedRequest [%s]>' % (self.method)
-
- def copy(self):
- p = PreparedRequest()
- p.method = self.method
- p.url = self.url
- p.headers = self.headers.copy() if self.headers is not None else None
+ """Prepares the entire request with the given parameters."""
+
+ self.prepare_method(method)
+ self.prepare_url(url, params)
+ self.prepare_headers(headers)
+ self.prepare_cookies(cookies)
+ self.prepare_body(data, files, json)
+ self.prepare_auth(auth, url)
+
+ # Note that prepare_auth must be last to enable authentication schemes
+ # such as OAuth to work on a fully prepared request.
+
+ # This MUST go after prepare_auth. Authenticators could add a hook
+ self.prepare_hooks(hooks)
+
+ def __repr__(self):
+ return '<PreparedRequest [%s]>' % (self.method)
+
+ def copy(self):
+ p = PreparedRequest()
+ p.method = self.method
+ p.url = self.url
+ p.headers = self.headers.copy() if self.headers is not None else None
p._cookies = _copy_cookie_jar(self._cookies)
- p.body = self.body
- p.hooks = self.hooks
+ p.body = self.body
+ p.hooks = self.hooks
p._body_position = self._body_position
- return p
-
- def prepare_method(self, method):
- """Prepares the given HTTP method."""
- self.method = method
- if self.method is not None:
+ return p
+
+ def prepare_method(self, method):
+ """Prepares the given HTTP method."""
+ self.method = method
+ if self.method is not None:
self.method = to_native_string(self.method.upper())
-
+
@staticmethod
def _get_idna_encoded_host(host):
import idna
@@ -357,43 +357,43 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
raise UnicodeError
return host
- def prepare_url(self, url, params):
- """Prepares the given HTTP URL."""
- #: Accept objects that have string representations.
+ def prepare_url(self, url, params):
+ """Prepares the given HTTP URL."""
+ #: Accept objects that have string representations.
#: We're unable to blindly call unicode/str functions
- #: as this will include the bytestring indicator (b'')
- #: on python 3.x.
+ #: as this will include the bytestring indicator (b'')
+ #: on python 3.x.
#: https://github.com/psf/requests/pull/2238
- if isinstance(url, bytes):
- url = url.decode('utf8')
- else:
- url = unicode(url) if is_py2 else str(url)
-
+ if isinstance(url, bytes):
+ url = url.decode('utf8')
+ else:
+ url = unicode(url) if is_py2 else str(url)
+
# Remove leading whitespaces from url
url = url.lstrip()
- # Don't do any URL preparation for non-HTTP schemes like `mailto`,
- # `data` etc to work around exceptions from `url_parse`, which
- # handles RFC 3986 only.
- if ':' in url and not url.lower().startswith('http'):
- self.url = url
- return
-
- # Support for unicode domain names and paths.
- try:
- scheme, auth, host, port, path, query, fragment = parse_url(url)
- except LocationParseError as e:
- raise InvalidURL(*e.args)
-
- if not scheme:
+ # Don't do any URL preparation for non-HTTP schemes like `mailto`,
+ # `data` etc to work around exceptions from `url_parse`, which
+ # handles RFC 3986 only.
+ if ':' in url and not url.lower().startswith('http'):
+ self.url = url
+ return
+
+ # Support for unicode domain names and paths.
+ try:
+ scheme, auth, host, port, path, query, fragment = parse_url(url)
+ except LocationParseError as e:
+ raise InvalidURL(*e.args)
+
+ if not scheme:
error = ("Invalid URL {0!r}: No scheme supplied. Perhaps you meant http://{0}?")
error = error.format(to_native_string(url, 'utf8'))
-
+
raise MissingSchema(error)
- if not host:
- raise InvalidURL("Invalid URL %r: No host supplied" % url)
-
+ if not host:
+ raise InvalidURL("Invalid URL %r: No host supplied" % url)
+
# In general, we want to try IDNA encoding the hostname if the string contains
# non-ASCII characters. This allows users to automatically get the correct IDNA
# behaviour. For strings containing only ASCII characters, we need to also verify
@@ -404,70 +404,70 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
elif host.startswith((u'*', u'.')):
- raise InvalidURL('URL has an invalid label.')
-
- # Carefully reconstruct the network location
- netloc = auth or ''
- if netloc:
- netloc += '@'
- netloc += host
- if port:
- netloc += ':' + str(port)
-
- # Bare domains aren't valid URLs.
- if not path:
- path = '/'
-
- if is_py2:
- if isinstance(scheme, str):
- scheme = scheme.encode('utf-8')
- if isinstance(netloc, str):
- netloc = netloc.encode('utf-8')
- if isinstance(path, str):
- path = path.encode('utf-8')
- if isinstance(query, str):
- query = query.encode('utf-8')
- if isinstance(fragment, str):
- fragment = fragment.encode('utf-8')
-
+ raise InvalidURL('URL has an invalid label.')
+
+ # Carefully reconstruct the network location
+ netloc = auth or ''
+ if netloc:
+ netloc += '@'
+ netloc += host
+ if port:
+ netloc += ':' + str(port)
+
+ # Bare domains aren't valid URLs.
+ if not path:
+ path = '/'
+
+ if is_py2:
+ if isinstance(scheme, str):
+ scheme = scheme.encode('utf-8')
+ if isinstance(netloc, str):
+ netloc = netloc.encode('utf-8')
+ if isinstance(path, str):
+ path = path.encode('utf-8')
+ if isinstance(query, str):
+ query = query.encode('utf-8')
+ if isinstance(fragment, str):
+ fragment = fragment.encode('utf-8')
+
if isinstance(params, (str, bytes)):
params = to_native_string(params)
- enc_params = self._encode_params(params)
- if enc_params:
- if query:
- query = '%s&%s' % (query, enc_params)
- else:
- query = enc_params
-
- url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
- self.url = url
-
- def prepare_headers(self, headers):
- """Prepares the given HTTP headers."""
-
+ enc_params = self._encode_params(params)
+ if enc_params:
+ if query:
+ query = '%s&%s' % (query, enc_params)
+ else:
+ query = enc_params
+
+ url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
+ self.url = url
+
+ def prepare_headers(self, headers):
+ """Prepares the given HTTP headers."""
+
self.headers = CaseInsensitiveDict()
- if headers:
+ if headers:
for header in headers.items():
# Raise exception on invalid header value.
check_header_validity(header)
name, value = header
self.headers[to_native_string(name)] = value
-
- def prepare_body(self, data, files, json=None):
- """Prepares the given HTTP body data."""
-
- # Check if file, fo, generator, iterator.
- # If not, run through normal process.
-
- # Nottin' on you.
- body = None
- content_type = None
-
+
+ def prepare_body(self, data, files, json=None):
+ """Prepares the given HTTP body data."""
+
+        # Check if file, file-like object, generator, or iterator.
+ # If not, run through normal process.
+
+ # Nottin' on you.
+ body = None
+ content_type = None
+
if not data and json is not None:
# urllib3 requires a bytes-like body. Python 2's json.dumps
# provides this natively, but Python 3 gives a Unicode string.
- content_type = 'application/json'
+ content_type = 'application/json'
try:
body = complexjson.dumps(json, allow_nan=False)
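From the caller's side, the json branch being patched here behaves as in this small sketch (payload illustrative; the byte-for-byte body shape assumes the UTF-8 encode performed just below):

    # Sketch: json= serialises the payload, encodes it to bytes for urllib3,
    # and sets Content-Type unless the caller already provided one.
    from requests.models import Request

    p = Request('POST', 'https://httpbin.org/post', json={'answer': 42}).prepare()
    assert p.headers['Content-Type'] == 'application/json'
    assert p.body == b'{"answer": 42}'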
@@ -476,20 +476,20 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
if not isinstance(body, bytes):
body = body.encode('utf-8')
-
- is_stream = all([
- hasattr(data, '__iter__'),
+
+ is_stream = all([
+ hasattr(data, '__iter__'),
not isinstance(data, (basestring, list, tuple, Mapping))
- ])
-
+ ])
+
if is_stream:
try:
length = super_len(data)
except (TypeError, AttributeError, UnsupportedOperation):
length = None
-
- body = data
-
+
+ body = data
+
if getattr(body, 'tell', None) is not None:
# Record the current file position before reading.
# This will allow us to rewind a file in the event
@@ -501,34 +501,34 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
# a failed `tell()` later when trying to rewind the body
self._body_position = object()
- if files:
- raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
-
+ if files:
+ raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
+
if length:
- self.headers['Content-Length'] = builtin_str(length)
- else:
- self.headers['Transfer-Encoding'] = 'chunked'
- else:
- # Multi-part file uploads.
- if files:
- (body, content_type) = self._encode_files(files, data)
- else:
+ self.headers['Content-Length'] = builtin_str(length)
+ else:
+ self.headers['Transfer-Encoding'] = 'chunked'
+ else:
+ # Multi-part file uploads.
+ if files:
+ (body, content_type) = self._encode_files(files, data)
+ else:
if data:
- body = self._encode_params(data)
- if isinstance(data, basestring) or hasattr(data, 'read'):
- content_type = None
- else:
- content_type = 'application/x-www-form-urlencoded'
-
- self.prepare_content_length(body)
-
- # Add content-type if it wasn't explicitly provided.
- if content_type and ('content-type' not in self.headers):
- self.headers['Content-Type'] = content_type
-
- self.body = body
-
- def prepare_content_length(self, body):
+ body = self._encode_params(data)
+ if isinstance(data, basestring) or hasattr(data, 'read'):
+ content_type = None
+ else:
+ content_type = 'application/x-www-form-urlencoded'
+
+ self.prepare_content_length(body)
+
+ # Add content-type if it wasn't explicitly provided.
+ if content_type and ('content-type' not in self.headers):
+ self.headers['Content-Type'] = content_type
+
+ self.body = body
+
+ def prepare_content_length(self, body):
"""Prepare Content-Length header based on request method and body"""
if body is not None:
length = super_len(body)
@@ -539,33 +539,33 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
# Set Content-Length to 0 for methods that can have a body
# but don't provide one. (i.e. not GET or HEAD)
- self.headers['Content-Length'] = '0'
-
- def prepare_auth(self, auth, url=''):
- """Prepares the given HTTP auth data."""
-
- # If no Auth is explicitly provided, extract it from the URL first.
- if auth is None:
- url_auth = get_auth_from_url(self.url)
- auth = url_auth if any(url_auth) else None
-
- if auth:
- if isinstance(auth, tuple) and len(auth) == 2:
- # special-case basic HTTP auth
- auth = HTTPBasicAuth(*auth)
-
- # Allow auth to make its changes.
- r = auth(self)
-
- # Update self to reflect the auth changes.
- self.__dict__.update(r.__dict__)
-
- # Recompute Content-Length
- self.prepare_content_length(self.body)
-
- def prepare_cookies(self, cookies):
+ self.headers['Content-Length'] = '0'
+
+ def prepare_auth(self, auth, url=''):
+ """Prepares the given HTTP auth data."""
+
+ # If no Auth is explicitly provided, extract it from the URL first.
+ if auth is None:
+ url_auth = get_auth_from_url(self.url)
+ auth = url_auth if any(url_auth) else None
+
+ if auth:
+ if isinstance(auth, tuple) and len(auth) == 2:
+ # special-case basic HTTP auth
+ auth = HTTPBasicAuth(*auth)
+
+ # Allow auth to make its changes.
+ r = auth(self)
+
+ # Update self to reflect the auth changes.
+ self.__dict__.update(r.__dict__)
+
+ # Recompute Content-Length
+ self.prepare_content_length(self.body)
+
+ def prepare_cookies(self, cookies):
"""Prepares the given HTTP cookie data.
-
+
This function eventually generates a ``Cookie`` header from the
given cookies using cookielib. Due to cookielib's design, the header
will not be regenerated if it already exists, meaning this function
@@ -574,108 +574,108 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
header is removed beforehand.
"""
- if isinstance(cookies, cookielib.CookieJar):
- self._cookies = cookies
- else:
- self._cookies = cookiejar_from_dict(cookies)
-
- cookie_header = get_cookie_header(self._cookies, self)
- if cookie_header is not None:
- self.headers['Cookie'] = cookie_header
-
- def prepare_hooks(self, hooks):
- """Prepares the given hooks."""
+ if isinstance(cookies, cookielib.CookieJar):
+ self._cookies = cookies
+ else:
+ self._cookies = cookiejar_from_dict(cookies)
+
+ cookie_header = get_cookie_header(self._cookies, self)
+ if cookie_header is not None:
+ self.headers['Cookie'] = cookie_header
+
+ def prepare_hooks(self, hooks):
+ """Prepares the given hooks."""
# hooks can be passed as None to the prepare method and to this
# method. To prevent iterating over None, simply use an empty list
# if hooks is False-y
hooks = hooks or []
- for event in hooks:
- self.register_hook(event, hooks[event])
-
-
-class Response(object):
- """The :class:`Response <Response>` object, which contains a
- server's response to an HTTP request.
- """
-
- __attrs__ = [
+ for event in hooks:
+ self.register_hook(event, hooks[event])
+
+
+class Response(object):
+ """The :class:`Response <Response>` object, which contains a
+ server's response to an HTTP request.
+ """
+
+ __attrs__ = [
'_content', 'status_code', 'headers', 'url', 'history',
'encoding', 'reason', 'cookies', 'elapsed', 'request'
- ]
-
- def __init__(self):
- self._content = False
- self._content_consumed = False
+ ]
+
+ def __init__(self):
+ self._content = False
+ self._content_consumed = False
self._next = None
-
- #: Integer Code of responded HTTP Status, e.g. 404 or 200.
- self.status_code = None
-
- #: Case-insensitive Dictionary of Response Headers.
- #: For example, ``headers['content-encoding']`` will return the
- #: value of a ``'Content-Encoding'`` response header.
- self.headers = CaseInsensitiveDict()
-
- #: File-like object representation of response (for advanced usage).
- #: Use of ``raw`` requires that ``stream=True`` be set on the request.
+
+ #: Integer Code of responded HTTP Status, e.g. 404 or 200.
+ self.status_code = None
+
+ #: Case-insensitive Dictionary of Response Headers.
+ #: For example, ``headers['content-encoding']`` will return the
+ #: value of a ``'Content-Encoding'`` response header.
+ self.headers = CaseInsensitiveDict()
+
+ #: File-like object representation of response (for advanced usage).
+ #: Use of ``raw`` requires that ``stream=True`` be set on the request.
#: This requirement does not apply for use internally to Requests.
- self.raw = None
-
- #: Final URL location of Response.
- self.url = None
-
- #: Encoding to decode with when accessing r.text.
- self.encoding = None
-
- #: A list of :class:`Response <Response>` objects from
- #: the history of the Request. Any redirect responses will end
- #: up here. The list is sorted from the oldest to the most recent request.
- self.history = []
-
- #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
- self.reason = None
-
- #: A CookieJar of Cookies the server sent back.
- self.cookies = cookiejar_from_dict({})
-
- #: The amount of time elapsed between sending the request
+ self.raw = None
+
+ #: Final URL location of Response.
+ self.url = None
+
+ #: Encoding to decode with when accessing r.text.
+ self.encoding = None
+
+ #: A list of :class:`Response <Response>` objects from
+ #: the history of the Request. Any redirect responses will end
+ #: up here. The list is sorted from the oldest to the most recent request.
+ self.history = []
+
+ #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
+ self.reason = None
+
+ #: A CookieJar of Cookies the server sent back.
+ self.cookies = cookiejar_from_dict({})
+
+ #: The amount of time elapsed between sending the request
#: and the arrival of the response (as a timedelta).
#: This property specifically measures the time taken between sending
#: the first byte of the request and finishing parsing the headers. It
#: is therefore unaffected by consuming the response content or the
#: value of the ``stream`` keyword argument.
- self.elapsed = datetime.timedelta(0)
-
- #: The :class:`PreparedRequest <PreparedRequest>` object to which this
- #: is a response.
- self.request = None
-
+ self.elapsed = datetime.timedelta(0)
+
+ #: The :class:`PreparedRequest <PreparedRequest>` object to which this
+ #: is a response.
+ self.request = None
+
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
- def __getstate__(self):
- # Consume everything; accessing the content attribute makes
- # sure the content has been fully read.
- if not self._content_consumed:
- self.content
-
+ def __getstate__(self):
+ # Consume everything; accessing the content attribute makes
+ # sure the content has been fully read.
+ if not self._content_consumed:
+ self.content
+
return {attr: getattr(self, attr, None) for attr in self.__attrs__}
-
- def __setstate__(self, state):
- for name, value in state.items():
- setattr(self, name, value)
-
- # pickled objects do not have .raw
- setattr(self, '_content_consumed', True)
- setattr(self, 'raw', None)
-
- def __repr__(self):
- return '<Response [%s]>' % (self.status_code)
-
- def __bool__(self):
+
+ def __setstate__(self, state):
+ for name, value in state.items():
+ setattr(self, name, value)
+
+ # pickled objects do not have .raw
+ setattr(self, '_content_consumed', True)
+ setattr(self, 'raw', None)
+
+ def __repr__(self):
+ return '<Response [%s]>' % (self.status_code)
+
+ def __bool__(self):
"""Returns True if :attr:`status_code` is less than 400.
This attribute checks if the status code of the response is between
@@ -683,9 +683,9 @@ class Response(object):
    the status code is between 200 and 400, this will return True. This
is **not** a check to see if the response code is ``200 OK``.
"""
- return self.ok
-
- def __nonzero__(self):
+ return self.ok
+
+ def __nonzero__(self):
"""Returns True if :attr:`status_code` is less than 400.
This attribute checks if the status code of the response is between
@@ -693,14 +693,14 @@ class Response(object):
    the status code is between 200 and 400, this will return True. This
is **not** a check to see if the response code is ``200 OK``.
"""
- return self.ok
-
- def __iter__(self):
- """Allows you to use a response as an iterator."""
- return self.iter_content(128)
-
- @property
- def ok(self):
+ return self.ok
+
+ def __iter__(self):
+ """Allows you to use a response as an iterator."""
+ return self.iter_content(128)
+
+ @property
+ def ok(self):
"""Returns True if :attr:`status_code` is less than 400, False if not.
This attribute checks if the status code of the response is between
@@ -708,204 +708,204 @@ class Response(object):
the status code is between 200 and 400, this will return True. This
is **not** a check to see if the response code is ``200 OK``.
"""
- try:
- self.raise_for_status()
- except HTTPError:
- return False
- return True
-
- @property
- def is_redirect(self):
- """True if this Response is a well-formed HTTP redirect that could have
- been processed automatically (by :meth:`Session.resolve_redirects`).
- """
- return ('location' in self.headers and self.status_code in REDIRECT_STATI)
-
- @property
- def is_permanent_redirect(self):
+ try:
+ self.raise_for_status()
+ except HTTPError:
+ return False
+ return True
+
+ @property
+ def is_redirect(self):
+ """True if this Response is a well-formed HTTP redirect that could have
+ been processed automatically (by :meth:`Session.resolve_redirects`).
+ """
+ return ('location' in self.headers and self.status_code in REDIRECT_STATI)
+
+ @property
+ def is_permanent_redirect(self):
"""True if this Response one of the permanent versions of redirect."""
- return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
-
- @property
+ return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
+
+ @property
def next(self):
"""Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
return self._next
@property
- def apparent_encoding(self):
+ def apparent_encoding(self):
"""The apparent encoding, provided by the charset_normalizer or chardet libraries."""
- return chardet.detect(self.content)['encoding']
-
- def iter_content(self, chunk_size=1, decode_unicode=False):
- """Iterates over the response data. When stream=True is set on the
- request, this avoids reading the content at once into memory for
- large responses. The chunk size is the number of bytes it should
- read into memory. This is not necessarily the length of each item
- returned as decoding can take place.
-
+ return chardet.detect(self.content)['encoding']
+
+ def iter_content(self, chunk_size=1, decode_unicode=False):
+ """Iterates over the response data. When stream=True is set on the
+ request, this avoids reading the content at once into memory for
+ large responses. The chunk size is the number of bytes it should
+ read into memory. This is not necessarily the length of each item
+ returned as decoding can take place.
+
chunk_size must be of type int or None. A value of None will
function differently depending on the value of `stream`.
stream=True will read data as it arrives in whatever size the
chunks are received. If stream=False, data is returned as
a single chunk.
- If decode_unicode is True, content will be decoded using the best
- available encoding based on the response.
- """
+ If decode_unicode is True, content will be decoded using the best
+ available encoding based on the response.
+ """
- def generate():
+ def generate():
# Special case for urllib3.
if hasattr(self.raw, 'stream'):
- try:
- for chunk in self.raw.stream(chunk_size, decode_content=True):
- yield chunk
- except ProtocolError as e:
- raise ChunkedEncodingError(e)
- except DecodeError as e:
- raise ContentDecodingError(e)
- except ReadTimeoutError as e:
- raise ConnectionError(e)
+ try:
+ for chunk in self.raw.stream(chunk_size, decode_content=True):
+ yield chunk
+ except ProtocolError as e:
+ raise ChunkedEncodingError(e)
+ except DecodeError as e:
+ raise ContentDecodingError(e)
+ except ReadTimeoutError as e:
+ raise ConnectionError(e)
else:
- # Standard file-like object.
- while True:
- chunk = self.raw.read(chunk_size)
- if not chunk:
- break
- yield chunk
-
- self._content_consumed = True
-
- if self._content_consumed and isinstance(self._content, bool):
- raise StreamConsumedError()
+ # Standard file-like object.
+ while True:
+ chunk = self.raw.read(chunk_size)
+ if not chunk:
+ break
+ yield chunk
+
+ self._content_consumed = True
+
+ if self._content_consumed and isinstance(self._content, bool):
+ raise StreamConsumedError()
elif chunk_size is not None and not isinstance(chunk_size, int):
raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
- # simulate reading small chunks of the content
- reused_chunks = iter_slices(self._content, chunk_size)
-
- stream_chunks = generate()
-
- chunks = reused_chunks if self._content_consumed else stream_chunks
-
- if decode_unicode:
- chunks = stream_decode_response_unicode(chunks, self)
-
- return chunks
-
+ # simulate reading small chunks of the content
+ reused_chunks = iter_slices(self._content, chunk_size)
+
+ stream_chunks = generate()
+
+ chunks = reused_chunks if self._content_consumed else stream_chunks
+
+ if decode_unicode:
+ chunks = stream_decode_response_unicode(chunks, self)
+
+ return chunks
+
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):
- """Iterates over the response data, one line at a time. When
- stream=True is set on the request, this avoids reading the
- content at once into memory for large responses.
+ """Iterates over the response data, one line at a time. When
+ stream=True is set on the request, this avoids reading the
+ content at once into memory for large responses.
        .. note:: This method is not reentrant-safe.
- """
-
- pending = None
-
- for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
-
- if pending is not None:
- chunk = pending + chunk
-
- if delimiter:
- lines = chunk.split(delimiter)
- else:
- lines = chunk.splitlines()
-
- if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
- pending = lines.pop()
- else:
- pending = None
-
- for line in lines:
- yield line
-
- if pending is not None:
- yield pending
-
- @property
- def content(self):
- """Content of the response, in bytes."""
-
- if self._content is False:
- # Read the contents.
+ """
+
+ pending = None
+
+ for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
+
+ if pending is not None:
+ chunk = pending + chunk
+
+ if delimiter:
+ lines = chunk.split(delimiter)
+ else:
+ lines = chunk.splitlines()
+
+ if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
+ pending = lines.pop()
+ else:
+ pending = None
+
+ for line in lines:
+ yield line
+
+ if pending is not None:
+ yield pending
+
+ @property
+ def content(self):
+ """Content of the response, in bytes."""
+
+ if self._content is False:
+ # Read the contents.
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed')
-
+
if self.status_code == 0 or self.raw is None:
- self._content = None
+ self._content = None
else:
self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''
-
- self._content_consumed = True
- # don't need to release the connection; that's been handled by urllib3
- # since we exhausted the data.
- return self._content
-
- @property
- def text(self):
- """Content of the response, in unicode.
-
- If Response.encoding is None, encoding will be guessed using
+
+ self._content_consumed = True
+ # don't need to release the connection; that's been handled by urllib3
+ # since we exhausted the data.
+ return self._content
+
+ @property
+ def text(self):
+ """Content of the response, in unicode.
+
+ If Response.encoding is None, encoding will be guessed using
``charset_normalizer`` or ``chardet``.
-
- The encoding of the response content is determined based solely on HTTP
- headers, following RFC 2616 to the letter. If you can take advantage of
- non-HTTP knowledge to make a better guess at the encoding, you should
- set ``r.encoding`` appropriately before accessing this property.
- """
-
- # Try charset from content-type
- content = None
- encoding = self.encoding
-
- if not self.content:
- return str('')
-
- # Fallback to auto-detected encoding.
- if self.encoding is None:
- encoding = self.apparent_encoding
-
- # Decode unicode from given encoding.
- try:
- content = str(self.content, encoding, errors='replace')
- except (LookupError, TypeError):
- # A LookupError is raised if the encoding was not found which could
- # indicate a misspelling or similar mistake.
- #
- # A TypeError can be raised if encoding is None
- #
- # So we try blindly encoding.
- content = str(self.content, errors='replace')
-
- return content
-
- def json(self, **kwargs):
+
+ The encoding of the response content is determined based solely on HTTP
+ headers, following RFC 2616 to the letter. If you can take advantage of
+ non-HTTP knowledge to make a better guess at the encoding, you should
+ set ``r.encoding`` appropriately before accessing this property.
+ """
+
+ # Try charset from content-type
+ content = None
+ encoding = self.encoding
+
+ if not self.content:
+ return str('')
+
+ # Fallback to auto-detected encoding.
+ if self.encoding is None:
+ encoding = self.apparent_encoding
+
+ # Decode unicode from given encoding.
+ try:
+ content = str(self.content, encoding, errors='replace')
+ except (LookupError, TypeError):
+ # A LookupError is raised if the encoding was not found which could
+ # indicate a misspelling or similar mistake.
+ #
+ # A TypeError can be raised if encoding is None
+ #
+ # So we try blindly encoding.
+ content = str(self.content, errors='replace')
+
+ return content
+
+ def json(self, **kwargs):
r"""Returns the json-encoded content of a response, if any.
-
- :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
+
+ :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
:raises requests.exceptions.JSONDecodeError: If the response body does not
contain valid json.
- """
-
+ """
+
if not self.encoding and self.content and len(self.content) > 3:
- # No encoding set. JSON RFC 4627 section 3 states we should expect
- # UTF-8, -16 or -32. Detect which one to use; If the detection or
+ # No encoding set. JSON RFC 4627 section 3 states we should expect
+ # UTF-8, -16 or -32. Detect which one to use; If the detection or
# decoding fails, fall back to `self.text` (using charset_normalizer to make
- # a best guess).
- encoding = guess_json_utf(self.content)
- if encoding is not None:
- try:
+ # a best guess).
+ encoding = guess_json_utf(self.content)
+ if encoding is not None:
+ try:
return complexjson.loads(
self.content.decode(encoding), **kwargs
)
- except UnicodeDecodeError:
- # Wrong UTF codec detected; usually because it's not UTF-8
- # but some other 8-bit codec. This is an RFC violation,
- # and the server didn't bother to tell us what codec *was*
- # used.
- pass
-
+ except UnicodeDecodeError:
+ # Wrong UTF codec detected; usually because it's not UTF-8
+ # but some other 8-bit codec. This is an RFC violation,
+ # and the server didn't bother to tell us what codec *was*
+ # used.
+ pass
+
try:
return complexjson.loads(self.text, **kwargs)
except JSONDecodeError as e:
@@ -916,28 +916,28 @@ class Response(object):
else:
raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
- @property
- def links(self):
- """Returns the parsed header links of the response, if any."""
-
- header = self.headers.get('link')
-
- # l = MultiDict()
- l = {}
-
- if header:
- links = parse_header_links(header)
-
- for link in links:
- key = link.get('rel') or link.get('url')
- l[key] = link
-
- return l
-
- def raise_for_status(self):
+ @property
+ def links(self):
+ """Returns the parsed header links of the response, if any."""
+
+ header = self.headers.get('link')
+
+ # l = MultiDict()
+ l = {}
+
+ if header:
+ links = parse_header_links(header)
+
+ for link in links:
+ key = link.get('rel') or link.get('url')
+ l[key] = link
+
+ return l
+
+ def raise_for_status(self):
"""Raises :class:`HTTPError`, if one occurred."""
-
- http_error_msg = ''
+
+ http_error_msg = ''
if isinstance(self.reason, bytes):
# We attempt to decode utf-8 first because some servers
# choose to localize their reason strings. If the string
@@ -949,22 +949,22 @@ class Response(object):
reason = self.reason.decode('iso-8859-1')
else:
reason = self.reason
-
- if 400 <= self.status_code < 500:
+
+ if 400 <= self.status_code < 500:
http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)
-
- elif 500 <= self.status_code < 600:
+
+ elif 500 <= self.status_code < 600:
http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)
-
- if http_error_msg:
- raise HTTPError(http_error_msg, response=self)
-
- def close(self):
- """Releases the connection back to the pool. Once this method has been
- called the underlying ``raw`` object must not be accessed again.
-
- *Note: Should not normally need to be called explicitly.*
- """
+
+ if http_error_msg:
+ raise HTTPError(http_error_msg, response=self)
+
+ def close(self):
+ """Releases the connection back to the pool. Once this method has been
+ called the underlying ``raw`` object must not be accessed again.
+
+ *Note: Should not normally need to be called explicitly.*
+ """
if not self._content_consumed:
self.raw.close()
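Taken together, the Response machinery above supports the usual streaming pattern; a minimal sketch, with an illustrative httpbin endpoint and chunk size:

    # Sketch: stream a body in chunks. raise_for_status() maps 4xx/5xx to
    # HTTPError; the with-block close() releases the pooled connection even
    # if the body was not fully consumed.
    import requests

    with requests.get('https://httpbin.org/stream/3', stream=True) as r:
        r.raise_for_status()
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:  # skip keep-alive chunks
                print(len(chunk), 'bytes')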
diff --git a/contrib/python/requests/requests/sessions.py b/contrib/python/requests/requests/sessions.py
index 41e5aa9de0..3f59cab922 100644
--- a/contrib/python/requests/requests/sessions.py
+++ b/contrib/python/requests/requests/sessions.py
@@ -1,42 +1,42 @@
-# -*- coding: utf-8 -*-
-
-"""
+# -*- coding: utf-8 -*-
+
+"""
requests.sessions
~~~~~~~~~~~~~~~~~
-
-This module provides a Session object to manage and persist settings across
-requests (cookies, auth, proxies).
-"""
-import os
+
+This module provides a Session object to manage and persist settings across
+requests (cookies, auth, proxies).
+"""
+import os
import sys
import time
from datetime import timedelta
from collections import OrderedDict
-
-from .auth import _basic_auth_str
+
+from .auth import _basic_auth_str
from .compat import cookielib, is_py3, urljoin, urlparse, Mapping
-from .cookies import (
- cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
-from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
-from .hooks import default_hooks, dispatch_hook
+from .cookies import (
+ cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
+from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
+from .hooks import default_hooks, dispatch_hook
from ._internal_utils import to_native_string
from .utils import to_key_val_list, default_headers, DEFAULT_PORTS
-from .exceptions import (
- TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
-
-from .structures import CaseInsensitiveDict
-from .adapters import HTTPAdapter
-
-from .utils import (
- requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
+from .exceptions import (
+ TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
+
+from .structures import CaseInsensitiveDict
+from .adapters import HTTPAdapter
+
+from .utils import (
+ requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
get_auth_from_url, rewind_body, resolve_proxies
-)
-
-from .status_codes import codes
-
-# formerly defined here, reexposed here for backward compatibility
-from .models import REDIRECT_STATI
-
+)
+
+from .status_codes import codes
+
+# formerly defined here, reexposed here for backward compatibility
+from .models import REDIRECT_STATI
+
# Preferred clock, based on which one is more accurate on a given system.
if sys.platform == 'win32':
try: # Python 3.4+
@@ -45,55 +45,55 @@ if sys.platform == 'win32':
preferred_clock = time.clock
else:
preferred_clock = time.time
-
-
-def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
+
+
+def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
"""Determines appropriate setting for a given request, taking into account
the explicit setting on that request, and the setting in the session. If a
- setting is a dictionary, they will be merged together using `dict_class`
- """
-
- if session_setting is None:
- return request_setting
-
- if request_setting is None:
- return session_setting
-
- # Bypass if not a dictionary (e.g. verify)
- if not (
- isinstance(session_setting, Mapping) and
- isinstance(request_setting, Mapping)
- ):
- return request_setting
-
- merged_setting = dict_class(to_key_val_list(session_setting))
- merged_setting.update(to_key_val_list(request_setting))
-
+    setting is a dictionary, the two are merged together using `dict_class`.
+ """
+
+ if session_setting is None:
+ return request_setting
+
+ if request_setting is None:
+ return session_setting
+
+ # Bypass if not a dictionary (e.g. verify)
+ if not (
+ isinstance(session_setting, Mapping) and
+ isinstance(request_setting, Mapping)
+ ):
+ return request_setting
+
+ merged_setting = dict_class(to_key_val_list(session_setting))
+ merged_setting.update(to_key_val_list(request_setting))
+
# Remove keys that are set to None. Extract keys first to avoid altering
# the dictionary during iteration.
none_keys = [k for (k, v) in merged_setting.items() if v is None]
for key in none_keys:
del merged_setting[key]
-
- return merged_setting
-
-
-def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
+
+ return merged_setting
+
+
+def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
"""Properly merges both requests and session hooks.
-
- This is necessary because when request_hooks == {'response': []}, the
- merge breaks Session hooks entirely.
- """
- if session_hooks is None or session_hooks.get('response') == []:
- return request_hooks
-
- if request_hooks is None or request_hooks.get('response') == []:
- return session_hooks
-
- return merge_setting(request_hooks, session_hooks, dict_class)
-
-
-class SessionRedirectMixin(object):
+
+ This is necessary because when request_hooks == {'response': []}, the
+ merge breaks Session hooks entirely.
+ """
+ if session_hooks is None or session_hooks.get('response') == []:
+ return request_hooks
+
+ if request_hooks is None or request_hooks.get('response') == []:
+ return session_hooks
+
+ return merge_setting(request_hooks, session_hooks, dict_class)
+
+
+class SessionRedirectMixin(object):
def get_redirect_target(self, resp):
"""Receives a Response. Returns a redirect URI or ``None``"""
@@ -141,80 +141,80 @@ class SessionRedirectMixin(object):
# Standard case: root URI must match
return changed_port or changed_scheme
- def resolve_redirects(self, resp, req, stream=False, timeout=None,
+ def resolve_redirects(self, resp, req, stream=False, timeout=None,
verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs):
"""Receives a Response. Returns a generator of Responses or Requests."""
-
+
hist = [] # keep track of history
-
+
url = self.get_redirect_target(resp)
previous_fragment = urlparse(req.url).fragment
while url:
- prepared_request = req.copy()
-
+ prepared_request = req.copy()
+
# Update history and keep track of redirects.
# resp.history must ignore the original request in this loop
hist.append(resp)
resp.history = hist[1:]
-
- try:
- resp.content # Consume socket so it can be released
- except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
- resp.raw.read(decode_content=False)
-
+
+ try:
+ resp.content # Consume socket so it can be released
+ except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
+ resp.raw.read(decode_content=False)
+
if len(resp.history) >= self.max_redirects:
raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp)
-
- # Release the connection back into the pool.
- resp.close()
-
- # Handle redirection without scheme (see: RFC 1808 Section 4)
- if url.startswith('//'):
- parsed_rurl = urlparse(resp.url)
+
+ # Release the connection back into the pool.
+ resp.close()
+
+ # Handle redirection without scheme (see: RFC 1808 Section 4)
+ if url.startswith('//'):
+ parsed_rurl = urlparse(resp.url)
url = ':'.join([to_native_string(parsed_rurl.scheme), url])
-
+
# Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
- parsed = urlparse(url)
+ parsed = urlparse(url)
if parsed.fragment == '' and previous_fragment:
parsed = parsed._replace(fragment=previous_fragment)
elif parsed.fragment:
previous_fragment = parsed.fragment
- url = parsed.geturl()
-
- # Facilitate relative 'location' headers, as allowed by RFC 7231.
- # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
- # Compliant with RFC3986, we percent encode the url.
- if not parsed.netloc:
- url = urljoin(resp.url, requote_uri(url))
- else:
- url = requote_uri(url)
-
- prepared_request.url = to_native_string(url)
-
+ url = parsed.geturl()
+
+ # Facilitate relative 'location' headers, as allowed by RFC 7231.
+ # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
+ # Compliant with RFC3986, we percent encode the url.
+ if not parsed.netloc:
+ url = urljoin(resp.url, requote_uri(url))
+ else:
+ url = requote_uri(url)
+
+ prepared_request.url = to_native_string(url)
+
self.rebuild_method(prepared_request, resp)
-
+
# https://github.com/psf/requests/issues/1084
- if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
+ if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
# https://github.com/psf/requests/issues/3490
purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')
for header in purged_headers:
prepared_request.headers.pop(header, None)
- prepared_request.body = None
-
- headers = prepared_request.headers
+ prepared_request.body = None
+
+ headers = prepared_request.headers
headers.pop('Cookie', None)
-
+
# Extract any cookies sent on the response to the cookiejar
# in the new request. Because we've mutated our copied prepared
# request, use the old one that we haven't yet touched.
extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
merge_cookies(prepared_request._cookies, self.cookies)
- prepared_request.prepare_cookies(prepared_request._cookies)
-
- # Rebuild auth and proxy information.
- proxies = self.rebuild_proxies(prepared_request, proxies)
- self.rebuild_auth(prepared_request, resp)
-
+ prepared_request.prepare_cookies(prepared_request._cookies)
+
+ # Rebuild auth and proxy information.
+ proxies = self.rebuild_proxies(prepared_request, proxies)
+ self.rebuild_auth(prepared_request, resp)
+
# A failed tell() sets `_body_position` to `object()`. This non-None
# value ensures `rewindable` will be True, allowing us to raise an
# UnrewindableBodyError, instead of hanging the connection.
@@ -227,13 +227,13 @@ class SessionRedirectMixin(object):
if rewindable:
rewind_body(prepared_request)
- # Override the original request.
- req = prepared_request
-
+ # Override the original request.
+ req = prepared_request
+
if yield_requests:
yield req
else:
-
+
resp = self.send(
req,
stream=stream,
@@ -244,66 +244,66 @@ class SessionRedirectMixin(object):
allow_redirects=False,
**adapter_kwargs
)
-
+
extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
-
+
# extract redirect url, if any, for the next loop
url = self.get_redirect_target(resp)
yield resp
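resolve_redirects is normally driven by Session.send; a sketch of stepping through a redirect chain manually instead, with httpbin.org as a placeholder endpoint:

    import requests

    s = requests.Session()
    resp = s.get('https://httpbin.org/redirect/2', allow_redirects=False)
    while resp.is_redirect:
        # resp.next is the PreparedRequest that resolve_redirects built
        # (via the yield_requests=True path) for the next hop.
        resp = s.send(resp.next, allow_redirects=False)
    print(resp.status_code)  # 200 once the chain is exhausted
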
- def rebuild_auth(self, prepared_request, response):
+ def rebuild_auth(self, prepared_request, response):
"""When being redirected we may want to strip authentication from the
- request to avoid leaking credentials. This method intelligently removes
- and reapplies authentication where possible to avoid credential loss.
- """
- headers = prepared_request.headers
- url = prepared_request.url
-
+ request to avoid leaking credentials. This method intelligently removes
+ and reapplies authentication where possible to avoid credential loss.
+ """
+ headers = prepared_request.headers
+ url = prepared_request.url
+
if 'Authorization' in headers and self.should_strip_auth(response.request.url, url):
- # If we get redirected to a new host, we should strip out any
+ # If we get redirected to a new host, we should strip out any
# authentication headers.
del headers['Authorization']
-
- # .netrc might have more auth for us on our new host.
- new_auth = get_netrc_auth(url) if self.trust_env else None
- if new_auth is not None:
- prepared_request.prepare_auth(new_auth)
-
- def rebuild_proxies(self, prepared_request, proxies):
+
+ # .netrc might have more auth for us on our new host.
+ new_auth = get_netrc_auth(url) if self.trust_env else None
+ if new_auth is not None:
+ prepared_request.prepare_auth(new_auth)
+
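A sketch of the rule rebuild_auth relies on: should_strip_auth keeps the Authorization header across a same-host redirect and drops it on a cross-host one (both hosts are placeholders):

    import requests

    s = requests.Session()  # Session inherits SessionRedirectMixin

    assert not s.should_strip_auth('http://example.com/a', 'http://example.com/b')
    assert s.should_strip_auth('http://example.com/a', 'http://other.example/b')
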
+ def rebuild_proxies(self, prepared_request, proxies):
"""This method re-evaluates the proxy configuration by considering the
- environment variables. If we are redirected to a URL covered by
- NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
- proxy keys for this URL (in case they were stripped by a previous
- redirect).
-
- This method also replaces the Proxy-Authorization header where
- necessary.
+ environment variables. If we are redirected to a URL covered by
+ NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
+ proxy keys for this URL (in case they were stripped by a previous
+ redirect).
+
+ This method also replaces the Proxy-Authorization header where
+ necessary.
:rtype: dict
- """
- headers = prepared_request.headers
+ """
+ headers = prepared_request.headers
scheme = urlparse(prepared_request.url).scheme
new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)
-
- if 'Proxy-Authorization' in headers:
- del headers['Proxy-Authorization']
-
- try:
- username, password = get_auth_from_url(new_proxies[scheme])
- except KeyError:
- username, password = None, None
-
- if username and password:
- headers['Proxy-Authorization'] = _basic_auth_str(username, password)
-
- return new_proxies
-
+
+ if 'Proxy-Authorization' in headers:
+ del headers['Proxy-Authorization']
+
+ try:
+ username, password = get_auth_from_url(new_proxies[scheme])
+ except KeyError:
+ username, password = None, None
+
+ if username and password:
+ headers['Proxy-Authorization'] = _basic_auth_str(username, password)
+
+ return new_proxies
+
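A sketch of the NO_PROXY behaviour described above; the proxy URL and host are placeholders, and the environment is consulted only because trust_env is left at its default of True:

    import os
    import requests

    os.environ['HTTP_PROXY'] = 'http://proxy.internal:3128'
    os.environ['NO_PROXY'] = 'example.com'

    s = requests.Session()
    req = requests.Request('GET', 'http://example.com/').prepare()
    # The URL is covered by NO_PROXY, so no proxy key is injected for it.
    assert 'http' not in s.rebuild_proxies(req, {})
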
def rebuild_method(self, prepared_request, response):
"""When being redirected we may want to change the method of the request
based on certain specs or browser behavior.
"""
method = prepared_request.method
-
+
# https://tools.ietf.org/html/rfc7231#section-6.4.4
if response.status_code == codes.see_other and method != 'HEAD':
method = 'GET'
@@ -321,15 +321,15 @@ class SessionRedirectMixin(object):
prepared_request.method = method
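A sketch of the 303 rule above: any non-HEAD method is rewritten to GET (the URL is a placeholder):

    import requests

    s = requests.Session()
    prep = requests.Request('POST', 'http://example.com/').prepare()

    resp = requests.Response()
    resp.status_code = requests.codes.see_other  # 303
    s.rebuild_method(prep, resp)
    assert prep.method == 'GET'
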
-class Session(SessionRedirectMixin):
- """A Requests session.
-
- Provides cookie persistence, connection-pooling, and configuration.
-
- Basic Usage::
-
- >>> import requests
- >>> s = requests.Session()
+class Session(SessionRedirectMixin):
+ """A Requests session.
+
+ Provides cookie persistence, connection-pooling, and configuration.
+
+ Basic Usage::
+
+ >>> import requests
+ >>> s = requests.Session()
>>> s.get('https://httpbin.org/get')
<Response [200]>
@@ -338,42 +338,42 @@ class Session(SessionRedirectMixin):
>>> with requests.Session() as s:
... s.get('https://httpbin.org/get')
<Response [200]>
- """
-
- __attrs__ = [
- 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
+ """
+
+ __attrs__ = [
+ 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
'cert', 'adapters', 'stream', 'trust_env',
- 'max_redirects',
- ]
-
- def __init__(self):
-
- #: A case-insensitive dictionary of headers to be sent on each
- #: :class:`Request <Request>` sent from this
- #: :class:`Session <Session>`.
- self.headers = default_headers()
-
- #: Default Authentication tuple or object to attach to
- #: :class:`Request <Request>`.
- self.auth = None
-
+ 'max_redirects',
+ ]
+
+ def __init__(self):
+
+ #: A case-insensitive dictionary of headers to be sent on each
+ #: :class:`Request <Request>` sent from this
+ #: :class:`Session <Session>`.
+ self.headers = default_headers()
+
+ #: Default Authentication tuple or object to attach to
+ #: :class:`Request <Request>`.
+ self.auth = None
+
#: Dictionary mapping protocol or protocol and host to the URL of the proxy
#: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
#: be used on each :class:`Request <Request>`.
- self.proxies = {}
-
- #: Event-handling hooks.
- self.hooks = default_hooks()
-
- #: Dictionary of querystring data to attach to each
- #: :class:`Request <Request>`. The dictionary values may be lists for
- #: representing multivalued query parameters.
- self.params = {}
-
- #: Stream response content default.
- self.stream = False
-
- #: SSL Verification default.
+ self.proxies = {}
+
+ #: Event-handling hooks.
+ self.hooks = default_hooks()
+
+ #: Dictionary of querystring data to attach to each
+ #: :class:`Request <Request>`. The dictionary values may be lists for
+ #: representing multivalued query parameters.
+ self.params = {}
+
+ #: Stream response content default.
+ self.stream = False
+
+ #: SSL Verification default.
#: Defaults to `True`, requiring requests to verify the TLS certificate at the
#: remote end.
#: If verify is set to `False`, requests will accept any TLS certificate
@@ -381,112 +381,112 @@ class Session(SessionRedirectMixin):
#: expired certificates, which will make your application vulnerable to
#: man-in-the-middle (MitM) attacks.
#: Only set this to `False` for testing.
- self.verify = True
-
+ self.verify = True
+
#: SSL client certificate default, if String, path to ssl client
#: cert file (.pem). If Tuple, ('cert', 'key') pair.
- self.cert = None
-
- #: Maximum number of redirects allowed. If the request exceeds this
- #: limit, a :class:`TooManyRedirects` exception is raised.
+ self.cert = None
+
+ #: Maximum number of redirects allowed. If the request exceeds this
+ #: limit, a :class:`TooManyRedirects` exception is raised.
#: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is
#: 30.
- self.max_redirects = DEFAULT_REDIRECT_LIMIT
-
+ self.max_redirects = DEFAULT_REDIRECT_LIMIT
+
#: Trust environment settings for proxy configuration, default
#: authentication and similar.
- self.trust_env = True
-
- #: A CookieJar containing all currently outstanding cookies set on this
- #: session. By default it is a
- #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
- #: may be any other ``cookielib.CookieJar`` compatible object.
- self.cookies = cookiejar_from_dict({})
-
- # Default connection adapters.
- self.adapters = OrderedDict()
- self.mount('https://', HTTPAdapter())
- self.mount('http://', HTTPAdapter())
-
- def __enter__(self):
- return self
-
- def __exit__(self, *args):
- self.close()
-
- def prepare_request(self, request):
- """Constructs a :class:`PreparedRequest <PreparedRequest>` for
- transmission and returns it. The :class:`PreparedRequest` has settings
- merged from the :class:`Request <Request>` instance and those of the
- :class:`Session`.
-
- :param request: :class:`Request` instance to prepare with this
- session's settings.
+ self.trust_env = True
+
+ #: A CookieJar containing all currently outstanding cookies set on this
+ #: session. By default it is a
+ #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
+ #: may be any other ``cookielib.CookieJar`` compatible object.
+ self.cookies = cookiejar_from_dict({})
+
+ # Default connection adapters.
+ self.adapters = OrderedDict()
+ self.mount('https://', HTTPAdapter())
+ self.mount('http://', HTTPAdapter())
+
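A sketch of how the attributes initialised above act as session-wide defaults; the header value is a placeholder:

    import requests

    s = requests.Session()
    s.headers.update({'X-Api-Key': 'placeholder-token'})  # sent on every request
    s.params = {'format': 'json'}   # merged into every query string
    s.max_redirects = 5             # TooManyRedirects beyond this
    # resp = s.get('https://httpbin.org/get')  # placeholder endpoint
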
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+ def prepare_request(self, request):
+ """Constructs a :class:`PreparedRequest <PreparedRequest>` for
+ transmission and returns it. The :class:`PreparedRequest` has settings
+ merged from the :class:`Request <Request>` instance and those of the
+ :class:`Session`.
+
+ :param request: :class:`Request` instance to prepare with this
+ session's settings.
:rtype: requests.PreparedRequest
- """
- cookies = request.cookies or {}
-
- # Bootstrap CookieJar.
- if not isinstance(cookies, cookielib.CookieJar):
- cookies = cookiejar_from_dict(cookies)
-
- # Merge with session cookies
- merged_cookies = merge_cookies(
- merge_cookies(RequestsCookieJar(), self.cookies), cookies)
-
- # Set environment's basic authentication if not explicitly set.
- auth = request.auth
- if self.trust_env and not auth and not self.auth:
- auth = get_netrc_auth(request.url)
-
- p = PreparedRequest()
- p.prepare(
- method=request.method.upper(),
- url=request.url,
- files=request.files,
- data=request.data,
- json=request.json,
- headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
- params=merge_setting(request.params, self.params),
- auth=merge_setting(auth, self.auth),
- cookies=merged_cookies,
- hooks=merge_hooks(request.hooks, self.hooks),
- )
- return p
-
- def request(self, method, url,
+ """
+ cookies = request.cookies or {}
+
+ # Bootstrap CookieJar.
+ if not isinstance(cookies, cookielib.CookieJar):
+ cookies = cookiejar_from_dict(cookies)
+
+ # Merge with session cookies
+ merged_cookies = merge_cookies(
+ merge_cookies(RequestsCookieJar(), self.cookies), cookies)
+
+ # Set environment's basic authentication if not explicitly set.
+ auth = request.auth
+ if self.trust_env and not auth and not self.auth:
+ auth = get_netrc_auth(request.url)
+
+ p = PreparedRequest()
+ p.prepare(
+ method=request.method.upper(),
+ url=request.url,
+ files=request.files,
+ data=request.data,
+ json=request.json,
+ headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
+ params=merge_setting(request.params, self.params),
+ auth=merge_setting(auth, self.auth),
+ cookies=merged_cookies,
+ hooks=merge_hooks(request.hooks, self.hooks),
+ )
+ return p
+
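A sketch of preparing a request explicitly, e.g. to inspect or modify it before transmission; the header names and values are placeholders:

    import requests

    s = requests.Session()
    s.headers['X-Env'] = 'staging'

    req = requests.Request('GET', 'http://example.com/', headers={'Accept': 'text/html'})
    prep = s.prepare_request(req)  # session and request settings merged here
    assert prep.headers['X-Env'] == 'staging'
    # resp = s.send(prep)  # transmit the prepared request as-is
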
+ def request(self, method, url,
params=None, data=None, headers=None, cookies=None, files=None,
auth=None, timeout=None, allow_redirects=True, proxies=None,
hooks=None, stream=None, verify=None, cert=None, json=None):
- """Constructs a :class:`Request <Request>`, prepares it and sends it.
- Returns :class:`Response <Response>` object.
-
- :param method: method for the new :class:`Request` object.
- :param url: URL for the new :class:`Request` object.
- :param params: (optional) Dictionary or bytes to be sent in the query
- string for the :class:`Request`.
+ """Constructs a :class:`Request <Request>`, prepares it and sends it.
+ Returns :class:`Response <Response>` object.
+
+ :param method: method for the new :class:`Request` object.
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary or bytes to be sent in the query
+ string for the :class:`Request`.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
- :param json: (optional) json to send in the body of the
- :class:`Request`.
- :param headers: (optional) Dictionary of HTTP Headers to send with the
- :class:`Request`.
- :param cookies: (optional) Dict or CookieJar object to send with the
- :class:`Request`.
- :param files: (optional) Dictionary of ``'filename': file-like-objects``
- for multipart encoding upload.
- :param auth: (optional) Auth tuple or callable to enable
- Basic/Digest/Custom HTTP Auth.
- :param timeout: (optional) How long to wait for the server to send
+ :param json: (optional) json to send in the body of the
+ :class:`Request`.
+ :param headers: (optional) Dictionary of HTTP Headers to send with the
+ :class:`Request`.
+ :param cookies: (optional) Dict or CookieJar object to send with the
+ :class:`Request`.
+ :param files: (optional) Dictionary of ``'filename': file-like-objects``
+ for multipart encoding upload.
+ :param auth: (optional) Auth tuple or callable to enable
+ Basic/Digest/Custom HTTP Auth.
+ :param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
- :type timeout: float or tuple
- :param allow_redirects: (optional) Set to True by default.
- :type allow_redirects: bool
+ :type timeout: float or tuple
+ :param allow_redirects: (optional) Set to True by default.
+ :type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol or protocol and
hostname to the URL of the proxy.
- :param stream: (optional) whether to immediately download the response
- content. Defaults to ``False``.
+ :param stream: (optional) whether to immediately download the response
+ content. Defaults to ``False``.
:param verify: (optional) Either a boolean, in which case it controls whether we verify
the server's TLS certificate, or a string, in which case it must be a path
to a CA bundle to use. Defaults to ``True``. When set to
@@ -495,12 +495,12 @@ class Session(SessionRedirectMixin):
certificates, which will make your application vulnerable to
man-in-the-middle (MitM) attacks. Setting verify to ``False``
may be useful during local development or testing.
- :param cert: (optional) if String, path to ssl client cert file (.pem).
- If Tuple, ('cert', 'key') pair.
+ :param cert: (optional) if String, path to ssl client cert file (.pem).
+ If Tuple, ('cert', 'key') pair.
:rtype: requests.Response
- """
- # Create the Request.
- req = Request(
+ """
+ # Create the Request.
+ req = Request(
method=method.upper(),
url=url,
headers=headers,
@@ -511,171 +511,171 @@ class Session(SessionRedirectMixin):
auth=auth,
cookies=cookies,
hooks=hooks,
- )
- prep = self.prepare_request(req)
-
- proxies = proxies or {}
-
- settings = self.merge_environment_settings(
- prep.url, proxies, stream, verify, cert
- )
-
- # Send the request.
- send_kwargs = {
- 'timeout': timeout,
- 'allow_redirects': allow_redirects,
- }
- send_kwargs.update(settings)
- resp = self.send(prep, **send_kwargs)
-
- return resp
-
- def get(self, url, **kwargs):
+ )
+ prep = self.prepare_request(req)
+
+ proxies = proxies or {}
+
+ settings = self.merge_environment_settings(
+ prep.url, proxies, stream, verify, cert
+ )
+
+ # Send the request.
+ send_kwargs = {
+ 'timeout': timeout,
+ 'allow_redirects': allow_redirects,
+ }
+ send_kwargs.update(settings)
+ resp = self.send(prep, **send_kwargs)
+
+ return resp
+
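The verb helpers below are thin wrappers around this method, so the following two calls (httpbin.org as a placeholder endpoint) are equivalent:

    import requests

    s = requests.Session()
    # r1 = s.request('GET', 'https://httpbin.org/get', params={'q': 'x'}, timeout=5)
    # r2 = s.get('https://httpbin.org/get', params={'q': 'x'}, timeout=5)
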
+ def get(self, url, **kwargs):
r"""Sends a GET request. Returns :class:`Response` object.
-
- :param url: URL for the new :class:`Request` object.
- :param \*\*kwargs: Optional arguments that ``request`` takes.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
:rtype: requests.Response
- """
-
- kwargs.setdefault('allow_redirects', True)
- return self.request('GET', url, **kwargs)
-
- def options(self, url, **kwargs):
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return self.request('GET', url, **kwargs)
+
+ def options(self, url, **kwargs):
r"""Sends a OPTIONS request. Returns :class:`Response` object.
-
- :param url: URL for the new :class:`Request` object.
- :param \*\*kwargs: Optional arguments that ``request`` takes.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
:rtype: requests.Response
- """
-
- kwargs.setdefault('allow_redirects', True)
- return self.request('OPTIONS', url, **kwargs)
-
- def head(self, url, **kwargs):
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return self.request('OPTIONS', url, **kwargs)
+
+ def head(self, url, **kwargs):
r"""Sends a HEAD request. Returns :class:`Response` object.
-
- :param url: URL for the new :class:`Request` object.
- :param \*\*kwargs: Optional arguments that ``request`` takes.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
:rtype: requests.Response
- """
-
- kwargs.setdefault('allow_redirects', False)
- return self.request('HEAD', url, **kwargs)
-
- def post(self, url, data=None, json=None, **kwargs):
+ """
+
+ kwargs.setdefault('allow_redirects', False)
+ return self.request('HEAD', url, **kwargs)
+
+ def post(self, url, data=None, json=None, **kwargs):
r"""Sends a POST request. Returns :class:`Response` object.
-
- :param url: URL for the new :class:`Request` object.
+
+ :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
- :param json: (optional) json to send in the body of the :class:`Request`.
- :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :param json: (optional) json to send in the body of the :class:`Request`.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
:rtype: requests.Response
- """
-
- return self.request('POST', url, data=data, json=json, **kwargs)
-
- def put(self, url, data=None, **kwargs):
+ """
+
+ return self.request('POST', url, data=data, json=json, **kwargs)
+
+ def put(self, url, data=None, **kwargs):
r"""Sends a PUT request. Returns :class:`Response` object.
-
- :param url: URL for the new :class:`Request` object.
+
+ :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
- :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
:rtype: requests.Response
- """
-
- return self.request('PUT', url, data=data, **kwargs)
-
- def patch(self, url, data=None, **kwargs):
+ """
+
+ return self.request('PUT', url, data=data, **kwargs)
+
+ def patch(self, url, data=None, **kwargs):
r"""Sends a PATCH request. Returns :class:`Response` object.
-
- :param url: URL for the new :class:`Request` object.
+
+ :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
- :param \*\*kwargs: Optional arguments that ``request`` takes.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
:rtype: requests.Response
- """
-
+ """
+
return self.request('PATCH', url, data=data, **kwargs)
-
- def delete(self, url, **kwargs):
+
+ def delete(self, url, **kwargs):
r"""Sends a DELETE request. Returns :class:`Response` object.
-
- :param url: URL for the new :class:`Request` object.
- :param \*\*kwargs: Optional arguments that ``request`` takes.
+
+ :param url: URL for the new :class:`Request` object.
+ :param \*\*kwargs: Optional arguments that ``request`` takes.
:rtype: requests.Response
- """
-
- return self.request('DELETE', url, **kwargs)
-
- def send(self, request, **kwargs):
+ """
+
+ return self.request('DELETE', url, **kwargs)
+
+ def send(self, request, **kwargs):
"""Send a given PreparedRequest.
:rtype: requests.Response
"""
- # Set defaults that the hooks can utilize to ensure they always have
- # the correct parameters to reproduce the previous request.
- kwargs.setdefault('stream', self.stream)
- kwargs.setdefault('verify', self.verify)
- kwargs.setdefault('cert', self.cert)
+ # Set defaults that the hooks can utilize to ensure they always have
+ # the correct parameters to reproduce the previous request.
+ kwargs.setdefault('stream', self.stream)
+ kwargs.setdefault('verify', self.verify)
+ kwargs.setdefault('cert', self.cert)
if 'proxies' not in kwargs:
kwargs['proxies'] = resolve_proxies(
request, self.proxies, self.trust_env
)
-
- # It's possible that users might accidentally send a Request object.
- # Guard against that specific failure case.
+
+ # It's possible that users might accidentally send a Request object.
+ # Guard against that specific failure case.
if isinstance(request, Request):
- raise ValueError('You can only send PreparedRequests.')
-
- # Set up variables needed for resolve_redirects and dispatching of hooks
- allow_redirects = kwargs.pop('allow_redirects', True)
- stream = kwargs.get('stream')
- hooks = request.hooks
-
- # Get the appropriate adapter to use
- adapter = self.get_adapter(url=request.url)
-
- # Start time (approximately) of the request
+ raise ValueError('You can only send PreparedRequests.')
+
+ # Set up variables needed for resolve_redirects and dispatching of hooks
+ allow_redirects = kwargs.pop('allow_redirects', True)
+ stream = kwargs.get('stream')
+ hooks = request.hooks
+
+ # Get the appropriate adapter to use
+ adapter = self.get_adapter(url=request.url)
+
+ # Start time (approximately) of the request
start = preferred_clock()
-
- # Send the request
- r = adapter.send(request, **kwargs)
-
- # Total elapsed time of the request (approximately)
+
+ # Send the request
+ r = adapter.send(request, **kwargs)
+
+ # Total elapsed time of the request (approximately)
elapsed = preferred_clock() - start
r.elapsed = timedelta(seconds=elapsed)
-
- # Response manipulation hooks
- r = dispatch_hook('response', hooks, r, **kwargs)
-
- # Persist cookies
- if r.history:
-
- # If the hooks create history then we want those cookies too
- for resp in r.history:
- extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
-
- extract_cookies_to_jar(self.cookies, request, r.raw)
-
- # Resolve redirects if allowed.
+
+ # Response manipulation hooks
+ r = dispatch_hook('response', hooks, r, **kwargs)
+
+ # Persist cookies
+ if r.history:
+
+ # If the hooks create history then we want those cookies too
+ for resp in r.history:
+ extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
+
+ extract_cookies_to_jar(self.cookies, request, r.raw)
+
+ # Resolve redirects if allowed.
if allow_redirects:
# Redirect resolving generator.
gen = self.resolve_redirects(r, request, **kwargs)
history = [resp for resp in gen]
else:
history = []
-
- # Shuffle things around if there's history.
- if history:
- # Insert the first (original) request at the start
- history.insert(0, r)
- # Get the last request made
- r = history.pop()
- r.history = history
-
+
+ # Shuffle things around if there's history.
+ if history:
+ # Insert the first (original) request at the start
+ history.insert(0, r)
+ # Get the last request made
+ r = history.pop()
+ r.history = history
+
# If redirects aren't being followed, store the response on the Request for Response.next().
if not allow_redirects:
try:
@@ -683,83 +683,83 @@ class Session(SessionRedirectMixin):
except StopIteration:
pass
- if not stream:
- r.content
-
- return r
-
- def merge_environment_settings(self, url, proxies, stream, verify, cert):
+ if not stream:
+ r.content
+
+ return r
+
+ def merge_environment_settings(self, url, proxies, stream, verify, cert):
"""
Check the environment and merge it with some settings.
:rtype: dict
"""
- # Gather clues from the surrounding environment.
- if self.trust_env:
- # Set environment's proxies.
+ # Gather clues from the surrounding environment.
+ if self.trust_env:
+ # Set environment's proxies.
no_proxy = proxies.get('no_proxy') if proxies is not None else None
env_proxies = get_environ_proxies(url, no_proxy=no_proxy)
- for (k, v) in env_proxies.items():
- proxies.setdefault(k, v)
-
- # Look for requests environment configuration and be compatible
- # with cURL.
- if verify is True or verify is None:
- verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
- os.environ.get('CURL_CA_BUNDLE'))
-
- # Merge all the kwargs.
- proxies = merge_setting(proxies, self.proxies)
- stream = merge_setting(stream, self.stream)
- verify = merge_setting(verify, self.verify)
- cert = merge_setting(cert, self.cert)
-
- return {'verify': verify, 'proxies': proxies, 'stream': stream,
- 'cert': cert}
-
- def get_adapter(self, url):
+ for (k, v) in env_proxies.items():
+ proxies.setdefault(k, v)
+
+ # Look for requests environment configuration and be compatible
+ # with cURL.
+ if verify is True or verify is None:
+ verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
+ os.environ.get('CURL_CA_BUNDLE'))
+
+ # Merge all the kwargs.
+ proxies = merge_setting(proxies, self.proxies)
+ stream = merge_setting(stream, self.stream)
+ verify = merge_setting(verify, self.verify)
+ cert = merge_setting(cert, self.cert)
+
+ return {'verify': verify, 'proxies': proxies, 'stream': stream,
+ 'cert': cert}
+
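A sketch of the trust_env path above, with a placeholder bundle path: when verify is unset, REQUESTS_CA_BUNDLE wins over the session default:

    import os
    import requests

    os.environ['REQUESTS_CA_BUNDLE'] = '/etc/ssl/certs/corp-bundle.pem'
    s = requests.Session()
    settings = s.merge_environment_settings(
        'https://example.com/', proxies={}, stream=None, verify=None, cert=None)
    assert settings['verify'] == '/etc/ssl/certs/corp-bundle.pem'
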
+ def get_adapter(self, url):
"""
Returns the appropriate connection adapter for the given URL.
:rtype: requests.adapters.BaseAdapter
"""
- for (prefix, adapter) in self.adapters.items():
-
+ for (prefix, adapter) in self.adapters.items():
+
if url.lower().startswith(prefix.lower()):
- return adapter
-
- # Nothing matches :-/
+ return adapter
+
+ # Nothing matches :-/
raise InvalidSchema("No connection adapters were found for {!r}".format(url))
-
- def close(self):
- """Closes all adapters and as such the session"""
- for v in self.adapters.values():
- v.close()
-
- def mount(self, prefix, adapter):
- """Registers a connection adapter to a prefix.
-
+
+ def close(self):
+ """Closes all adapters and as such the session"""
+ for v in self.adapters.values():
+ v.close()
+
+ def mount(self, prefix, adapter):
+ """Registers a connection adapter to a prefix.
+
Adapters are sorted in descending order by prefix length.
"""
- self.adapters[prefix] = adapter
- keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
-
- for key in keys_to_move:
- self.adapters[key] = self.adapters.pop(key)
-
- def __getstate__(self):
+ self.adapters[prefix] = adapter
+ keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
+
+ for key in keys_to_move:
+ self.adapters[key] = self.adapters.pop(key)
+
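A sketch of mounting a more specific adapter; because of the ordering maintained above, get_adapter picks the longest matching prefix (the host is a placeholder):

    import requests
    from requests.adapters import HTTPAdapter

    s = requests.Session()
    s.mount('https://api.example.com/', HTTPAdapter(max_retries=3))
    adapter = s.get_adapter('https://api.example.com/v1/items')
    assert adapter.max_retries.total == 3  # the custom adapter was selected
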
+ def __getstate__(self):
state = {attr: getattr(self, attr, None) for attr in self.__attrs__}
- return state
-
- def __setstate__(self, state):
- for attr, value in state.items():
- setattr(self, attr, value)
-
-
-def session():
+ return state
+
+ def __setstate__(self, state):
+ for attr, value in state.items():
+ setattr(self, attr, value)
+
+
+def session():
"""
Returns a :class:`Session` for context-management.
-
+
.. deprecated:: 1.0.0
This method has been deprecated since version 1.0.0 and is only kept for
@@ -768,4 +768,4 @@ def session():
:rtype: Session
"""
- return Session()
+ return Session()
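Because of __getstate__/__setstate__ above, a configured session can be pickled, e.g. to hand it to worker processes; the header is a placeholder:

    import pickle
    import requests

    s = requests.Session()
    s.headers['X-Team'] = 'platform'

    clone = pickle.loads(pickle.dumps(s))
    assert clone.headers['X-Team'] == 'platform'
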
diff --git a/contrib/python/requests/requests/status_codes.py b/contrib/python/requests/requests/status_codes.py
index 20a0fa897b..d80a7cd4dd 100644
--- a/contrib/python/requests/requests/status_codes.py
+++ b/contrib/python/requests/requests/status_codes.py
@@ -1,5 +1,5 @@
-# -*- coding: utf-8 -*-
-
+# -*- coding: utf-8 -*-
+
r"""
The ``codes`` object defines a mapping from common names for HTTP statuses
to their numerical codes, accessible either as attributes or as dictionary
@@ -20,90 +20,90 @@ the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
``codes.okay`` all correspond to the HTTP status code 200.
"""
-from .structures import LookupDict
-
-_codes = {
-
- # Informational.
- 100: ('continue',),
- 101: ('switching_protocols',),
- 102: ('processing',),
- 103: ('checkpoint',),
- 122: ('uri_too_long', 'request_uri_too_long'),
- 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
- 201: ('created',),
- 202: ('accepted',),
- 203: ('non_authoritative_info', 'non_authoritative_information'),
- 204: ('no_content',),
- 205: ('reset_content', 'reset'),
- 206: ('partial_content', 'partial'),
- 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
- 208: ('already_reported',),
- 226: ('im_used',),
-
- # Redirection.
- 300: ('multiple_choices',),
- 301: ('moved_permanently', 'moved', '\\o-'),
- 302: ('found',),
- 303: ('see_other', 'other'),
- 304: ('not_modified',),
- 305: ('use_proxy',),
- 306: ('switch_proxy',),
- 307: ('temporary_redirect', 'temporary_moved', 'temporary'),
- 308: ('permanent_redirect',
+from .structures import LookupDict
+
+_codes = {
+
+ # Informational.
+ 100: ('continue',),
+ 101: ('switching_protocols',),
+ 102: ('processing',),
+ 103: ('checkpoint',),
+ 122: ('uri_too_long', 'request_uri_too_long'),
+ 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
+ 201: ('created',),
+ 202: ('accepted',),
+ 203: ('non_authoritative_info', 'non_authoritative_information'),
+ 204: ('no_content',),
+ 205: ('reset_content', 'reset'),
+ 206: ('partial_content', 'partial'),
+ 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
+ 208: ('already_reported',),
+ 226: ('im_used',),
+
+ # Redirection.
+ 300: ('multiple_choices',),
+ 301: ('moved_permanently', 'moved', '\\o-'),
+ 302: ('found',),
+ 303: ('see_other', 'other'),
+ 304: ('not_modified',),
+ 305: ('use_proxy',),
+ 306: ('switch_proxy',),
+ 307: ('temporary_redirect', 'temporary_moved', 'temporary'),
+ 308: ('permanent_redirect',
'resume_incomplete', 'resume',), # These 2 to be removed in 3.0
-
- # Client Error.
- 400: ('bad_request', 'bad'),
- 401: ('unauthorized',),
- 402: ('payment_required', 'payment'),
- 403: ('forbidden',),
- 404: ('not_found', '-o-'),
- 405: ('method_not_allowed', 'not_allowed'),
- 406: ('not_acceptable',),
- 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
- 408: ('request_timeout', 'timeout'),
- 409: ('conflict',),
- 410: ('gone',),
- 411: ('length_required',),
- 412: ('precondition_failed', 'precondition'),
- 413: ('request_entity_too_large',),
- 414: ('request_uri_too_large',),
- 415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
- 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
- 417: ('expectation_failed',),
- 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
+
+ # Client Error.
+ 400: ('bad_request', 'bad'),
+ 401: ('unauthorized',),
+ 402: ('payment_required', 'payment'),
+ 403: ('forbidden',),
+ 404: ('not_found', '-o-'),
+ 405: ('method_not_allowed', 'not_allowed'),
+ 406: ('not_acceptable',),
+ 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
+ 408: ('request_timeout', 'timeout'),
+ 409: ('conflict',),
+ 410: ('gone',),
+ 411: ('length_required',),
+ 412: ('precondition_failed', 'precondition'),
+ 413: ('request_entity_too_large',),
+ 414: ('request_uri_too_large',),
+ 415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
+ 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
+ 417: ('expectation_failed',),
+ 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
421: ('misdirected_request',),
- 422: ('unprocessable_entity', 'unprocessable'),
- 423: ('locked',),
- 424: ('failed_dependency', 'dependency'),
- 425: ('unordered_collection', 'unordered'),
- 426: ('upgrade_required', 'upgrade'),
- 428: ('precondition_required', 'precondition'),
- 429: ('too_many_requests', 'too_many'),
- 431: ('header_fields_too_large', 'fields_too_large'),
- 444: ('no_response', 'none'),
- 449: ('retry_with', 'retry'),
- 450: ('blocked_by_windows_parental_controls', 'parental_controls'),
- 451: ('unavailable_for_legal_reasons', 'legal_reasons'),
- 499: ('client_closed_request',),
-
- # Server Error.
- 500: ('internal_server_error', 'server_error', '/o\\', '✗'),
- 501: ('not_implemented',),
- 502: ('bad_gateway',),
- 503: ('service_unavailable', 'unavailable'),
- 504: ('gateway_timeout',),
- 505: ('http_version_not_supported', 'http_version'),
- 506: ('variant_also_negotiates',),
- 507: ('insufficient_storage',),
- 509: ('bandwidth_limit_exceeded', 'bandwidth'),
- 510: ('not_extended',),
+ 422: ('unprocessable_entity', 'unprocessable'),
+ 423: ('locked',),
+ 424: ('failed_dependency', 'dependency'),
+ 425: ('unordered_collection', 'unordered'),
+ 426: ('upgrade_required', 'upgrade'),
+ 428: ('precondition_required', 'precondition'),
+ 429: ('too_many_requests', 'too_many'),
+ 431: ('header_fields_too_large', 'fields_too_large'),
+ 444: ('no_response', 'none'),
+ 449: ('retry_with', 'retry'),
+ 450: ('blocked_by_windows_parental_controls', 'parental_controls'),
+ 451: ('unavailable_for_legal_reasons', 'legal_reasons'),
+ 499: ('client_closed_request',),
+
+ # Server Error.
+ 500: ('internal_server_error', 'server_error', '/o\\', '✗'),
+ 501: ('not_implemented',),
+ 502: ('bad_gateway',),
+ 503: ('service_unavailable', 'unavailable'),
+ 504: ('gateway_timeout',),
+ 505: ('http_version_not_supported', 'http_version'),
+ 506: ('variant_also_negotiates',),
+ 507: ('insufficient_storage',),
+ 509: ('bandwidth_limit_exceeded', 'bandwidth'),
+ 510: ('not_extended',),
511: ('network_authentication_required', 'network_auth', 'network_authentication'),
-}
-
-codes = LookupDict(name='status_codes')
-
+}
+
+codes = LookupDict(name='status_codes')
+
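Once _init() below has run at import time, every alias resolves to its numeric code, and upper-case attribute variants work as well:

    from requests.status_codes import codes

    assert codes.ok == codes.OK == codes['okay'] == 200
    assert codes.temporary_redirect == 307
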
def _init():
for code, titles in _codes.items():
for title in titles:
diff --git a/contrib/python/requests/requests/structures.py b/contrib/python/requests/requests/structures.py
index 7f57e39c9f..8ee0ba7a08 100644
--- a/contrib/python/requests/requests/structures.py
+++ b/contrib/python/requests/requests/structures.py
@@ -1,105 +1,105 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.structures
-~~~~~~~~~~~~~~~~~~~
-
-Data structures that power Requests.
-"""
-
+# -*- coding: utf-8 -*-
+
+"""
+requests.structures
+~~~~~~~~~~~~~~~~~~~
+
+Data structures that power Requests.
+"""
+
from collections import OrderedDict
-
+
from .compat import Mapping, MutableMapping
-
+
class CaseInsensitiveDict(MutableMapping):
"""A case-insensitive ``dict``-like object.
-
- Implements all methods and operations of
+
+ Implements all methods and operations of
``MutableMapping`` as well as dict's ``copy``. Also
- provides ``lower_items``.
-
- All keys are expected to be strings. The structure remembers the
- case of the last key to be set, and ``iter(instance)``,
- ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
- will contain case-sensitive keys. However, querying and contains
- testing is case insensitive::
-
- cid = CaseInsensitiveDict()
- cid['Accept'] = 'application/json'
- cid['aCCEPT'] == 'application/json' # True
- list(cid) == ['Accept'] # True
-
- For example, ``headers['content-encoding']`` will return the
- value of a ``'Content-Encoding'`` response header, regardless
- of how the header name was originally stored.
-
- If the constructor, ``.update``, or equality comparison
- operations are given keys that have equal ``.lower()``s, the
- behavior is undefined.
+ provides ``lower_items``.
+
+ All keys are expected to be strings. The structure remembers the
+ case of the last key to be set, and ``iter(instance)``,
+ ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
+ will contain case-sensitive keys. However, querying and contains
+ testing is case insensitive::
+
+ cid = CaseInsensitiveDict()
+ cid['Accept'] = 'application/json'
+ cid['aCCEPT'] == 'application/json' # True
+ list(cid) == ['Accept'] # True
+
+ For example, ``headers['content-encoding']`` will return the
+ value of a ``'Content-Encoding'`` response header, regardless
+ of how the header name was originally stored.
+
+ If the constructor, ``.update``, or equality comparison
+ operations are given keys that have equal ``.lower()``s, the
+ behavior is undefined.
"""
-
- def __init__(self, data=None, **kwargs):
+
+ def __init__(self, data=None, **kwargs):
self._store = OrderedDict()
- if data is None:
- data = {}
- self.update(data, **kwargs)
-
- def __setitem__(self, key, value):
- # Use the lowercased key for lookups, but store the actual
- # key alongside the value.
- self._store[key.lower()] = (key, value)
-
- def __getitem__(self, key):
- return self._store[key.lower()][1]
-
- def __delitem__(self, key):
- del self._store[key.lower()]
-
- def __iter__(self):
- return (casedkey for casedkey, mappedvalue in self._store.values())
-
- def __len__(self):
- return len(self._store)
-
- def lower_items(self):
- """Like iteritems(), but with all lowercase keys."""
- return (
- (lowerkey, keyval[1])
- for (lowerkey, keyval)
- in self._store.items()
- )
-
- def __eq__(self, other):
+ if data is None:
+ data = {}
+ self.update(data, **kwargs)
+
+ def __setitem__(self, key, value):
+ # Use the lowercased key for lookups, but store the actual
+ # key alongside the value.
+ self._store[key.lower()] = (key, value)
+
+ def __getitem__(self, key):
+ return self._store[key.lower()][1]
+
+ def __delitem__(self, key):
+ del self._store[key.lower()]
+
+ def __iter__(self):
+ return (casedkey for casedkey, mappedvalue in self._store.values())
+
+ def __len__(self):
+ return len(self._store)
+
+ def lower_items(self):
+ """Like iteritems(), but with all lowercase keys."""
+ return (
+ (lowerkey, keyval[1])
+ for (lowerkey, keyval)
+ in self._store.items()
+ )
+
+ def __eq__(self, other):
if isinstance(other, Mapping):
- other = CaseInsensitiveDict(other)
- else:
- return NotImplemented
- # Compare insensitively
- return dict(self.lower_items()) == dict(other.lower_items())
-
- # Copy is required
- def copy(self):
- return CaseInsensitiveDict(self._store.values())
-
- def __repr__(self):
- return str(dict(self.items()))
-
-
-class LookupDict(dict):
- """Dictionary lookup object."""
-
- def __init__(self, name=None):
- self.name = name
- super(LookupDict, self).__init__()
-
- def __repr__(self):
- return '<lookup \'%s\'>' % (self.name)
-
- def __getitem__(self, key):
- # We allow fall-through here, so values default to None
-
- return self.__dict__.get(key, None)
-
- def get(self, key, default=None):
- return self.__dict__.get(key, default)
+ other = CaseInsensitiveDict(other)
+ else:
+ return NotImplemented
+ # Compare insensitively
+ return dict(self.lower_items()) == dict(other.lower_items())
+
+ # Copy is required
+ def copy(self):
+ return CaseInsensitiveDict(self._store.values())
+
+ def __repr__(self):
+ return str(dict(self.items()))
+
+
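A short sketch of the contract in the docstring above: case is preserved for iteration but ignored for lookup and equality:

    from requests.structures import CaseInsensitiveDict

    cid = CaseInsensitiveDict({'Content-Type': 'text/html'})
    cid['content-type'] = 'application/json'  # re-keys under the new casing
    assert list(cid) == ['content-type']
    assert cid['CONTENT-TYPE'] == 'application/json'
    assert cid == {'CoNtEnT-tYpE': 'application/json'}
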
+class LookupDict(dict):
+ """Dictionary lookup object."""
+
+ def __init__(self, name=None):
+ self.name = name
+ super(LookupDict, self).__init__()
+
+ def __repr__(self):
+ return '<lookup \'%s\'>' % (self.name)
+
+ def __getitem__(self, key):
+ # We allow fall-through here, so values default to None
+
+ return self.__dict__.get(key, None)
+
+ def get(self, key, default=None):
+ return self.__dict__.get(key, default)
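A sketch of the fall-through behaviour: lookups go through the instance __dict__, so missing keys yield None rather than raising KeyError:

    from requests.structures import LookupDict

    d = LookupDict(name='demo')
    d.answer = 42                 # attributes back the item lookups
    assert d['answer'] == 42
    assert d['missing'] is None   # fall-through instead of KeyError
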
diff --git a/contrib/python/requests/requests/utils.py b/contrib/python/requests/requests/utils.py
index da1ecab840..611bdbe290 100644
--- a/contrib/python/requests/requests/utils.py
+++ b/contrib/python/requests/requests/utils.py
@@ -1,48 +1,48 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.utils
-~~~~~~~~~~~~~~
-
-This module provides utility functions that are used within Requests
-that are also useful for external consumption.
-"""
-
-import codecs
+# -*- coding: utf-8 -*-
+
+"""
+requests.utils
+~~~~~~~~~~~~~~
+
+This module provides utility functions that are used within Requests
+that are also useful for external consumption.
+"""
+
+import codecs
import contextlib
-import io
-import os
-import re
-import socket
-import struct
+import io
+import os
+import re
+import socket
+import struct
import sys
import tempfile
-import warnings
+import warnings
import zipfile
from collections import OrderedDict
from urllib3.util import make_headers
from urllib3.util import parse_url
-
+
from .__version__ import __version__
-from . import certs
+from . import certs
# to_native_string is unused here, but imported here for backwards compatibility
from ._internal_utils import to_native_string
-from .compat import parse_http_list as _parse_list_header
+from .compat import parse_http_list as _parse_list_header
from .compat import (
quote, urlparse, bytes, str, unquote, getproxies,
proxy_bypass, urlunparse, basestring, integer_types, is_py3,
proxy_bypass_environment, getproxies_environment, Mapping)
from .cookies import cookiejar_from_dict
-from .structures import CaseInsensitiveDict
+from .structures import CaseInsensitiveDict
from .exceptions import (
InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)
-
-NETRC_FILES = ('.netrc', '_netrc')
-
-DEFAULT_CA_BUNDLE_PATH = certs.where()
-
+
+NETRC_FILES = ('.netrc', '_netrc')
+
+DEFAULT_CA_BUNDLE_PATH = certs.where()
+
DEFAULT_PORTS = {'http': 80, 'https': 443}
-
+
# Ensure that ', ' is used to preserve previous delimiter behavior.
DEFAULT_ACCEPT_ENCODING = ", ".join(
re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
@@ -103,36 +103,36 @@ if sys.platform == 'win32':
return proxy_bypass_registry(host)
-def dict_to_sequence(d):
- """Returns an internal sequence dictionary update."""
-
- if hasattr(d, 'items'):
- d = d.items()
-
- return d
-
-
-def super_len(o):
+def dict_to_sequence(d):
+ """Returns an internal sequence dictionary update."""
+
+ if hasattr(d, 'items'):
+ d = d.items()
+
+ return d
+
+
+def super_len(o):
total_length = None
current_position = 0
- if hasattr(o, '__len__'):
+ if hasattr(o, '__len__'):
total_length = len(o)
-
+
elif hasattr(o, 'len'):
total_length = o.len
-
+
elif hasattr(o, 'fileno'):
- try:
- fileno = o.fileno()
+ try:
+ fileno = o.fileno()
except (io.UnsupportedOperation, AttributeError):
# AttributeError is a surprising exception, seeing as how we've just checked
# that `hasattr(o, 'fileno')`. It happens for objects obtained via
# `Tarfile.extractfile()`, per issue 5229.
- pass
- else:
+ pass
+ else:
total_length = os.fstat(fileno).st_size
-
+
# Having used fstat to determine the file length, we need to
# confirm that this file was opened up in binary mode.
if 'b' not in o.mode:
@@ -145,7 +145,7 @@ def super_len(o):
"for files in text mode."),
FileModeWarning
)
-
+
if hasattr(o, 'tell'):
try:
current_position = o.tell()
@@ -163,7 +163,7 @@ def super_len(o):
# seek to end of file
o.seek(0, 2)
total_length = o.tell()
-
+
# seek back to current position to support
# partially read file-like objects
o.seek(current_position or 0)
@@ -177,70 +177,70 @@ def super_len(o):
def get_netrc_auth(url, raise_errors=False):
- """Returns the Requests tuple auth for a given url from netrc."""
-
+ """Returns the Requests tuple auth for a given url from netrc."""
+
netrc_file = os.environ.get('NETRC')
if netrc_file is not None:
netrc_locations = (netrc_file,)
else:
netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)
- try:
- from netrc import netrc, NetrcParseError
-
- netrc_path = None
-
+ try:
+ from netrc import netrc, NetrcParseError
+
+ netrc_path = None
+
for f in netrc_locations:
- try:
+ try:
loc = os.path.expanduser(f)
- except KeyError:
- # os.path.expanduser can fail when $HOME is undefined and
+ except KeyError:
+ # os.path.expanduser can fail when $HOME is undefined and
# getpwuid fails. See https://bugs.python.org/issue20164 &
# https://github.com/psf/requests/issues/1846
- return
-
- if os.path.exists(loc):
- netrc_path = loc
- break
-
- # Abort early if there isn't one.
- if netrc_path is None:
- return
-
- ri = urlparse(url)
-
+ return
+
+ if os.path.exists(loc):
+ netrc_path = loc
+ break
+
+ # Abort early if there isn't one.
+ if netrc_path is None:
+ return
+
+ ri = urlparse(url)
+
# Strip port numbers from netloc. This weird `if...encode` dance is
# used for Python 3.2, which doesn't support unicode literals.
splitstr = b':'
if isinstance(url, str):
splitstr = splitstr.decode('ascii')
host = ri.netloc.split(splitstr)[0]
-
- try:
- _netrc = netrc(netrc_path).authenticators(host)
- if _netrc:
- # Return with login / password
- login_i = (0 if _netrc[0] else 1)
- return (_netrc[login_i], _netrc[2])
- except (NetrcParseError, IOError):
- # If there was a parsing error or a permissions issue reading the file,
+
+ try:
+ _netrc = netrc(netrc_path).authenticators(host)
+ if _netrc:
+ # Return with login / password
+ login_i = (0 if _netrc[0] else 1)
+ return (_netrc[login_i], _netrc[2])
+ except (NetrcParseError, IOError):
+ # If there was a parsing error or a permissions issue reading the file,
# we'll just skip netrc auth unless explicitly asked to raise errors.
if raise_errors:
raise
-
+
# App Engine hackiness.
- except (ImportError, AttributeError):
- pass
-
-
-def guess_filename(obj):
- """Tries to guess the filename of the given object."""
- name = getattr(obj, 'name', None)
+ except (ImportError, AttributeError):
+ pass
+
+
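A sketch of netrc resolution with a placeholder path and machine entry:

    import os
    from requests.utils import get_netrc_auth

    os.environ['NETRC'] = '/home/user/.netrc'
    # With a line such as "machine example.com login u password p" in that
    # file, this returns ('u', 'p'); otherwise it returns None.
    auth = get_netrc_auth('https://example.com/data')
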
+def guess_filename(obj):
+ """Tries to guess the filename of the given object."""
+ name = getattr(obj, 'name', None)
if (name and isinstance(name, basestring) and name[0] != '<' and
name[-1] != '>'):
- return os.path.basename(name)
-
-
+ return os.path.basename(name)
+
+
def extract_zipped_paths(path):
"""Replace nonexistent paths that look like they refer to a member of a zip
archive with the location of an extracted copy of the target, or else
@@ -292,201 +292,201 @@ def atomic_open(filename):
raise
-def from_key_val_list(value):
- """Take an object and test to see if it can be represented as a
- dictionary. Unless it can not be represented as such, return an
- OrderedDict, e.g.,
-
- ::
-
- >>> from_key_val_list([('key', 'val')])
- OrderedDict([('key', 'val')])
- >>> from_key_val_list('string')
+def from_key_val_list(value):
+ """Take an object and test to see if it can be represented as a
+ dictionary. If it can be represented as such, return an
+ OrderedDict, e.g.,
+
+ ::
+
+ >>> from_key_val_list([('key', 'val')])
+ OrderedDict([('key', 'val')])
+ >>> from_key_val_list('string')
Traceback (most recent call last):
...
ValueError: cannot encode objects that are not 2-tuples
- >>> from_key_val_list({'key': 'val'})
- OrderedDict([('key', 'val')])
+ >>> from_key_val_list({'key': 'val'})
+ OrderedDict([('key', 'val')])
:rtype: OrderedDict
- """
- if value is None:
- return None
-
- if isinstance(value, (str, bytes, bool, int)):
- raise ValueError('cannot encode objects that are not 2-tuples')
-
- return OrderedDict(value)
-
-
-def to_key_val_list(value):
- """Take an object and test to see if it can be represented as a
- dictionary. If it can be, return a list of tuples, e.g.,
-
- ::
-
- >>> to_key_val_list([('key', 'val')])
- [('key', 'val')]
- >>> to_key_val_list({'key': 'val'})
- [('key', 'val')]
- >>> to_key_val_list('string')
+ """
+ if value is None:
+ return None
+
+ if isinstance(value, (str, bytes, bool, int)):
+ raise ValueError('cannot encode objects that are not 2-tuples')
+
+ return OrderedDict(value)
+
+
+def to_key_val_list(value):
+ """Take an object and test to see if it can be represented as a
+ dictionary. If it can be, return a list of tuples, e.g.,
+
+ ::
+
+ >>> to_key_val_list([('key', 'val')])
+ [('key', 'val')]
+ >>> to_key_val_list({'key': 'val'})
+ [('key', 'val')]
+ >>> to_key_val_list('string')
Traceback (most recent call last):
...
ValueError: cannot encode objects that are not 2-tuples
:rtype: list
- """
- if value is None:
- return None
-
- if isinstance(value, (str, bytes, bool, int)):
- raise ValueError('cannot encode objects that are not 2-tuples')
-
+ """
+ if value is None:
+ return None
+
+ if isinstance(value, (str, bytes, bool, int)):
+ raise ValueError('cannot encode objects that are not 2-tuples')
+
if isinstance(value, Mapping):
- value = value.items()
-
- return list(value)
-
-
-# From mitsuhiko/werkzeug (used with permission).
-def parse_list_header(value):
- """Parse lists as described by RFC 2068 Section 2.
-
- In particular, parse comma-separated lists where the elements of
- the list may include quoted-strings. A quoted-string could
- contain a comma. A non-quoted string could have quotes in the
- middle. Quotes are removed automatically after parsing.
-
- It basically works like :func:`parse_set_header` except that items
- may appear multiple times and case sensitivity is preserved.
-
- The return value is a standard :class:`list`:
-
- >>> parse_list_header('token, "quoted value"')
- ['token', 'quoted value']
-
- To create a header from the :class:`list` again, use the
- :func:`dump_header` function.
-
- :param value: a string with a list header.
- :return: :class:`list`
+ value = value.items()
+
+ return list(value)
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def parse_list_header(value):
+ """Parse lists as described by RFC 2068 Section 2.
+
+ In particular, parse comma-separated lists where the elements of
+ the list may include quoted-strings. A quoted-string could
+ contain a comma. A non-quoted string could have quotes in the
+ middle. Quotes are removed automatically after parsing.
+
+ It basically works like :func:`parse_set_header` except that items
+ may appear multiple times and case sensitivity is preserved.
+
+ The return value is a standard :class:`list`:
+
+ >>> parse_list_header('token, "quoted value"')
+ ['token', 'quoted value']
+
+ To create a header from the :class:`list` again, use the
+ :func:`dump_header` function.
+
+ :param value: a string with a list header.
+ :return: :class:`list`
:rtype: list
- """
- result = []
- for item in _parse_list_header(value):
- if item[:1] == item[-1:] == '"':
- item = unquote_header_value(item[1:-1])
- result.append(item)
- return result
-
-
-# From mitsuhiko/werkzeug (used with permission).
-def parse_dict_header(value):
- """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
- convert them into a python dict:
-
- >>> d = parse_dict_header('foo="is a fish", bar="as well"')
- >>> type(d) is dict
- True
- >>> sorted(d.items())
- [('bar', 'as well'), ('foo', 'is a fish')]
-
- If there is no value for a key it will be `None`:
-
- >>> parse_dict_header('key_without_value')
- {'key_without_value': None}
-
- To create a header from the :class:`dict` again, use the
- :func:`dump_header` function.
-
- :param value: a string with a dict header.
- :return: :class:`dict`
+ """
+ result = []
+ for item in _parse_list_header(value):
+ if item[:1] == item[-1:] == '"':
+ item = unquote_header_value(item[1:-1])
+ result.append(item)
+ return result
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def parse_dict_header(value):
+ """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
+ convert them into a python dict:
+
+ >>> d = parse_dict_header('foo="is a fish", bar="as well"')
+ >>> type(d) is dict
+ True
+ >>> sorted(d.items())
+ [('bar', 'as well'), ('foo', 'is a fish')]
+
+ If there is no value for a key it will be `None`:
+
+ >>> parse_dict_header('key_without_value')
+ {'key_without_value': None}
+
+ To create a header from the :class:`dict` again, use the
+ :func:`dump_header` function.
+
+ :param value: a string with a dict header.
+ :return: :class:`dict`
:rtype: dict
- """
- result = {}
- for item in _parse_list_header(value):
- if '=' not in item:
- result[item] = None
- continue
- name, value = item.split('=', 1)
- if value[:1] == value[-1:] == '"':
- value = unquote_header_value(value[1:-1])
- result[name] = value
- return result
-
-
-# From mitsuhiko/werkzeug (used with permission).
-def unquote_header_value(value, is_filename=False):
- r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
- This does not use the real unquoting but what browsers are actually
- using for quoting.
-
- :param value: the header value to unquote.
+ """
+ result = {}
+ for item in _parse_list_header(value):
+ if '=' not in item:
+ result[item] = None
+ continue
+ name, value = item.split('=', 1)
+ if value[:1] == value[-1:] == '"':
+ value = unquote_header_value(value[1:-1])
+ result[name] = value
+ return result
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def unquote_header_value(value, is_filename=False):
+ r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
+ This does not implement real RFC unquoting; it reverses the
+ quoting that browsers actually use.
+
+ :param value: the header value to unquote.
:rtype: str
- """
- if value and value[0] == value[-1] == '"':
- # this is not the real unquoting, but fixing this so that the
- # RFC is met will result in bugs with internet explorer and
- # probably some other browsers as well. IE, for example,
- # uploads files with "C:\foo\bar.txt" as the filename
- value = value[1:-1]
-
- # if this is a filename and the starting characters look like
- # a UNC path, then just return the value without quotes. Using the
- # replace sequence below on a UNC path has the effect of turning
- # the leading double slash into a single slash and then
- # _fix_ie_filename() doesn't work correctly. See #458.
- if not is_filename or value[:2] != '\\\\':
- return value.replace('\\\\', '\\').replace('\\"', '"')
- return value
-
-
-def dict_from_cookiejar(cj):
- """Returns a key/value dictionary from a CookieJar.
-
- :param cj: CookieJar object to extract cookies from.
+ """
+ if value and value[0] == value[-1] == '"':
+ # this is not the real unquoting, but fixing this so that the
+ # RFC is met will result in bugs with internet explorer and
+ # probably some other browsers as well. IE, for example,
+ # uploads files with "C:\foo\bar.txt" as the filename
+ value = value[1:-1]
+
+ # if this is a filename and the starting characters look like
+ # a UNC path, then just return the value without quotes. Using the
+ # replace sequence below on a UNC path has the effect of turning
+ # the leading double slash into a single slash and then
+ # _fix_ie_filename() doesn't work correctly. See #458.
+ if not is_filename or value[:2] != '\\\\':
+ return value.replace('\\\\', '\\').replace('\\"', '"')
+ return value
+
+
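(Illustrative sketch: escaped quotes and backslashes are unescaped, but a quoted UNC filename is returned as-is, per the comment in the body above.)

    >>> from requests.utils import unquote_header_value
    >>> unquote_header_value('"hello \\"world\\""')
    'hello "world"'
    >>> unquote_header_value('"\\\\server\\share\\file.txt"', is_filename=True)
    '\\\\server\\share\\file.txt'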
+def dict_from_cookiejar(cj):
+ """Returns a key/value dictionary from a CookieJar.
+
+ :param cj: CookieJar object to extract cookies from.
:rtype: dict
- """
-
- cookie_dict = {}
-
- for cookie in cj:
- cookie_dict[cookie.name] = cookie.value
-
- return cookie_dict
-
-
-def add_dict_to_cookiejar(cj, cookie_dict):
- """Returns a CookieJar from a key/value dictionary.
-
- :param cj: CookieJar to insert cookies into.
- :param cookie_dict: Dict of key/values to insert into CookieJar.
+ """
+
+ cookie_dict = {}
+
+ for cookie in cj:
+ cookie_dict[cookie.name] = cookie.value
+
+ return cookie_dict
+
+
+def add_dict_to_cookiejar(cj, cookie_dict):
+ """Returns a CookieJar from a key/value dictionary.
+
+ :param cj: CookieJar to insert cookies into.
+ :param cookie_dict: Dict of key/values to insert into CookieJar.
:rtype: CookieJar
- """
-
+ """
+
return cookiejar_from_dict(cookie_dict, cj)
-
-
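(Illustrative sketch of the round trip between the two helpers above, assuming the stdlib http.cookiejar and the vendored requests are importable.)

    >>> from http.cookiejar import CookieJar
    >>> from requests.utils import add_dict_to_cookiejar, dict_from_cookiejar
    >>> jar = add_dict_to_cookiejar(CookieJar(), {'session': 'abc123'})
    >>> dict_from_cookiejar(jar)
    {'session': 'abc123'}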
-def get_encodings_from_content(content):
- """Returns encodings from given content string.
-
- :param content: bytestring to extract encodings from.
- """
- warnings.warn((
- 'In requests 3.0, get_encodings_from_content will be removed. For '
- 'more information, please see the discussion on issue #2266. (This'
- ' warning should only appear once.)'),
- DeprecationWarning)
-
- charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
- pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
- xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
-
- return (charset_re.findall(content) +
- pragma_re.findall(content) +
- xml_re.findall(content))
-
-
+
+
+def get_encodings_from_content(content):
+ """Returns encodings from given content string.
+
+ :param content: bytestring to extract encodings from.
+ """
+ warnings.warn((
+ 'In requests 3.0, get_encodings_from_content will be removed. For '
+ 'more information, please see the discussion on issue #2266. (This'
+ ' warning should only appear once.)'),
+ DeprecationWarning)
+
+ charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
+ pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
+ xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
+
+ return (charset_re.findall(content) +
+ pragma_re.findall(content) +
+ xml_re.findall(content))
+
+
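(Illustrative sketch of the three regexes above; note the call also emits the DeprecationWarning to stderr.)

    >>> from requests.utils import get_encodings_from_content
    >>> get_encodings_from_content(
    ...     '<?xml version="1.0" encoding="latin-1"?><meta charset="utf-8">')
    ['utf-8', 'latin-1']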
def _parse_content_type_header(header):
"""Returns content type and parameters from given header
@@ -512,132 +512,132 @@ def _parse_content_type_header(header):
return content_type, params_dict
-def get_encoding_from_headers(headers):
- """Returns encodings from given HTTP Header Dict.
-
- :param headers: dictionary to extract encoding from.
+def get_encoding_from_headers(headers):
+ """Returns encodings from given HTTP Header Dict.
+
+ :param headers: dictionary to extract encoding from.
:rtype: str
- """
-
- content_type = headers.get('content-type')
-
- if not content_type:
- return None
-
+ """
+
+ content_type = headers.get('content-type')
+
+ if not content_type:
+ return None
+
content_type, params = _parse_content_type_header(content_type)
-
- if 'charset' in params:
- return params['charset'].strip("'\"")
-
- if 'text' in content_type:
- return 'ISO-8859-1'
-
+
+ if 'charset' in params:
+ return params['charset'].strip("'\"")
+
+ if 'text' in content_type:
+ return 'ISO-8859-1'
+
if 'application/json' in content_type:
# Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset
return 'utf-8'
-
-
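(Illustrative sketch of the three branches above; real Response headers are case-insensitive, so a plain dict here needs the lowercase key.)

    >>> from requests.utils import get_encoding_from_headers
    >>> get_encoding_from_headers({'content-type': 'text/html; charset=ISO-8859-5'})
    'ISO-8859-5'
    >>> get_encoding_from_headers({'content-type': 'text/plain'})
    'ISO-8859-1'
    >>> get_encoding_from_headers({'content-type': 'application/json'})
    'utf-8'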
-def stream_decode_response_unicode(iterator, r):
- """Stream decodes a iterator."""
-
- if r.encoding is None:
- for item in iterator:
- yield item
- return
-
- decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
- for chunk in iterator:
- rv = decoder.decode(chunk)
- if rv:
- yield rv
- rv = decoder.decode(b'', final=True)
- if rv:
- yield rv
-
-
-def iter_slices(string, slice_length):
- """Iterate over slices of a string."""
- pos = 0
+
+
+def stream_decode_response_unicode(iterator, r):
+ """Stream decodes a iterator."""
+
+ if r.encoding is None:
+ for item in iterator:
+ yield item
+ return
+
+ decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
+ for chunk in iterator:
+ rv = decoder.decode(chunk)
+ if rv:
+ yield rv
+ rv = decoder.decode(b'', final=True)
+ if rv:
+ yield rv
+
+
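(Why the incremental decoder above matters: a multi-byte character split across two chunks would corrupt a naive per-chunk .decode(). Sketch using only the stdlib codecs module.)

    >>> import codecs
    >>> decoder = codecs.getincrementaldecoder('utf-8')(errors='replace')
    >>> decoder.decode(b'\xd0')   # first byte of a two-byte character
    ''
    >>> decoder.decode(b'\xbf')   # completed by the next chunk
    'п'
    >>> decoder.decode(b'', final=True)
    ''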
+def iter_slices(string, slice_length):
+ """Iterate over slices of a string."""
+ pos = 0
if slice_length is None or slice_length <= 0:
slice_length = len(string)
- while pos < len(string):
- yield string[pos:pos + slice_length]
- pos += slice_length
-
-
-def get_unicode_from_response(r):
- """Returns the requested content back in unicode.
-
- :param r: Response object to get unicode content from.
-
- Encodings tried, in order:
-
- 1. charset from the content-type header
- 2. fall back to decoding with replacement characters
-
+ while pos < len(string):
+ yield string[pos:pos + slice_length]
+ pos += slice_length
+
+
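(Illustrative sketch: a None or non-positive slice_length yields the whole string as one slice.)

    >>> from requests.utils import iter_slices
    >>> list(iter_slices('abcdefg', 3))
    ['abc', 'def', 'g']
    >>> list(iter_slices('abcdefg', None))
    ['abcdefg']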
+def get_unicode_from_response(r):
+ """Returns the requested content back in unicode.
+
+ :param r: Response object to get unicode content from.
+
+ Encodings tried, in order:
+
+ 1. charset from the content-type header
+ 2. fall back to decoding with replacement characters
+
:rtype: str
- """
- warnings.warn((
- 'In requests 3.0, get_unicode_from_response will be removed. For '
- 'more information, please see the discussion on issue #2266. (This'
- ' warning should only appear once.)'),
- DeprecationWarning)
-
- tried_encodings = []
-
- # Try charset from content-type
- encoding = get_encoding_from_headers(r.headers)
-
- if encoding:
- try:
- return str(r.content, encoding)
- except UnicodeError:
- tried_encodings.append(encoding)
-
- # Fall back:
- try:
- return str(r.content, encoding, errors='replace')
- except TypeError:
- return r.content
-
-
-# The unreserved URI characters (RFC 3986)
-UNRESERVED_SET = frozenset(
+ """
+ warnings.warn((
+ 'In requests 3.0, get_unicode_from_response will be removed. For '
+ 'more information, please see the discussion on issue #2266. (This'
+ ' warning should only appear once.)'),
+ DeprecationWarning)
+
+ tried_encodings = []
+
+ # Try charset from content-type
+ encoding = get_encoding_from_headers(r.headers)
+
+ if encoding:
+ try:
+ return str(r.content, encoding)
+ except UnicodeError:
+ tried_encodings.append(encoding)
+
+ # Fall back:
+ try:
+ return str(r.content, encoding, errors='replace')
+ except TypeError:
+ return r.content
+
+
+# The unreserved URI characters (RFC 3986)
+UNRESERVED_SET = frozenset(
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~")
-
-
-def unquote_unreserved(uri):
- """Un-escape any percent-escape sequences in a URI that are unreserved
- characters. This leaves all reserved, illegal and non-ASCII bytes encoded.
+
+
+def unquote_unreserved(uri):
+ """Un-escape any percent-escape sequences in a URI that are unreserved
+ characters. This leaves all reserved, illegal and non-ASCII bytes encoded.
:rtype: str
- """
- parts = uri.split('%')
- for i in range(1, len(parts)):
- h = parts[i][0:2]
- if len(h) == 2 and h.isalnum():
- try:
- c = chr(int(h, 16))
- except ValueError:
- raise InvalidURL("Invalid percent-escape sequence: '%s'" % h)
-
- if c in UNRESERVED_SET:
- parts[i] = c + parts[i][2:]
- else:
- parts[i] = '%' + parts[i]
- else:
- parts[i] = '%' + parts[i]
- return ''.join(parts)
-
-
-def requote_uri(uri):
- """Re-quote the given URI.
-
- This function passes the given URI through an unquote/quote cycle to
- ensure that it is fully and consistently quoted.
+ """
+ parts = uri.split('%')
+ for i in range(1, len(parts)):
+ h = parts[i][0:2]
+ if len(h) == 2 and h.isalnum():
+ try:
+ c = chr(int(h, 16))
+ except ValueError:
+ raise InvalidURL("Invalid percent-escape sequence: '%s'" % h)
+
+ if c in UNRESERVED_SET:
+ parts[i] = c + parts[i][2:]
+ else:
+ parts[i] = '%' + parts[i]
+ else:
+ parts[i] = '%' + parts[i]
+ return ''.join(parts)
+
+
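(Illustrative sketch: %7E ('~') is unreserved and gets unquoted, while %20 (space) is not and stays encoded.)

    >>> from requests.utils import unquote_unreserved
    >>> unquote_unreserved('%7Euser/%20file')
    '~user/%20file'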
+def requote_uri(uri):
+ """Re-quote the given URI.
+
+ This function passes the given URI through an unquote/quote cycle to
+ ensure that it is fully and consistently quoted.
:rtype: str
- """
+ """
safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
safe_without_percent = "!#$&'()*+,/:;=?@[]~"
try:
@@ -650,69 +650,69 @@ def requote_uri(uri):
# there may be unquoted '%'s in the URI. We need to make sure they're
# properly quoted so they do not cause issues elsewhere.
return quote(uri, safe=safe_without_percent)
-
-
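(Illustrative sketch: existing escapes survive the unquote/quote cycle while bare unsafe characters get quoted.)

    >>> from requests.utils import requote_uri
    >>> requote_uri('http://example.com/path%20with spaces')
    'http://example.com/path%20with%20spaces'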
-def address_in_network(ip, net):
+
+
+def address_in_network(ip, net):
"""This function allows you to check if an IP belongs to a network subnet
- Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
- returns False if ip = 192.168.1.1 and net = 192.168.100.0/24
+ Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
+ returns False if ip = 192.168.1.1 and net = 192.168.100.0/24
:rtype: bool
- """
- ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]
- netaddr, bits = net.split('/')
- netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]
- network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask
- return (ipaddr & netmask) == (network & netmask)
-
-
-def dotted_netmask(mask):
+ """
+ ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]
+ netaddr, bits = net.split('/')
+ netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]
+ network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask
+ return (ipaddr & netmask) == (network & netmask)
+
+
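(Illustrative sketch with non-/24 masks, complementing the docstring examples above.)

    >>> from requests.utils import address_in_network
    >>> address_in_network('10.1.2.3', '10.0.0.0/8')
    True
    >>> address_in_network('10.1.2.3', '10.2.0.0/16')
    False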
+def dotted_netmask(mask):
"""Converts mask from /xx format to xxx.xxx.xxx.xxx
- Example: if mask is 24 function returns 255.255.255.0
+ Example: if mask is 24 function returns 255.255.255.0
:rtype: str
- """
- bits = 0xffffffff ^ (1 << 32 - mask) - 1
- return socket.inet_ntoa(struct.pack('>I', bits))
-
-
-def is_ipv4_address(string_ip):
+ """
+ bits = 0xffffffff ^ (1 << 32 - mask) - 1
+ return socket.inet_ntoa(struct.pack('>I', bits))
+
+
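(Illustrative sketch of the bit arithmetic above for masks other than /24.)

    >>> from requests.utils import dotted_netmask
    >>> dotted_netmask(8)
    '255.0.0.0'
    >>> dotted_netmask(20)
    '255.255.240.0'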
+def is_ipv4_address(string_ip):
"""
:rtype: bool
"""
- try:
- socket.inet_aton(string_ip)
- except socket.error:
- return False
- return True
-
-
-def is_valid_cidr(string_network):
+ try:
+ socket.inet_aton(string_ip)
+ except socket.error:
+ return False
+ return True
+
+
+def is_valid_cidr(string_network):
"""
Very simple check of the cidr format in no_proxy variable.
:rtype: bool
"""
- if string_network.count('/') == 1:
- try:
- mask = int(string_network.split('/')[1])
- except ValueError:
- return False
-
- if mask < 1 or mask > 32:
- return False
-
- try:
- socket.inet_aton(string_network.split('/')[0])
- except socket.error:
- return False
- else:
- return False
- return True
-
-
+ if string_network.count('/') == 1:
+ try:
+ mask = int(string_network.split('/')[1])
+ except ValueError:
+ return False
+
+ if mask < 1 or mask > 32:
+ return False
+
+ try:
+ socket.inet_aton(string_network.split('/')[0])
+ except socket.error:
+ return False
+ else:
+ return False
+ return True
+
+
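(Illustrative sketch: a bare address and an out-of-range mask are both rejected.)

    >>> from requests.utils import is_valid_cidr
    >>> is_valid_cidr('192.168.1.0/24')
    True
    >>> is_valid_cidr('192.168.1.0')
    False
    >>> is_valid_cidr('192.168.1.0/40')
    False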
@contextlib.contextmanager
def set_environ(env_name, value):
"""Set the environment variable 'env_name' to 'value'
@@ -736,65 +736,65 @@ def set_environ(env_name, value):
def should_bypass_proxies(url, no_proxy):
- """
- Returns whether we should bypass proxies or not.
+ """
+ Returns whether we should bypass proxies or not.
:rtype: bool
- """
+ """
# Prioritize lowercase environment variables over uppercase
# to keep a consistent behaviour with other http projects (curl, wget).
- get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
-
- # First check whether no_proxy is defined. If it is, check that the URL
- # we're getting isn't in the no_proxy list.
+ get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
+
+ # First check whether no_proxy is defined. If it is, check that the URL
+ # we're getting isn't in the no_proxy list.
no_proxy_arg = no_proxy
if no_proxy is None:
no_proxy = get_proxy('no_proxy')
parsed = urlparse(url)
-
+
if parsed.hostname is None:
# URLs don't always have hostnames, e.g. file:/// urls.
return True
- if no_proxy:
- # We need to check whether we match here. We need to see if we match
+ if no_proxy:
+ # We need to check whether we match here. We need to see if we match
# the end of the hostname, both with and without the port.
no_proxy = (
host for host in no_proxy.replace(' ', '').split(',') if host
)
-
+
if is_ipv4_address(parsed.hostname):
- for proxy_ip in no_proxy:
- if is_valid_cidr(proxy_ip):
+ for proxy_ip in no_proxy:
+ if is_valid_cidr(proxy_ip):
if address_in_network(parsed.hostname, proxy_ip):
- return True
+ return True
elif parsed.hostname == proxy_ip:
# If no_proxy ip was defined in plain IP notation instead of cidr notation &
# matches the IP of the index
return True
- else:
+ else:
host_with_port = parsed.hostname
if parsed.port:
host_with_port += ':{}'.format(parsed.port)
- for host in no_proxy:
+ for host in no_proxy:
if parsed.hostname.endswith(host) or host_with_port.endswith(host):
- # The URL does match something in no_proxy, so we don't want
- # to apply the proxies on this URL.
- return True
-
+ # The URL does match something in no_proxy, so we don't want
+ # to apply the proxies on this URL.
+ return True
+
with set_environ('no_proxy', no_proxy_arg):
# parsed.hostname can be `None` in cases such as a file URI.
try:
bypass = proxy_bypass(parsed.hostname)
except (TypeError, socket.gaierror):
bypass = False
-
- if bypass:
- return True
-
- return False
-
+
+ if bypass:
+ return True
+
+ return False
+
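(Illustrative sketch: a no_proxy suffix match and a hostname-less file:// URL both short-circuit to True before any system proxy lookup, so these results do not depend on proxy environment variables.)

    >>> from requests.utils import should_bypass_proxies
    >>> should_bypass_proxies('http://internal.example.com/x', no_proxy='example.com')
    True
    >>> should_bypass_proxies('file:///tmp/data.csv', no_proxy=None)
    True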
def get_environ_proxies(url, no_proxy=None):
"""
@@ -803,14 +803,14 @@ def get_environ_proxies(url, no_proxy=None):
:rtype: dict
"""
if should_bypass_proxies(url, no_proxy=no_proxy):
- return {}
- else:
- return getproxies()
-
-
+ return {}
+ else:
+ return getproxies()
+
+
def select_proxy(url, proxies):
"""Select a proxy for the url, if applicable.
-
+
:param url: The url being for the request
:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
"""
@@ -818,7 +818,7 @@ def select_proxy(url, proxies):
urlparts = urlparse(url)
if urlparts.hostname is None:
return proxies.get(urlparts.scheme, proxies.get('all'))
-
+
proxy_keys = [
urlparts.scheme + '://' + urlparts.hostname,
urlparts.scheme,
@@ -830,10 +830,10 @@ def select_proxy(url, proxies):
if proxy_key in proxies:
proxy = proxies[proxy_key]
break
-
+
return proxy
-
-
+
+
def resolve_proxies(request, proxies, trust_env=True):
"""This method takes proxy information from a request and configuration
input to resolve a mapping of target proxies. This will consider settings
@@ -870,94 +870,94 @@ def default_user_agent(name="python-requests"):
return '%s/%s' % (name, __version__)
-def default_headers():
+def default_headers():
"""
:rtype: requests.structures.CaseInsensitiveDict
"""
- return CaseInsensitiveDict({
- 'User-Agent': default_user_agent(),
+ return CaseInsensitiveDict({
+ 'User-Agent': default_user_agent(),
'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,
- 'Accept': '*/*',
- 'Connection': 'keep-alive',
- })
-
-
-def parse_header_links(value):
+ 'Accept': '*/*',
+ 'Connection': 'keep-alive',
+ })
+
+
+def parse_header_links(value):
"""Return a list of parsed link headers proxies.
-
- i.e. Link: <http://.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back; type="image/jpeg"
-
+
+ i.e. Link: <http://.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back; type="image/jpeg"
+
:rtype: list
- """
-
- links = []
-
+ """
+
+ links = []
+
replace_chars = ' \'"'
-
+
value = value.strip(replace_chars)
if not value:
return links
for val in re.split(', *<', value):
- try:
+ try:
url, params = val.split(';', 1)
- except ValueError:
- url, params = val, ''
-
+ except ValueError:
+ url, params = val, ''
+
link = {'url': url.strip('<> \'"')}
-
+
for param in params.split(';'):
- try:
+ try:
key, value = param.split('=')
- except ValueError:
- break
-
- link[key.strip(replace_chars)] = value.strip(replace_chars)
-
- links.append(link)
-
- return links
-
-
-# Null bytes; no need to recreate these on each call to guess_json_utf
-_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3
-_null2 = _null * 2
-_null3 = _null * 3
-
-
-def guess_json_utf(data):
+ except ValueError:
+ break
+
+ link[key.strip(replace_chars)] = value.strip(replace_chars)
+
+ links.append(link)
+
+ return links
+
+
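(Illustrative sketch with a typical paginated-API Link header; the URLs are hypothetical.)

    >>> from requests.utils import parse_header_links
    >>> parse_header_links('<https://api.example.com/?page=2>; rel="next", '
    ...                    '<https://api.example.com/?page=5>; rel="last"')
    [{'url': 'https://api.example.com/?page=2', 'rel': 'next'}, {'url': 'https://api.example.com/?page=5', 'rel': 'last'}]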
+# Null bytes; no need to recreate these on each call to guess_json_utf
+_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3
+_null2 = _null * 2
+_null3 = _null * 3
+
+
+def guess_json_utf(data):
"""
:rtype: str
"""
- # JSON always starts with two ASCII characters, so detection is as
- # easy as counting the nulls and determining the encoding from
- # their location and count. Also detect a BOM, if present.
- sample = data[:4]
+ # JSON always starts with two ASCII characters, so detection is as
+ # easy as counting the nulls and determining the encoding from
+ # their location and count. Also detect a BOM, if present.
+ sample = data[:4]
if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
- return 'utf-32' # BOM included
- if sample[:3] == codecs.BOM_UTF8:
- return 'utf-8-sig' # BOM included, MS style (discouraged)
- if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
- return 'utf-16' # BOM included
- nullcount = sample.count(_null)
- if nullcount == 0:
- return 'utf-8'
- if nullcount == 2:
- if sample[::2] == _null2: # 1st and 3rd are null
- return 'utf-16-be'
- if sample[1::2] == _null2: # 2nd and 4th are null
- return 'utf-16-le'
- # Did not detect 2 valid UTF-16 ascii-range characters
- if nullcount == 3:
- if sample[:3] == _null3:
- return 'utf-32-be'
- if sample[1:] == _null3:
- return 'utf-32-le'
- # Did not detect a valid UTF-32 ascii-range character
- return None
-
-
-def prepend_scheme_if_needed(url, new_scheme):
+ return 'utf-32' # BOM included
+ if sample[:3] == codecs.BOM_UTF8:
+ return 'utf-8-sig' # BOM included, MS style (discouraged)
+ if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
+ return 'utf-16' # BOM included
+ nullcount = sample.count(_null)
+ if nullcount == 0:
+ return 'utf-8'
+ if nullcount == 2:
+ if sample[::2] == _null2: # 1st and 3rd are null
+ return 'utf-16-be'
+ if sample[1::2] == _null2: # 2nd and 4th are null
+ return 'utf-16-le'
+ # Did not detect 2 valid UTF-16 ascii-range characters
+ if nullcount == 3:
+ if sample[:3] == _null3:
+ return 'utf-32-be'
+ if sample[1:] == _null3:
+ return 'utf-32-le'
+ # Did not detect a valid UTF-32 ascii-range character
+ return None
+
+
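(Illustrative sketch of the null-counting heuristic above; the utf-32 case is detected via its BOM.)

    >>> from requests.utils import guess_json_utf
    >>> guess_json_utf('{"a": 1}'.encode('utf-8'))
    'utf-8'
    >>> guess_json_utf('{"a": 1}'.encode('utf-16-le'))
    'utf-16-le'
    >>> guess_json_utf('{"a": 1}'.encode('utf-32'))
    'utf-32'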
+def prepend_scheme_if_needed(url, new_scheme):
"""Given a URL that may or may not have a scheme, prepend the given scheme.
Does not replace a present scheme with the one provided as an argument.
@@ -965,15 +965,15 @@ def prepend_scheme_if_needed(url, new_scheme):
"""
parsed = parse_url(url)
scheme, auth, host, port, path, query, fragment = parsed
-
+
# A defect in urlparse determines that there isn't a netloc present in some
# urls. We previously assumed parsing was overly cautious, and swapped the
# netloc and path. Due to a lack of tests on the original defect, this is
# maintained with parse_url for backwards compatibility.
netloc = parsed.netloc
- if not netloc:
- netloc, path = path, netloc
-
+ if not netloc:
+ netloc, path = path, netloc
+
if auth:
# parse_url doesn't provide the netloc with auth
# so we'll add it ourselves.
@@ -982,26 +982,26 @@ def prepend_scheme_if_needed(url, new_scheme):
scheme = new_scheme
if path is None:
path = ''
-
+
return urlunparse((scheme, netloc, path, '', query, fragment))
-
-def get_auth_from_url(url):
- """Given a url with authentication components, extract them into a tuple of
+
+def get_auth_from_url(url):
+ """Given a url with authentication components, extract them into a tuple of
username, password.
:rtype: (str,str)
"""
- parsed = urlparse(url)
-
- try:
- auth = (unquote(parsed.username), unquote(parsed.password))
- except (AttributeError, TypeError):
- auth = ('', '')
-
- return auth
-
-
+ parsed = urlparse(url)
+
+ try:
+ auth = (unquote(parsed.username), unquote(parsed.password))
+ except (AttributeError, TypeError):
+ auth = ('', '')
+
+ return auth
+
+
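(Illustrative sketch: credentials are percent-decoded, and a URL without credentials yields empty strings rather than raising.)

    >>> from requests.utils import get_auth_from_url
    >>> get_auth_from_url('https://user:p%40ss@example.com/path')
    ('user', 'p@ss')
    >>> get_auth_from_url('https://example.com/path')
    ('', '')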
# Moved outside of function to avoid recompile every call
_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$')
_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$')
@@ -1013,12 +1013,12 @@ def check_header_validity(header):
header injection.
:param header: tuple, in the format (name, value).
- """
+ """
name, value = header
-
+
if isinstance(value, bytes):
pat = _CLEAN_HEADER_REGEX_BYTE
- else:
+ else:
pat = _CLEAN_HEADER_REGEX_STR
try:
if not pat.match(value):
@@ -1026,23 +1026,23 @@ def check_header_validity(header):
except TypeError:
raise InvalidHeader("Value for header {%s: %s} must be of type str or "
"bytes, not %s" % (name, value, type(value)))
-
-
-def urldefragauth(url):
- """
+
+
+def urldefragauth(url):
+ """
Given a URL, remove the fragment and the authentication part.
:rtype: str
- """
- scheme, netloc, path, params, query, fragment = urlparse(url)
-
- # see func:`prepend_scheme_if_needed`
- if not netloc:
- netloc, path = path, netloc
-
- netloc = netloc.rsplit('@', 1)[-1]
-
- return urlunparse((scheme, netloc, path, params, query, ''))
+ """
+ scheme, netloc, path, params, query, fragment = urlparse(url)
+
+ # see func:`prepend_scheme_if_needed`
+ if not netloc:
+ netloc, path = path, netloc
+
+ netloc = netloc.rsplit('@', 1)[-1]
+
+ return urlunparse((scheme, netloc, path, params, query, ''))
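(Illustrative sketch: both the userinfo and the fragment are dropped.)

    >>> from requests.utils import urldefragauth
    >>> urldefragauth('https://user:pass@example.com/path#section')
    'https://example.com/path'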
def rewind_body(prepared_request):
diff --git a/contrib/python/requests/ya.make b/contrib/python/requests/ya.make
index aceaa1f30f..f971752d75 100644
--- a/contrib/python/requests/ya.make
+++ b/contrib/python/requests/ya.make
@@ -1,11 +1,11 @@
# Generated by devtools/yamaker (pypi).
PY23_LIBRARY()
-
+
OWNER(g:python-contrib)
VERSION(2.27.1)
-
+
LICENSE(Apache-2.0)
PEERDIR(
@@ -26,32 +26,32 @@ ENDIF()
NO_LINT()
-PY_SRCS(
- TOP_LEVEL
+PY_SRCS(
+ TOP_LEVEL
requests/__init__.py
requests/__version__.py
requests/_internal_utils.py
- requests/adapters.py
- requests/api.py
- requests/auth.py
- requests/certs.py
+ requests/adapters.py
+ requests/api.py
+ requests/auth.py
+ requests/certs.py
requests/compat.py
requests/cookies.py
requests/exceptions.py
requests/help.py
- requests/hooks.py
- requests/models.py
+ requests/hooks.py
+ requests/models.py
requests/packages.py
- requests/sessions.py
+ requests/sessions.py
requests/status_codes.py
requests/structures.py
requests/utils.py
-)
-
+)
+
RESOURCE_FILES(
PREFIX contrib/python/requests/
.dist-info/METADATA
.dist-info/top_level.txt
)
-END()
+END()
diff --git a/contrib/python/six/six.py b/contrib/python/six/six.py
index a35fd4e78d..4cba03c75f 100644
--- a/contrib/python/six/six.py
+++ b/contrib/python/six/six.py
@@ -23,7 +23,7 @@
from __future__ import absolute_import
import functools
-import itertools
+import itertools
import operator
import sys
import types
@@ -35,7 +35,7 @@ __version__ = "1.16.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
-PY34 = sys.version_info[0:2] >= (3, 4)
+PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = str,
@@ -58,7 +58,7 @@ else:
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
-
+
def __len__(self):
return 1 << 31
try:
@@ -95,13 +95,13 @@ class _LazyDescr(object):
def __get__(self, obj, tp):
result = self._resolve()
- setattr(obj, self.name, result) # Invokes __set__.
- try:
- # This is a bit ugly, but it avoids running this again by
- # removing this descriptor.
- delattr(obj.__class__, self.name)
- except AttributeError:
- pass
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
return result
@@ -167,14 +167,14 @@ class MovedAttribute(_LazyDescr):
class _SixMetaPathImporter(object):
-
+
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python3
"""
-
+
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
@@ -243,7 +243,7 @@ _importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
-
+
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
@@ -255,11 +255,11 @@ _moved_attributes = [
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
- MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
- MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("getoutput", "commands", "subprocess"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
@@ -319,12 +319,12 @@ _moved_attributes = [
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
-# Add windows specific modules.
-if sys.platform == "win32":
- _moved_attributes += [
- MovedModule("winreg", "_winreg"),
- ]
-
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
@@ -338,7 +338,7 @@ _importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
-
+
"""Lazy loading of moved objects in six.moves.urllib_parse"""
@@ -380,7 +380,7 @@ _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_pa
class Module_six_moves_urllib_error(_LazyModule):
-
+
"""Lazy loading of moved objects in six.moves.urllib_error"""
@@ -400,7 +400,7 @@ _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.er
class Module_six_moves_urllib_request(_LazyModule):
-
+
"""Lazy loading of moved objects in six.moves.urllib_request"""
@@ -452,7 +452,7 @@ _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.
class Module_six_moves_urllib_response(_LazyModule):
-
+
"""Lazy loading of moved objects in six.moves.urllib_response"""
@@ -473,7 +473,7 @@ _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib
class Module_six_moves_urllib_robotparser(_LazyModule):
-
+
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
@@ -491,7 +491,7 @@ _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.url
class Module_six_moves_urllib(types.ModuleType):
-
+
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
@@ -562,9 +562,9 @@ if PY3:
create_bound_method = types.MethodType
- def create_unbound_method(func, cls):
- return func
-
+ def create_unbound_method(func, cls):
+ return func
+
Iterator = object
else:
def get_unbound_function(unbound):
@@ -573,9 +573,9 @@ else:
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
- def create_unbound_method(func, cls):
- return types.MethodType(func, None, cls)
-
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
class Iterator(object):
def next(self):
@@ -606,31 +606,31 @@ if PY3:
def iterlists(d, **kw):
return iter(d.lists(**kw))
-
- viewkeys = operator.methodcaller("keys")
-
- viewvalues = operator.methodcaller("values")
-
- viewitems = operator.methodcaller("items")
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
- return d.iterkeys(**kw)
+ return d.iterkeys(**kw)
def itervalues(d, **kw):
- return d.itervalues(**kw)
+ return d.itervalues(**kw)
def iteritems(d, **kw):
- return d.iteritems(**kw)
+ return d.iteritems(**kw)
def iterlists(d, **kw):
- return d.iterlists(**kw)
-
- viewkeys = operator.methodcaller("viewkeys")
-
- viewvalues = operator.methodcaller("viewvalues")
-
- viewitems = operator.methodcaller("viewitems")
-
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
@@ -642,13 +642,13 @@ _add_doc(iterlists,
if PY3:
def b(s):
return s.encode("latin-1")
-
+
def u(s):
return s
unichr = chr
- import struct
- int2byte = struct.Struct(">B").pack
- del struct
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
@@ -656,53 +656,53 @@ if PY3:
StringIO = io.StringIO
BytesIO = io.BytesIO
del io
- _assertCountEqual = "assertCountEqual"
- if sys.version_info[1] <= 1:
- _assertRaisesRegex = "assertRaisesRegexp"
- _assertRegex = "assertRegexpMatches"
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
_assertNotRegex = "assertNotRegexpMatches"
- else:
- _assertRaisesRegex = "assertRaisesRegex"
- _assertRegex = "assertRegex"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
_assertNotRegex = "assertNotRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
-
+
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
-
+
def byte2int(bs):
return ord(bs[0])
-
+
def indexbytes(buf, i):
return ord(buf[i])
- iterbytes = functools.partial(itertools.imap, ord)
+ iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
- _assertCountEqual = "assertItemsEqual"
- _assertRaisesRegex = "assertRaisesRegexp"
- _assertRegex = "assertRegexpMatches"
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
_assertNotRegex = "assertNotRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
-def assertCountEqual(self, *args, **kwargs):
- return getattr(self, _assertCountEqual)(*args, **kwargs)
-
-
-def assertRaisesRegex(self, *args, **kwargs):
- return getattr(self, _assertRaisesRegex)(*args, **kwargs)
-
-
-def assertRegex(self, *args, **kwargs):
- return getattr(self, _assertRegex)(*args, **kwargs)
-
-
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
def assertNotRegex(self, *args, **kwargs):
return getattr(self, _assertNotRegex)(*args, **kwargs)
@@ -743,17 +743,17 @@ else:
if sys.version_info[:2] > (3,):
- exec_("""def raise_from(value, from_value):
+ exec_("""def raise_from(value, from_value):
try:
raise value from from_value
finally:
value = None
-""")
-else:
- def raise_from(value, from_value):
- raise value
-
-
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
+
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
@@ -761,14 +761,14 @@ if print_ is None:
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
-
+
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
- isinstance(data, unicode) and
- fp.encoding is not None):
+ isinstance(data, unicode) and
+ fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
@@ -809,16 +809,16 @@ if print_ is None:
write(sep)
write(arg)
write(end)
-if sys.version_info[:2] < (3, 3):
- _print = print_
-
- def print_(*args, **kwargs):
- fp = kwargs.get("file", sys.stdout)
- flush = kwargs.pop("flush", False)
- _print(*args, **kwargs)
- if flush and fp is not None:
- fp.flush()
-
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
@@ -852,14 +852,14 @@ if sys.version_info[0:2] < (3, 4):
else:
wraps = functools.wraps
-
+
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(type):
-
+
def __new__(cls, name, this_bases, d):
if sys.version_info[:2] >= (3, 7):
# This version introduced PEP 560 that requires a bit
@@ -894,7 +894,7 @@ def add_metaclass(metaclass):
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
-
+
def ensure_binary(s, encoding='utf-8', errors='strict'):
"""Coerce **s** to six.binary_type.
@@ -955,24 +955,24 @@ def ensure_text(s, encoding='utf-8', errors='strict'):
raise TypeError("not expecting type '%s'" % type(s))
-def python_2_unicode_compatible(klass):
- """
+def python_2_unicode_compatible(klass):
+ """
A class decorator that defines __unicode__ and __str__ methods under Python 2.
- Under Python 3 it does nothing.
-
- To support Python 2 and 3 with a single code base, define a __str__ method
- returning text and apply this decorator to the class.
- """
- if PY2:
- if '__str__' not in klass.__dict__:
- raise ValueError("@python_2_unicode_compatible cannot be applied "
- "to %s because it doesn't define __str__()." %
- klass.__name__)
- klass.__unicode__ = klass.__str__
- klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
- return klass
-
-
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
+
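(Illustrative sketch of the decorator above; the class name is hypothetical. Under Python 2 the decorator aliases __unicode__ to the defined __str__ and re-points __str__ at a UTF-8-encoding wrapper; under Python 3 it is a no-op.)

    import six

    @six.python_2_unicode_compatible
    class Greeting(object):
        def __str__(self):
            return u'h\xe9llo'  # always return text here, never bytes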
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
@@ -990,7 +990,7 @@ if sys.meta_path:
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
- importer.name == __name__):
+ importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
diff --git a/contrib/python/six/ya.make b/contrib/python/six/ya.make
index 2d0d30959c..e0c7849214 100644
--- a/contrib/python/six/ya.make
+++ b/contrib/python/six/ya.make
@@ -17,8 +17,8 @@ RESOURCE_FILES(
.dist-info/top_level.txt
)
-NO_LINT()
-
+NO_LINT()
+
END()
RECURSE_FOR_TESTS(
diff --git a/contrib/python/traitlets/ya.make b/contrib/python/traitlets/ya.make
index d323b115d4..3156aae8c5 100644
--- a/contrib/python/traitlets/ya.make
+++ b/contrib/python/traitlets/ya.make
@@ -10,8 +10,8 @@ ELSE()
PEERDIR(contrib/python/traitlets/py3)
ENDIF()
-NO_LINT()
-
+NO_LINT()
+
END()
RECURSE(
diff --git a/contrib/python/ya.make b/contrib/python/ya.make
index 6580204645..d01ced9f3a 100644
--- a/contrib/python/ya.make
+++ b/contrib/python/ya.make
@@ -102,7 +102,7 @@ RECURSE(
bandit
bcrypt
beautifulsoup4
- behave
+ behave
betamax
betamax-serializers
billiard
@@ -144,7 +144,7 @@ RECURSE(
chardet
charset-normalizer
channels
- chart-studio
+ chart-studio
CherryPy
cherrypy-cors
ciso8601
@@ -198,7 +198,7 @@ RECURSE(
dataclasses-json
datadiff
dateparser
- dateutil
+ dateutil
DAWG-Python
dbf_light
debian-inspector
@@ -344,7 +344,7 @@ RECURSE(
escapism
etcd3
excel-formulas-calculator
- execnet
+ execnet
executing
ExifRead
extractcode
@@ -585,7 +585,7 @@ RECURSE(
legacycontour
license-expression
line_profiler
- linecache2
+ linecache2
llist
lmdb
localshop
@@ -597,7 +597,7 @@ RECURSE(
LunarCalendar
lunardate
lunr
- lxml
+ lxml
lz4
M2Crypto
m3u8
@@ -729,7 +729,7 @@ RECURSE(
pdbpp
pdfminer.six
pecan
- peewee
+ peewee
peewee/playhouse
pefile
pem
@@ -795,7 +795,7 @@ RECURSE(
pybreaker
pycares
pycbrf
- pycodestyle
+ pycodestyle
pycollada
pycountry
pycparser
@@ -813,7 +813,7 @@ RECURSE(
pyelftools
pyelftools/readelf
pyfakefs
- pyflakes
+ pyflakes
pyfst
pygit2
PyGithub
@@ -868,7 +868,7 @@ RECURSE(
pytest-falcon-client
pytest-fixture-config
pytest-flake8
- pytest-flakes
+ pytest-flakes
pytest-flask
pytest-forked
pytest-freezegun
@@ -918,7 +918,7 @@ RECURSE(
python3-saml
pytils
pytracemalloc
- pytz
+ pytz
pyudev
pyusb
PyWavelets
@@ -940,7 +940,7 @@ RECURSE(
reportlab
repoze.lru
repr
- requests
+ requests
requests-file
requests-mock
requests-oauthlib
@@ -969,7 +969,7 @@ RECURSE(
sanic-testing
scales
scancode-toolkit
- scandir
+ scandir
schedule
schema
schematics
@@ -999,7 +999,7 @@ RECURSE(
simplegeneric
simplejson
singledispatch
- six
+ six
skynet_pyro4
slack-sdk
slackclient
diff --git a/contrib/tools/ya.make b/contrib/tools/ya.make
index 76beda7179..750911c587 100644
--- a/contrib/tools/ya.make
+++ b/contrib/tools/ya.make
@@ -52,7 +52,7 @@ RECURSE(
ycmd
zookeeper
jdk
- jdk/test
+ jdk/test
xmllint
)
diff --git a/library/cpp/blockcodecs/ut/ya.make b/library/cpp/blockcodecs/ut/ya.make
index 9164656b63..25b882c15b 100644
--- a/library/cpp/blockcodecs/ut/ya.make
+++ b/library/cpp/blockcodecs/ut/ya.make
@@ -10,8 +10,8 @@ SPLIT_FACTOR(40)
TIMEOUT(300)
-SIZE(MEDIUM)
-
+SIZE(MEDIUM)
+
SRCS(
codecs_ut.cpp
)
diff --git a/library/cpp/codecs/static/tools/tests/ya.make b/library/cpp/codecs/static/tools/tests/ya.make
index 19086ef2d3..c5324eaf53 100644
--- a/library/cpp/codecs/static/tools/tests/ya.make
+++ b/library/cpp/codecs/static/tools/tests/ya.make
@@ -8,8 +8,8 @@ DATA(sbr://143310406)
TIMEOUT(4200)
-TAG(ya:not_autocheck)
-
+TAG(ya:not_autocheck)
+
DEPENDS(
library/cpp/codecs/static/tools/static_codec_checker
library/cpp/codecs/static/tools/static_codec_generator
diff --git a/library/cpp/messagebus/rain_check/test/TestRainCheck.py b/library/cpp/messagebus/rain_check/test/TestRainCheck.py
index 398ae8e2a6..92ed727b62 100644
--- a/library/cpp/messagebus/rain_check/test/TestRainCheck.py
+++ b/library/cpp/messagebus/rain_check/test/TestRainCheck.py
@@ -1,5 +1,5 @@
-from devtools.fleur.ytest import group, constraint
-from devtools.fleur.ytest.integration import UnitTestGroup
+from devtools.fleur.ytest import group, constraint
+from devtools.fleur.ytest.integration import UnitTestGroup
@group
@constraint('library.messagebus')
diff --git a/library/cpp/messagebus/test/TestMessageBus.py b/library/cpp/messagebus/test/TestMessageBus.py
index 01b84ae11e..0bbaa0a313 100644
--- a/library/cpp/messagebus/test/TestMessageBus.py
+++ b/library/cpp/messagebus/test/TestMessageBus.py
@@ -1,5 +1,5 @@
-from devtools.fleur.ytest import group, constraint
-from devtools.fleur.ytest.integration import UnitTestGroup
+from devtools.fleur.ytest import group, constraint
+from devtools.fleur.ytest.integration import UnitTestGroup
@group
@constraint('library.messagebus')
diff --git a/library/cpp/testing/common/env.cpp b/library/cpp/testing/common/env.cpp
index 749a5752ed..fa3a47fe16 100644
--- a/library/cpp/testing/common/env.cpp
+++ b/library/cpp/testing/common/env.cpp
@@ -73,10 +73,10 @@ TString GetWorkPath() {
return NPrivate::GetCwd();
}
-TFsPath GetOutputPath() {
- return GetWorkPath() + "/testing_out_stuff";
-}
-
+TFsPath GetOutputPath() {
+ return GetWorkPath() + "/testing_out_stuff";
+}
+
const TString& GetRamDrivePath() {
return NPrivate::GetTestEnv().RamDrivePath;
}
diff --git a/library/cpp/testing/unittest/fat/test_port_manager.cpp b/library/cpp/testing/unittest/fat/test_port_manager.cpp
index 97284f92c8..f77d2e3a25 100644
--- a/library/cpp/testing/unittest/fat/test_port_manager.cpp
+++ b/library/cpp/testing/unittest/fat/test_port_manager.cpp
@@ -13,7 +13,7 @@ bool IsFreePort(ui16 port) {
}
void get_port_ranges() {
- for (int i = 1; i < 10; ++i) {
+ for (int i = 1; i < 10; ++i) {
TPortManager pm;
ui16 port = pm.GetPortsRange(1024, i);
for (int p = port; p < port + i; ++p) {
diff --git a/library/cpp/testing/unittest/registar.cpp b/library/cpp/testing/unittest/registar.cpp
index f350beba9d..3679b768ed 100644
--- a/library/cpp/testing/unittest/registar.cpp
+++ b/library/cpp/testing/unittest/registar.cpp
@@ -9,12 +9,12 @@
#include <util/system/backtrace.h>
#include <util/system/guard.h>
#include <util/system/tls.h>
-#include <util/system/error.h>
+#include <util/system/error.h>
#include <util/string/cast.h>
-bool NUnitTest::ShouldColorizeDiff = true;
+bool NUnitTest::ShouldColorizeDiff = true;
bool NUnitTest::ContinueOnFail = false;
-
+
TString NUnitTest::RandomString(size_t len, ui32 seed) {
TReallyFastRng32 rand(seed);
TString ret;
@@ -107,34 +107,34 @@ struct TDiffColorizer {
};
struct TTraceDiffFormatter {
- bool Reverse = false;
-
+ bool Reverse = false;
+
explicit TTraceDiffFormatter(bool reverse = false)
- : Reverse(reverse)
+ : Reverse(reverse)
{
}
-
+
TString Special(TStringBuf str) const {
return ToString(str);
- }
-
+ }
+
TString Common(TArrayRef<const char> str) const {
return TString(str.begin(), str.end());
- }
-
+ }
+
TString Left(TArrayRef<const char> str) const {
return NUnitTest::GetFormatTag("good") +
TString(str.begin(), str.end()) +
NUnitTest::GetResetTag();
- }
-
+ }
+
TString Right(TArrayRef<const char> str) const {
return NUnitTest::GetFormatTag("bad") +
TString(str.begin(), str.end()) +
NUnitTest::GetResetTag();
- }
-};
-
+ }
+};
+
TString NUnitTest::GetFormatTag(const char* name) {
return Sprintf("[[%s]]", name);
}
@@ -147,12 +147,12 @@ TString NUnitTest::ColoredDiff(TStringBuf s1, TStringBuf s2, const TString& deli
TStringStream res;
TVector<NDiff::TChunk<char>> chunks;
NDiff::InlineDiff(chunks, s1, s2, delims);
- if (NUnitTest::ShouldColorizeDiff) {
- NDiff::PrintChunks(res, TDiffColorizer(reverse), chunks);
+ if (NUnitTest::ShouldColorizeDiff) {
+ NDiff::PrintChunks(res, TDiffColorizer(reverse), chunks);
} else {
res << NUnitTest::GetResetTag();
NDiff::PrintChunks(res, TTraceDiffFormatter(reverse), chunks);
- }
+ }
return res.Str();
}
@@ -308,7 +308,7 @@ void NUnitTest::TTestBase::SetUp() {
void NUnitTest::TTestBase::TearDown() {
}
-void NUnitTest::TTestBase::AddError(const char* msg, const TString& backtrace, TTestContext* context) {
+void NUnitTest::TTestBase::AddError(const char* msg, const TString& backtrace, TTestContext* context) {
++TestErrors_;
const NUnitTest::ITestSuiteProcessor::TUnit unit = {Name()};
const NUnitTest::ITestSuiteProcessor::TTest test = {&unit, CurrentSubtest_};
@@ -317,7 +317,7 @@ void NUnitTest::TTestBase::AddError(const char* msg, const TString& backtrace, T
Processor()->Error(err);
}
-void NUnitTest::TTestBase::AddError(const char* msg, TTestContext* context) {
+void NUnitTest::TTestBase::AddError(const char* msg, TTestContext* context) {
AddError(msg, TString(), context);
}
@@ -334,17 +334,17 @@ bool NUnitTest::TTestBase::CheckAccessTest(const char* test) {
void NUnitTest::TTestBase::BeforeTest(const char* func) {
const NUnitTest::ITestSuiteProcessor::TUnit unit = {Name()};
const NUnitTest::ITestSuiteProcessor::TTest test = {&unit, func};
- rusage.Fill();
+ rusage.Fill();
Processor()->BeforeTest(test);
}
-void NUnitTest::TTestBase::Finish(const char* func, TTestContext* context) {
- TRusage finishRusage = TRusage::Get();
+void NUnitTest::TTestBase::Finish(const char* func, TTestContext* context) {
+ TRusage finishRusage = TRusage::Get();
context->Metrics["ru_rss"] = finishRusage.MaxRss - rusage.MaxRss;
- context->Metrics["ru_major_pagefaults"] = finishRusage.MajorPageFaults - rusage.MajorPageFaults;
- context->Metrics["ru_utime"] = (finishRusage.Utime - rusage.Utime).MicroSeconds();
- context->Metrics["ru_stime"] = (finishRusage.Stime - rusage.Stime).MicroSeconds();
-
+ context->Metrics["ru_major_pagefaults"] = finishRusage.MajorPageFaults - rusage.MajorPageFaults;
+ context->Metrics["ru_utime"] = (finishRusage.Utime - rusage.Utime).MicroSeconds();
+ context->Metrics["ru_stime"] = (finishRusage.Stime - rusage.Stime).MicroSeconds();
+
const NUnitTest::ITestSuiteProcessor::TUnit unit = {Name()};
const NUnitTest::ITestSuiteProcessor::TTest test = {&unit, func};
const NUnitTest::ITestSuiteProcessor::TFinish finish = {&test, context, TestErrors_ == 0};
diff --git a/library/cpp/testing/unittest/registar.h b/library/cpp/testing/unittest/registar.h
index 12de69287d..44517a0092 100644
--- a/library/cpp/testing/unittest/registar.h
+++ b/library/cpp/testing/unittest/registar.h
@@ -21,8 +21,8 @@
#include <util/system/spinlock.h>
#include <util/system/src_location.h>
-#include <util/system/rusage.h>
-
+#include <util/system/rusage.h>
+
#include <cmath>
#include <cstdio>
#include <functional>
@@ -39,7 +39,7 @@ namespace NUnitTest {
TTestBase* GetCurrentTest();
}
- extern bool ShouldColorizeDiff;
+ extern bool ShouldColorizeDiff;
extern bool ContinueOnFail;
TString ColoredDiff(TStringBuf s1, TStringBuf s2, const TString& delims = TString(), bool reverse = false);
TString GetFormatTag(const char* name);
@@ -94,12 +94,12 @@ namespace NUnitTest {
const TTest* test;
const char* msg;
TString BackTrace;
- TTestContext* Context;
+ TTestContext* Context;
};
struct TFinish {
const TTest* test;
- TTestContext* Context;
+ TTestContext* Context;
bool Success;
};
@@ -186,7 +186,7 @@ namespace NUnitTest {
class TTestBase {
friend class TTestFactory;
- TRusage rusage;
+ TRusage rusage;
public:
TTestBase() noexcept;
@@ -202,9 +202,9 @@ namespace NUnitTest {
virtual void TearDown();
- void AddError(const char* msg, const TString& backtrace = TString(), TTestContext* context = nullptr);
+ void AddError(const char* msg, const TString& backtrace = TString(), TTestContext* context = nullptr);
- void AddError(const char* msg, TTestContext* context);
+ void AddError(const char* msg, TTestContext* context);
void RunAfterTest(std::function<void()> f); // function like atexit to run after current unit test
@@ -213,7 +213,7 @@ namespace NUnitTest {
void BeforeTest(const char* func);
- void Finish(const char* func, TTestContext* context);
+ void Finish(const char* func, TTestContext* context);
void AtStart();
diff --git a/library/cpp/testing/unittest/utmain.cpp b/library/cpp/testing/unittest/utmain.cpp
index 42b296bbf7..305bc6b40f 100644
--- a/library/cpp/testing/unittest/utmain.cpp
+++ b/library/cpp/testing/unittest/utmain.cpp
@@ -50,23 +50,23 @@ const size_t MAX_COMMENT_MESSAGE_LENGTH = 1024 * 1024; // 1 MB
using namespace NUnitTest;
-class TNullTraceWriterProcessor: public ITestSuiteProcessor {
-};
-
-class TTraceWriterProcessor: public ITestSuiteProcessor {
-public:
+class TNullTraceWriterProcessor: public ITestSuiteProcessor {
+};
+
+class TTraceWriterProcessor: public ITestSuiteProcessor {
+public:
inline TTraceWriterProcessor(const char* traceFilePath, EOpenMode mode)
: PrevTime(TInstant::Now())
{
TraceFile = new TUnbufferedFileOutput(TFile(traceFilePath, mode | WrOnly | Seq));
- }
-
-private:
+ }
+
+private:
TAutoPtr<TUnbufferedFileOutput> TraceFile;
TString TraceFilePath;
- TInstant PrevTime;
+ TInstant PrevTime;
TVector<TString> ErrorMessages;
-
+
inline void Trace(const TString eventName, const NJson::TJsonValue eventValue) {
NJsonWriter::TBuf json(NJsonWriter::HEM_UNSAFE);
json.BeginObject();
@@ -79,14 +79,14 @@ private:
json.FlushTo(TraceFile.Get());
*TraceFile << "\n";
- }
-
+ }
+
inline void TraceSubtestFinished(const char* className, const char* subtestName, const char* status, const TString comment, const TTestContext* context) {
- const TInstant now = TInstant::Now();
+ const TInstant now = TInstant::Now();
NJson::TJsonValue event;
- event.InsertValue("class", className);
- event.InsertValue("subtest", subtestName);
- event.InsertValue("status", status);
+ event.InsertValue("class", className);
+ event.InsertValue("subtest", subtestName);
+ event.InsertValue("status", status);
event.InsertValue("comment", comment.data());
event.InsertValue("time", (now - PrevTime).SecondsFloat());
if (context) {
@@ -94,17 +94,17 @@ private:
event["metrics"].InsertValue(metric.first, metric.second);
}
}
- Trace("subtest-finished", event);
+ Trace("subtest-finished", event);
- PrevTime = now;
+ PrevTime = now;
TString marker = Join("", "\n###subtest-finished:", className, "::", subtestName, "\n");
Cout << marker;
Cout.Flush();
Cerr << comment;
Cerr << marker;
Cerr.Flush();
- }
-
+ }
+
virtual TString BuildComment(const char* message, const char* backTrace) {
return NUnitTest::GetFormatTag("bad") +
TString(message).substr(0, MAX_COMMENT_MESSAGE_LENGTH) +
@@ -117,28 +117,28 @@ private:
void OnBeforeTest(const TTest* test) override {
NJson::TJsonValue event;
- event.InsertValue("class", test->unit->name);
- event.InsertValue("subtest", test->name);
- Trace("subtest-started", event);
+ event.InsertValue("class", test->unit->name);
+ event.InsertValue("subtest", test->name);
+ Trace("subtest-started", event);
TString marker = Join("", "\n###subtest-started:", test->unit->name, "::", test->name, "\n");
Cout << marker;
Cout.Flush();
Cerr << marker;
Cerr.Flush();
- }
-
+ }
+
void OnUnitStart(const TUnit* unit) override {
NJson::TJsonValue event;
- event.InsertValue("class", unit->name);
- Trace("test-started", event);
- }
-
+ event.InsertValue("class", unit->name);
+ Trace("test-started", event);
+ }
+
void OnUnitStop(const TUnit* unit) override {
NJson::TJsonValue event;
- event.InsertValue("class", unit->name);
- Trace("test-finished", event);
- }
-
+ event.InsertValue("class", unit->name);
+ Trace("test-finished", event);
+ }
+
void OnError(const TError* descr) override {
const TString comment = BuildComment(descr->msg, descr->BackTrace.data());
ErrorMessages.push_back(comment);
@@ -161,20 +161,20 @@ private:
TraceSubtestFinished(descr->test->unit->name.data(), descr->test->name, "fail", msgs, descr->Context);
ErrorMessages.clear();
}
- }
-};
-
+ }
+};
+
class TColoredProcessor: public ITestSuiteProcessor, public NColorizer::TColors {
public:
inline TColoredProcessor(const TString& appName)
: PrintBeforeSuite_(true)
, PrintBeforeTest_(true)
- , PrintAfterTest_(true)
- , PrintAfterSuite_(true)
+ , PrintAfterTest_(true)
+ , PrintAfterSuite_(true)
, PrintTimes_(false)
- , PrintSummary_(true)
+ , PrintSummary_(true)
, PrevTime_(TInstant::Now())
- , ShowFails(true)
+ , ShowFails(true)
, Start(0)
, End(Max<size_t>())
, AppName(appName)
@@ -182,7 +182,7 @@ public:
, IsForked(false)
, Loop(false)
, ForkExitedCorrectly(false)
- , TraceProcessor(new TNullTraceWriterProcessor())
+ , TraceProcessor(new TNullTraceWriterProcessor())
{
}
@@ -217,30 +217,30 @@ public:
PrintBeforeSuite_ = print;
}
- inline void SetPrintAfterSuite(bool print) {
- PrintAfterSuite_ = print;
- }
-
+ inline void SetPrintAfterSuite(bool print) {
+ PrintAfterSuite_ = print;
+ }
+
inline void SetPrintBeforeTest(bool print) {
PrintBeforeTest_ = print;
}
- inline void SetPrintAfterTest(bool print) {
- PrintAfterTest_ = print;
- }
-
+ inline void SetPrintAfterTest(bool print) {
+ PrintAfterTest_ = print;
+ }
+
inline void SetPrintTimes(bool print) {
PrintTimes_ = print;
}
- inline void SetPrintSummary(bool print) {
- PrintSummary_ = print;
- }
-
- inline bool GetPrintSummary() {
- return PrintSummary_;
- }
-
+ inline void SetPrintSummary(bool print) {
+ PrintSummary_ = print;
+ }
+
+ inline bool GetPrintSummary() {
+ return PrintSummary_;
+ }
+
inline void SetShowFails(bool show) {
ShowFails = show;
}
@@ -250,14 +250,14 @@ public:
}
inline void BeQuiet() {
- SetPrintTimes(false);
- SetPrintBeforeSuite(false);
- SetPrintAfterSuite(false);
- SetPrintBeforeTest(false);
- SetPrintAfterTest(false);
- SetPrintSummary(false);
- }
-
+ SetPrintTimes(false);
+ SetPrintBeforeSuite(false);
+ SetPrintAfterSuite(false);
+ SetPrintBeforeTest(false);
+ SetPrintAfterTest(false);
+ SetPrintSummary(false);
+ }
+
inline void SetStart(size_t val) {
Start = val;
}
@@ -291,13 +291,13 @@ public:
return Loop;
}
- inline void SetTraceProcessor(TAutoPtr<ITestSuiteProcessor> traceProcessor) {
- TraceProcessor = traceProcessor;
- }
-
+ inline void SetTraceProcessor(TAutoPtr<ITestSuiteProcessor> traceProcessor) {
+ TraceProcessor = traceProcessor;
+ }
+
private:
void OnUnitStart(const TUnit* unit) override {
- TraceProcessor->UnitStart(*unit);
+ TraceProcessor->UnitStart(*unit);
if (IsForked) {
return;
}
@@ -307,14 +307,14 @@ private:
}
void OnUnitStop(const TUnit* unit) override {
- TraceProcessor->UnitStop(*unit);
+ TraceProcessor->UnitStop(*unit);
if (IsForked) {
return;
}
- if (!PrintAfterSuite_) {
- return;
- }
-
+ if (!PrintAfterSuite_) {
+ return;
+ }
+
fprintf(stderr, "%s----->%s %s -> ok: %s%u%s",
LightBlueColor().data(), OldColor().data(), unit->name.data(),
LightGreenColor().data(), GoodTestsInCurrentUnit(), OldColor().data());
@@ -326,7 +326,7 @@ private:
}
void OnBeforeTest(const TTest* test) override {
- TraceProcessor->BeforeTest(*test);
+ TraceProcessor->BeforeTest(*test);
if (IsForked) {
return;
}
@@ -336,29 +336,29 @@ private:
}
void OnError(const TError* descr) override {
- TraceProcessor->Error(*descr);
+ TraceProcessor->Error(*descr);
if (!IsForked && ForkExitedCorrectly) {
return;
}
- if (!PrintAfterTest_) {
- return;
- }
-
+ if (!PrintAfterTest_) {
+ return;
+ }
+
const TString err = Sprintf("[%sFAIL%s] %s::%s -> %s%s%s\n%s%s%s", LightRedColor().data(), OldColor().data(),
descr->test->unit->name.data(),
descr->test->name,
LightRedColor().data(), descr->msg, OldColor().data(), LightCyanColor().data(), descr->BackTrace.data(), OldColor().data());
- const TDuration test_duration = SaveTestDuration();
+ const TDuration test_duration = SaveTestDuration();
if (ShowFails) {
- if (PrintTimes_) {
+ if (PrintTimes_) {
Fails.push_back(Sprintf("%s %s", test_duration.ToString().data(), err.data()));
- } else {
- Fails.push_back(err);
- }
+ } else {
+ Fails.push_back(err);
+ }
}
fprintf(stderr, "%s", err.data());
NOTE_IN_VALGRIND(descr->test);
- PrintTimes(test_duration);
+ PrintTimes(test_duration);
if (IsForked) {
fprintf(stderr, "%s", ForkCorrectExitMsg);
}
@@ -369,47 +369,47 @@ private:
if (!IsForked && ForkExitedCorrectly) {
return;
}
- if (!PrintAfterTest_) {
- return;
- }
-
+ if (!PrintAfterTest_) {
+ return;
+ }
+
if (descr->Success) {
fprintf(stderr, "[%sgood%s] %s::%s\n", LightGreenColor().data(), OldColor().data(),
descr->test->unit->name.data(),
descr->test->name);
NOTE_IN_VALGRIND(descr->test);
- PrintTimes(SaveTestDuration());
+ PrintTimes(SaveTestDuration());
if (IsForked) {
fprintf(stderr, "%s", ForkCorrectExitMsg);
}
}
}
- inline TDuration SaveTestDuration() {
- const TInstant now = TInstant::Now();
- TDuration d = now - PrevTime_;
- PrevTime_ = now;
- return d;
- }
-
- inline void PrintTimes(TDuration d) {
+ inline TDuration SaveTestDuration() {
+ const TInstant now = TInstant::Now();
+ TDuration d = now - PrevTime_;
+ PrevTime_ = now;
+ return d;
+ }
+
+ inline void PrintTimes(TDuration d) {
if (!PrintTimes_) {
return;
}
- Cerr << d << "\n";
+ Cerr << d << "\n";
}
void OnEnd() override {
- TraceProcessor->End();
+ TraceProcessor->End();
if (IsForked) {
return;
}
-
- if (!PrintSummary_) {
- return;
- }
-
+
+ if (!PrintSummary_) {
+ return;
+ }
+
fprintf(stderr, "[%sDONE%s] ok: %s%u%s",
YellowColor().data(), OldColor().data(),
LightGreenColor().data(), GoodTests(), OldColor().data());
@@ -513,10 +513,10 @@ private:
private:
bool PrintBeforeSuite_;
bool PrintBeforeTest_;
- bool PrintAfterTest_;
- bool PrintAfterSuite_;
+ bool PrintAfterTest_;
+ bool PrintAfterSuite_;
bool PrintTimes_;
- bool PrintSummary_;
+ bool PrintSummary_;
THashSet<TString> DisabledSuites_;
THashSet<TString> EnabledSuites_;
THashSet<TString> DisabledTests_;
@@ -532,7 +532,7 @@ private:
bool Loop;
static const char* const ForkCorrectExitMsg;
bool ForkExitedCorrectly;
- TAutoPtr<ITestSuiteProcessor> TraceProcessor;
+ TAutoPtr<ITestSuiteProcessor> TraceProcessor;
};
const char* const TColoredProcessor::ForkCorrectExitMsg = "--END--";
@@ -616,12 +616,12 @@ static int DoUsage(const char* progname) {
<< " --print-before-test print each test name before running it\n"
<< " --print-before-suite print each test suite name before running it\n"
<< " --show-fails print a list of all failed tests at the end\n"
- << " --dont-show-fails do not print a list of all failed tests at the end\n"
+ << " --dont-show-fails do not print a list of all failed tests at the end\n"
<< " --continue-on-fail print a message and continue running test suite instead of break\n"
<< " --print-times print wall clock duration of each test\n"
- << " --fork-tests run each test in a separate process\n"
- << " --trace-path path to the trace file to be generated\n"
- << " --trace-path-append path to the trace file to be appended\n";
+ << " --fork-tests run each test in a separate process\n"
+ << " --trace-path path to the trace file to be generated\n"
+ << " --trace-path-append path to the trace file to be appended\n";
return 0;
}
@@ -695,8 +695,8 @@ int NUnitTest::RunMain(int argc, char** argv) {
processor.SetPrintBeforeTest(true);
} else if (strcmp(name, "--show-fails") == 0) {
processor.SetShowFails(true);
- } else if (strcmp(name, "--dont-show-fails") == 0) {
- processor.SetShowFails(false);
+ } else if (strcmp(name, "--dont-show-fails") == 0) {
+ processor.SetShowFails(false);
} else if (strcmp(name, "--continue-on-fail") == 0) {
processor.SetContinueOnFail(true);
} else if (strcmp(name, "--print-times") == 0) {
@@ -713,15 +713,15 @@ int NUnitTest::RunMain(int argc, char** argv) {
processor.SetIsForked(true);
} else if (strcmp(name, "--loop") == 0) {
processor.SetLoop(true);
- } else if (strcmp(name, "--trace-path") == 0) {
- ++i;
- processor.BeQuiet();
- NUnitTest::ShouldColorizeDiff = false;
- processor.SetTraceProcessor(new TTraceWriterProcessor(argv[i], CreateAlways));
+ } else if (strcmp(name, "--trace-path") == 0) {
+ ++i;
+ processor.BeQuiet();
+ NUnitTest::ShouldColorizeDiff = false;
+ processor.SetTraceProcessor(new TTraceWriterProcessor(argv[i], CreateAlways));
} else if (strcmp(name, "--trace-path-append") == 0) {
- ++i;
- processor.BeQuiet();
- NUnitTest::ShouldColorizeDiff = false;
+ ++i;
+ processor.BeQuiet();
+ NUnitTest::ShouldColorizeDiff = false;
processor.SetTraceProcessor(new TTraceWriterProcessor(argv[i], OpenAlways | ForAppend));
} else if (strcmp(name, "--list-path") == 0) {
++i;
@@ -752,7 +752,7 @@ int NUnitTest::RunMain(int argc, char** argv) {
unsigned ret;
for (;;) {
ret = TTestFactory::Instance().Execute();
- if (!processor.GetIsForked() && ret && processor.GetPrintSummary()) {
+ if (!processor.GetIsForked() && ret && processor.GetPrintSummary()) {
Cerr << "SOME TESTS FAILED!!!!" << Endl;
}
diff --git a/library/python/certifi/certifi/__init__.py b/library/python/certifi/certifi/__init__.py
index b025fc3510..5270d206cd 100644
--- a/library/python/certifi/certifi/__init__.py
+++ b/library/python/certifi/certifi/__init__.py
@@ -4,7 +4,7 @@ if hasattr(ssl, 'builtin_cadata'):
from .binary import where
else:
from .source import where
-
+
__all__ = ['where', '__version__']
__version__ = '2020.04.05.2'
diff --git a/library/python/certifi/certifi/binary.py b/library/python/certifi/certifi/binary.py
index 7cd6c020d2..1050e733a3 100644
--- a/library/python/certifi/certifi/binary.py
+++ b/library/python/certifi/certifi/binary.py
@@ -1,6 +1,6 @@
import ssl
-
-
+
+
def builtin_ca():
return None, None, ssl.builtin_cadata()
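
The two certifi hunks above implement a CA-source choice at import time: __init__.py re-exports where from either the binary or the source backend, depending on whether the interpreter exposes ssl.builtin_cadata. A minimal standalone sketch of the same dispatch follows; the fallback path and context usage are illustrative, not the repo's real layout. Note that ssl.builtin_cadata is a non-standard extension, which is exactly why the hasattr guard exists.

    import ssl

    if hasattr(ssl, 'builtin_cadata'):
        # Interpreter ships an embedded CA bundle (mirrors certifi/binary.py above).
        def builtin_ca():
            return None, None, ssl.builtin_cadata()
    else:
        # Hypothetical on-disk fallback; the real package reads it from .source instead.
        def builtin_ca():
            return '/etc/ssl/certs/ca-certificates.crt', None, None  # stand-in path

    # Either triple maps directly onto load_verify_locations(cafile, capath, cadata).
    cafile, capath, cadata = builtin_ca()
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.load_verify_locations(cafile=cafile, capath=capath, cadata=cadata)
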
diff --git a/library/python/filelock/__init__.py b/library/python/filelock/__init__.py
index aea171410d..f81ff67f37 100644
--- a/library/python/filelock/__init__.py
+++ b/library/python/filelock/__init__.py
@@ -5,9 +5,9 @@ import sys
import library.python.windows
-logger = logging.getLogger(__name__)
-
+logger = logging.getLogger(__name__)
+
def set_close_on_exec(stream):
if library.python.windows.on_win():
library.python.windows.set_handle_information(stream, inherit=False)
@@ -16,16 +16,16 @@ def set_close_on_exec(stream):
fcntl.fcntl(stream, fcntl.F_SETFD, fcntl.FD_CLOEXEC)
-class AbstractFileLock(object):
-
+class AbstractFileLock(object):
+
def __init__(self, path):
self.path = path
def acquire(self, blocking=True):
- raise NotImplementedError
+ raise NotImplementedError
def release(self):
- raise NotImplementedError
+ raise NotImplementedError
def __enter__(self):
self.acquire()
@@ -34,17 +34,17 @@ class AbstractFileLock(object):
def __exit__(self, type, value, traceback):
self.release()
-
-class _NixFileLock(AbstractFileLock):
-
- def __init__(self, path):
- super(_NixFileLock, self).__init__(path)
+
+class _NixFileLock(AbstractFileLock):
+
+ def __init__(self, path):
+ super(_NixFileLock, self).__init__(path)
from fcntl import flock, LOCK_EX, LOCK_UN, LOCK_NB
self._locker = lambda lock, blocking: flock(lock, LOCK_EX if blocking else LOCK_EX | LOCK_NB)
self._unlocker = lambda lock: flock(lock, LOCK_UN)
self._lock = open(self.path, 'a')
set_close_on_exec(self._lock)
-
+
def acquire(self, blocking=True):
import errno
try:
@@ -54,69 +54,69 @@ class _NixFileLock(AbstractFileLock):
return False
raise
return True
-
- def release(self):
+
+ def release(self):
self._unlocker(self._lock)
-
+
def __del__(self):
if hasattr(self, "_lock"):
self._lock.close()
-
-
-class _WinFileLock(AbstractFileLock):
- """
- Based on LockFile / UnlockFile from win32 API
- https://msdn.microsoft.com/en-us/library/windows/desktop/aa365202(v=vs.85).aspx
- """
-
- _LOCKED_BYTES_NUM = 1
-
- def __init__(self, path):
- super(_WinFileLock, self).__init__(path)
- self._lock = None
+
+
+class _WinFileLock(AbstractFileLock):
+ """
+ Based on LockFile / UnlockFile from win32 API
+ https://msdn.microsoft.com/en-us/library/windows/desktop/aa365202(v=vs.85).aspx
+ """
+
+ _LOCKED_BYTES_NUM = 1
+
+ def __init__(self, path):
+ super(_WinFileLock, self).__init__(path)
+ self._lock = None
try:
with open(path, 'w') as lock_file:
lock_file.write(" " * self._LOCKED_BYTES_NUM)
except IOError as e:
if e.errno != errno.EACCES or not os.path.isfile(path):
raise
-
+
def acquire(self, blocking=True):
- self._lock = open(self.path)
+ self._lock = open(self.path)
set_close_on_exec(self._lock)
- import time
+ import time
locked = False
- while not locked:
+ while not locked:
locked = library.python.windows.lock_file(self._lock, 0, self._LOCKED_BYTES_NUM, raises=False)
if locked:
return True
if blocking:
- time.sleep(.5)
+ time.sleep(.5)
else:
return False
-
- def release(self):
+
+ def release(self):
if self._lock:
library.python.windows.unlock_file(self._lock, 0, self._LOCKED_BYTES_NUM, raises=False)
self._lock.close()
self._lock = None
-
-
-class FileLock(AbstractFileLock):
-
- def __init__(self, path):
- super(FileLock, self).__init__(path)
-
- if sys.platform.startswith('win'):
- self._lock = _WinFileLock(path)
- else:
- self._lock = _NixFileLock(path)
-
+
+
+class FileLock(AbstractFileLock):
+
+ def __init__(self, path):
+ super(FileLock, self).__init__(path)
+
+ if sys.platform.startswith('win'):
+ self._lock = _WinFileLock(path)
+ else:
+ self._lock = _NixFileLock(path)
+
def acquire(self, blocking=True):
logger.debug('Acquiring filelock (blocking=%s): %s', blocking, self.path)
return self._lock.acquire(blocking)
-
- def release(self):
- logger.debug('Ensuring filelock released: %s', self.path)
- return self._lock.release()
+
+ def release(self):
+ logger.debug('Ensuring filelock released: %s', self.path)
+ return self._lock.release()
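
Taken together, the classes above give one cross-platform API: FileLock picks _WinFileLock or _NixFileLock at construction, and AbstractFileLock supplies the context-manager protocol. A short usage sketch, assuming the module imports as library.python.filelock and using an illustrative lock path:

    import library.python.filelock

    lock_path = '/tmp/demo.lock'  # illustrative path

    # Blocking use: __enter__ calls acquire(), __exit__ calls release(),
    # so at most one process is inside the block at a time.
    with library.python.filelock.FileLock(lock_path):
        pass  # critical section

    # Non-blocking use: acquire(blocking=False) returns False instead of waiting
    # (flock(LOCK_EX | LOCK_NB) on POSIX, polling LockFile on Windows).
    lock = library.python.filelock.FileLock(lock_path)
    if lock.acquire(blocking=False):
        try:
            pass  # got the lock
        finally:
            lock.release()
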
diff --git a/library/python/filelock/ut/lib/test_filelock.py b/library/python/filelock/ut/lib/test_filelock.py
index 4c985022d4..1b11d89123 100644
--- a/library/python/filelock/ut/lib/test_filelock.py
+++ b/library/python/filelock/ut/lib/test_filelock.py
@@ -1,13 +1,13 @@
-import os
-import time
-import logging
-import multiprocessing
+import os
+import time
+import logging
+import multiprocessing
import tempfile
import threading
-
+
import library.python.filelock
-
-
+
+
def _acquire_lock(lock_path, out_file_path):
with library.python.filelock.FileLock(lock_path):
with open(out_file_path, "a") as out:
@@ -15,21 +15,21 @@ def _acquire_lock(lock_path, out_file_path):
time.sleep(2)
-def test_filelock():
+def test_filelock():
temp_dir = tempfile.mkdtemp()
lock_path = os.path.join(temp_dir, "file.lock")
out_file_path = os.path.join(temp_dir, "out.txt")
-
+
process_count = 5
processes = []
for i in range(process_count):
process = multiprocessing.Process(target=_acquire_lock, args=(lock_path, out_file_path))
process.start()
processes.append(process)
-
+
for process in processes:
process.join()
-
+
pids = []
times = []
with open(out_file_path) as out:
@@ -39,7 +39,7 @@ def test_filelock():
pid, time_val = line.split(":")
pids.append(pid)
times.append(float(time_val))
-
+
assert len(set(pids)) == process_count
time1 = times.pop()
while times:
diff --git a/library/python/find_root/__init__.py b/library/python/find_root/__init__.py
index a53252c6e7..6da604d62e 100644
--- a/library/python/find_root/__init__.py
+++ b/library/python/find_root/__init__.py
@@ -1,20 +1,20 @@
-import os
-
-
-def is_root(path):
+import os
+
+
+def is_root(path):
return os.path.exists(os.path.join(path, ".arcadia.root")) or os.path.exists(os.path.join(path, 'devtools', 'ya', 'ya.conf.json'))
-
-
-def detect_root(path, detector=is_root):
- return _find_path(path, detector)
-
-
-def _find_path(starts_from, check):
- p = os.path.realpath(starts_from)
- while True:
- if check(p):
- return p
- next_p = os.path.dirname(p)
- if next_p == p:
- return None
- p = next_p
+
+
+def detect_root(path, detector=is_root):
+ return _find_path(path, detector)
+
+
+def _find_path(starts_from, check):
+ p = os.path.realpath(starts_from)
+ while True:
+ if check(p):
+ return p
+ next_p = os.path.dirname(p)
+ if next_p == p:
+ return None
+ p = next_p
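
detect_root above is just _find_path with a pluggable predicate: it walks from the starting directory up to the filesystem root and returns the first directory the detector accepts, or None if it runs out of parents. A sketch with the default detector and a hypothetical custom one:

    import os

    import library.python.find_root as find_root

    # Default detector: a directory containing .arcadia.root or devtools/ya/ya.conf.json.
    repo_root = find_root.detect_root(os.getcwd())

    # Any (path) -> bool callable works as the detector; this one is illustrative.
    def is_git_root(path):
        return os.path.isdir(os.path.join(path, '.git'))

    git_root = find_root.detect_root(os.getcwd(), detector=is_git_root)  # None if not found
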
diff --git a/library/python/find_root/ya.make b/library/python/find_root/ya.make
index a672093d96..beaa8e3c52 100644
--- a/library/python/find_root/ya.make
+++ b/library/python/find_root/ya.make
@@ -1,7 +1,7 @@
PY23_LIBRARY()
-
+
OWNER(g:yatool)
-
+
PY_SRCS(__init__.py)
-
-END()
+
+END()
diff --git a/library/python/fs/__init__.py b/library/python/fs/__init__.py
index 06dced90d8..b1b7cde079 100644
--- a/library/python/fs/__init__.py
+++ b/library/python/fs/__init__.py
@@ -14,15 +14,15 @@ import library.python.func
import library.python.strings
import library.python.windows
-logger = logging.getLogger(__name__)
-
-
-try:
- WindowsError
-except NameError:
- WindowsError = None
-
-
+logger = logging.getLogger(__name__)
+
+
+try:
+ WindowsError
+except NameError:
+ WindowsError = None
+
+
_diehard_win_tries = 10
errorfix_win = library.python.windows.errorfix
@@ -127,10 +127,10 @@ def remove_dir(path):
os.rmdir(path)
-def fix_path_encoding(path):
+def fix_path_encoding(path):
return library.python.strings.to_str(path, library.python.strings.fs_encoding())
-
-
+
+
# File/directory remove
# Non-atomic
# Throws OSError, AssertionError
@@ -140,8 +140,8 @@ def remove_tree(path):
def rmtree(path):
if library.python.windows.on_win():
library.python.windows.rmtree(path)
- else:
- shutil.rmtree(fix_path_encoding(path))
+ else:
+ shutil.rmtree(fix_path_encoding(path))
st = os.lstat(path)
if stat.S_ISLNK(st.st_mode) or stat.S_ISREG(st.st_mode):
@@ -161,7 +161,7 @@ def remove_tree_safe(path):
if stat.S_ISLNK(st.st_mode) or stat.S_ISREG(st.st_mode):
os.remove(path)
elif stat.S_ISDIR(st.st_mode):
- shutil.rmtree(fix_path_encoding(path), ignore_errors=True)
+ shutil.rmtree(fix_path_encoding(path), ignore_errors=True)
# XXX
except UnicodeDecodeError as e:
logging.exception(u'remove_tree_safe with argument %s raise exception: %s', path, e)
@@ -196,11 +196,11 @@ def hardlink(src, lnk):
os.link(src, lnk)
-@errorfix_win
-def hardlink_or_copy(src, lnk):
- def should_fallback_to_copy(exc):
- if WindowsError is not None and isinstance(exc, WindowsError) and exc.winerror == 1142: # too many hardlinks
- return True
+@errorfix_win
+def hardlink_or_copy(src, lnk):
+ def should_fallback_to_copy(exc):
+ if WindowsError is not None and isinstance(exc, WindowsError) and exc.winerror == 1142: # too many hardlinks
+ return True
# cross-device hardlink or too many hardlinks, or some known WSL error
if isinstance(exc, OSError) and exc.errno in (
errno.EXDEV,
@@ -209,19 +209,19 @@ def hardlink_or_copy(src, lnk):
errno.EACCES,
errno.EPERM,
):
- return True
- return False
-
- try:
- hardlink(src, lnk)
- except Exception as e:
+ return True
+ return False
+
+ try:
+ hardlink(src, lnk)
+ except Exception as e:
logger.debug('Failed to hardlink %s to %s with error %s, will copy it', src, lnk, repr(e))
- if should_fallback_to_copy(e):
+ if should_fallback_to_copy(e):
copy2(src, lnk, follow_symlinks=False)
- else:
- raise
-
-
+ else:
+ raise
+
+
# Atomic file/directory symlink (Unix only)
# Dst must not exist
# Throws OSError
@@ -259,7 +259,7 @@ def hardlink_tree(src, dst):
if os.path.isfile(src):
hardlink(src, dst)
return
- for dirpath, _, filenames in walk_relative(src):
+ for dirpath, _, filenames in walk_relative(src):
src_dirpath = os.path.join(src, dirpath) if dirpath != '.' else src
dst_dirpath = os.path.join(dst, dirpath) if dirpath != '.' else dst
os.mkdir(dst_dirpath)
@@ -270,20 +270,20 @@ def hardlink_tree(src, dst):
# File copy
# throws EnvironmentError (OSError, IOError)
@errorfix_win
-def copy_file(src, dst, copy_function=shutil.copy2):
+def copy_file(src, dst, copy_function=shutil.copy2):
if os.path.isdir(dst):
raise CustomFsError(errno.EISDIR, filename=dst)
- copy_function(src, dst)
+ copy_function(src, dst)
# File/directory copy
# throws EnvironmentError (OSError, IOError, shutil.Error)
@errorfix_win
-def copy_tree(src, dst, copy_function=shutil.copy2):
+def copy_tree(src, dst, copy_function=shutil.copy2):
if os.path.isfile(src):
- copy_file(src, dst, copy_function=copy_function)
+ copy_file(src, dst, copy_function=copy_function)
return
- copytree3(src, dst, copy_function=copy_function)
+ copytree3(src, dst, copy_function=copy_function)
# File read
@@ -356,7 +356,7 @@ def get_tree_size(path, recursive=False, raise_all_errors=False):
except OSError as e:
if raise_all_errors:
raise
- logger.debug("Cannot calculate file size: %s", e)
+ logger.debug("Cannot calculate file size: %s", e)
if not recursive:
break
return total_size
@@ -447,25 +447,25 @@ def copytree3(
copy_function(srcname, dstname)
# catch the Error from the recursive copytree3 so that we can
# continue with other files
- except shutil.Error as err:
+ except shutil.Error as err:
errors.extend(err.args[0])
- except EnvironmentError as why:
+ except EnvironmentError as why:
errors.append((srcname, dstname, str(why)))
try:
shutil.copystat(src, dst)
- except OSError as why:
+ except OSError as why:
if WindowsError is not None and isinstance(why, WindowsError):
# Copying file access times may fail on Windows
pass
else:
errors.extend((src, dst, str(why)))
if errors:
- raise shutil.Error(errors)
-
-
-def walk_relative(path, topdown=True, onerror=None, followlinks=False):
- for dirpath, dirnames, filenames in os.walk(path, topdown=topdown, onerror=onerror, followlinks=followlinks):
- yield os.path.relpath(dirpath, path), dirnames, filenames
+ raise shutil.Error(errors)
+
+
+def walk_relative(path, topdown=True, onerror=None, followlinks=False):
+ for dirpath, dirnames, filenames in os.walk(path, topdown=topdown, onerror=onerror, followlinks=followlinks):
+ yield os.path.relpath(dirpath, path), dirnames, filenames
def supports_clone():
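
The hardlink_or_copy hunk above encodes a fallback policy rather than a new primitive: try os.link, and only when the error means "hardlinks are impossible here" (cross-device link, link-count limit, known WSL quirks, Windows error 1142) degrade to a copy; anything else propagates. A condensed standalone sketch of that policy; the real function checks a couple of additional errno values and copies with follow_symlinks=False:

    import errno
    import os
    import shutil

    def hardlink_or_copy(src, lnk):
        try:
            os.link(src, lnk)
        except OSError as e:
            # EXDEV: cross-device link; EACCES/EPERM: filesystem refuses hardlinks.
            if e.errno in (errno.EXDEV, errno.EACCES, errno.EPERM):
                shutil.copy2(src, lnk)  # fall back to a plain copy
            else:
                raise  # unrelated failure: surface it
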
diff --git a/library/python/fs/test/test_fs.py b/library/python/fs/test/test_fs.py
index 6bbd761c04..9e2c70c069 100644
--- a/library/python/fs/test/test_fs.py
+++ b/library/python/fs/test/test_fs.py
@@ -3,20 +3,20 @@
import errno
import os
import pytest
-import shutil
+import shutil
import six
-import library.python.fs
+import library.python.fs
import library.python.strings
-import library.python.tmp
+import library.python.tmp
import library.python.windows
-import yatest.common
-
+import yatest.common
+
def in_env(case):
def wrapped_case(*args, **kwargs):
- with library.python.tmp.temp_dir() as temp_dir:
+ with library.python.tmp.temp_dir() as temp_dir:
case(lambda path: os.path.join(temp_dir, path))
return wrapped_case
@@ -84,7 +84,7 @@ def gen_error_access_denied():
def test_errorfix_win():
- @library.python.fs.errorfix_win
+ @library.python.fs.errorfix_win
def erroneous_func():
gen_error_access_denied()
@@ -99,7 +99,7 @@ def test_errorfix_win():
def test_custom_fs_error():
with pytest.raises(OSError) as errinfo:
- raise library.python.fs.CustomFsError(errno.EACCES, filename='some/file')
+ raise library.python.fs.CustomFsError(errno.EACCES, filename='some/file')
assert errinfo.value.errno == errno.EACCES
# See transcode_error, which encodes strerror, in library/python/windows/__init__.py
assert isinstance(errinfo.value.strerror, (six.binary_type, six.text_type))
@@ -108,7 +108,7 @@ def test_custom_fs_error():
@in_env
def test_ensure_dir(path):
- library.python.fs.ensure_dir(path('dir/subdir'))
+ library.python.fs.ensure_dir(path('dir/subdir'))
assert os.path.isdir(path('dir'))
assert os.path.isdir(path('dir/subdir'))
@@ -116,7 +116,7 @@ def test_ensure_dir(path):
@in_env
def test_ensure_dir_exists(path):
os.makedirs(path('dir/subdir'))
- library.python.fs.ensure_dir(path('dir/subdir'))
+ library.python.fs.ensure_dir(path('dir/subdir'))
assert os.path.isdir(path('dir'))
assert os.path.isdir(path('dir/subdir'))
@@ -124,7 +124,7 @@ def test_ensure_dir_exists(path):
@in_env
def test_ensure_dir_exists_partly(path):
os.mkdir(path('dir'))
- library.python.fs.ensure_dir(path('dir/subdir'))
+ library.python.fs.ensure_dir(path('dir/subdir'))
assert os.path.isdir(path('dir'))
assert os.path.isdir(path('dir/subdir'))
@@ -133,7 +133,7 @@ def test_ensure_dir_exists_partly(path):
def test_ensure_dir_exists_file(path):
mkfile(path('dir'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.ensure_dir(path('dir/subdir'))
+ library.python.fs.ensure_dir(path('dir/subdir'))
# ENOENT on Windows!
assert errinfo.value.errno in (errno.ENOTDIR, errno.ENOENT)
assert os.path.isfile(path('dir'))
@@ -141,7 +141,7 @@ def test_ensure_dir_exists_file(path):
@in_env
def test_create_dirs(path):
- assert library.python.fs.create_dirs(path('dir/subdir')) == path('dir/subdir')
+ assert library.python.fs.create_dirs(path('dir/subdir')) == path('dir/subdir')
assert os.path.isdir(path('dir'))
assert os.path.isdir(path('dir/subdir'))
@@ -149,7 +149,7 @@ def test_create_dirs(path):
@in_env
def test_move_file(path):
mkfile(path('src'), 'SRC')
- library.python.fs.move(path('src'), path('dst'))
+ library.python.fs.move(path('src'), path('dst'))
assert not os.path.isfile(path('src'))
assert os.path.isfile(path('dst'))
assert file_data(path('dst')) == 'SRC'
@@ -158,7 +158,7 @@ def test_move_file(path):
@in_env
def test_move_file_no_src(path):
with pytest.raises(OSError) as errinfo:
- library.python.fs.move(path('src'), path('dst'))
+ library.python.fs.move(path('src'), path('dst'))
assert errinfo.value.errno == errno.ENOENT
@@ -169,13 +169,13 @@ def test_move_file_exists(path):
if library.python.windows.on_win():
# move is platform-dependent, use replace_file for dst replacement on all platforms
with pytest.raises(OSError) as errinfo:
- library.python.fs.move(path('src'), path('dst'))
+ library.python.fs.move(path('src'), path('dst'))
assert errinfo.value.errno == errno.EEXIST
assert os.path.isfile(path('src'))
assert os.path.isfile(path('dst'))
assert file_data(path('dst')) == 'DST'
else:
- library.python.fs.move(path('src'), path('dst'))
+ library.python.fs.move(path('src'), path('dst'))
assert not os.path.isfile(path('src'))
assert os.path.isfile(path('dst'))
assert file_data(path('dst')) == 'SRC'
@@ -186,7 +186,7 @@ def test_move_file_exists_dir_empty(path):
mkfile(path('src'), 'SRC')
os.mkdir(path('dst'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.move(path('src'), path('dst'))
+ library.python.fs.move(path('src'), path('dst'))
assert errinfo.value.errno in (errno.EEXIST, errno.EISDIR)
assert os.path.isfile(path('src'))
assert os.path.isdir(path('dst'))
@@ -199,7 +199,7 @@ def test_move_file_exists_dir_nonempty(path):
os.mkdir(path('dst'))
mkfile(path('dst/dst_file'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.move(path('src'), path('dst'))
+ library.python.fs.move(path('src'), path('dst'))
assert errinfo.value.errno in (errno.EEXIST, errno.EISDIR)
assert os.path.isfile(path('src'))
assert os.path.isdir(path('dst'))
@@ -211,7 +211,7 @@ def test_move_file_exists_dir_nonempty(path):
def test_move_dir(path):
os.mkdir(path('src'))
mkfile(path('src/src_file'))
- library.python.fs.move(path('src'), path('dst'))
+ library.python.fs.move(path('src'), path('dst'))
assert not os.path.isdir(path('src'))
assert os.path.isdir(path('dst'))
assert os.path.isfile(path('dst/src_file'))
@@ -225,13 +225,13 @@ def test_move_dir_exists_empty(path):
if library.python.windows.on_win():
# move is platform-dependent, use non-atomic replace for directory replacement
with pytest.raises(OSError) as errinfo:
- library.python.fs.move(path('src'), path('dst'))
+ library.python.fs.move(path('src'), path('dst'))
assert errinfo.value.errno == errno.EEXIST
assert os.path.isdir(path('src'))
assert os.path.isdir(path('dst'))
assert not os.path.isfile(path('dst/src_file'))
else:
- library.python.fs.move(path('src'), path('dst'))
+ library.python.fs.move(path('src'), path('dst'))
assert not os.path.isdir(path('src'))
assert os.path.isdir(path('dst'))
assert os.path.isfile(path('dst/src_file'))
@@ -244,7 +244,7 @@ def test_move_dir_exists_nonempty(path):
os.mkdir(path('dst'))
mkfile(path('dst/dst_file'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.move(path('src'), path('dst'))
+ library.python.fs.move(path('src'), path('dst'))
assert errinfo.value.errno in (errno.EEXIST, errno.ENOTEMPTY)
assert os.path.isdir(path('src'))
assert os.path.isfile(path('src/src_file'))
@@ -259,7 +259,7 @@ def test_move_dir_exists_file(path):
mkfile(path('src/src_file'))
mkfile(path('dst'), 'DST')
with pytest.raises(OSError) as errinfo:
- library.python.fs.move(path('src'), path('dst'))
+ library.python.fs.move(path('src'), path('dst'))
assert errinfo.value.errno in (errno.EEXIST, errno.ENOTDIR)
assert os.path.isdir(path('src'))
assert os.path.isfile(path('dst'))
@@ -269,13 +269,13 @@ def test_move_dir_exists_file(path):
@in_env
def test_replace_file(path):
mkfile(path('src'), 'SRC')
- library.python.fs.replace_file(path('src'), path('dst'))
+ library.python.fs.replace_file(path('src'), path('dst'))
assert not os.path.isfile(path('src'))
assert os.path.isfile(path('dst'))
assert file_data(path('dst')) == 'SRC'
mkfile(path('src'), 'SRC')
- library.python.fs.replace(path('src'), path('dst2'))
+ library.python.fs.replace(path('src'), path('dst2'))
assert not os.path.isfile(path('src'))
assert os.path.isfile(path('dst2'))
assert file_data(path('dst2')) == 'SRC'
@@ -284,11 +284,11 @@ def test_replace_file(path):
@in_env
def test_replace_file_no_src(path):
with pytest.raises(OSError) as errinfo:
- library.python.fs.replace_file(path('src'), path('dst'))
+ library.python.fs.replace_file(path('src'), path('dst'))
assert errinfo.value.errno == errno.ENOENT
with pytest.raises(OSError) as errinfo2:
- library.python.fs.replace(path('src'), path('dst2'))
+ library.python.fs.replace(path('src'), path('dst2'))
assert errinfo2.value.errno == errno.ENOENT
@@ -296,14 +296,14 @@ def test_replace_file_no_src(path):
def test_replace_file_exists(path):
mkfile(path('src'), 'SRC')
mkfile(path('dst'), 'DST')
- library.python.fs.replace_file(path('src'), path('dst'))
+ library.python.fs.replace_file(path('src'), path('dst'))
assert not os.path.isfile(path('src'))
assert os.path.isfile(path('dst'))
assert file_data(path('dst')) == 'SRC'
mkfile(path('src'), 'SRC')
mkfile(path('dst2'), 'DST')
- library.python.fs.replace(path('src'), path('dst2'))
+ library.python.fs.replace(path('src'), path('dst2'))
assert not os.path.isfile(path('src'))
assert os.path.isfile(path('dst2'))
assert file_data(path('dst2')) == 'SRC'
@@ -314,7 +314,7 @@ def test_replace_file_exists_dir_empty(path):
mkfile(path('src'), 'SRC')
os.mkdir(path('dst'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.replace_file(path('src'), path('dst'))
+ library.python.fs.replace_file(path('src'), path('dst'))
assert errinfo.value.errno in (errno.EISDIR, errno.EACCES)
assert os.path.isfile(path('src'))
assert os.path.isdir(path('dst'))
@@ -325,7 +325,7 @@ def test_replace_file_exists_dir_empty(path):
def test_replace_file_exists_dir_empty_overwrite(path):
mkfile(path('src'), 'SRC')
os.mkdir(path('dst'))
- library.python.fs.replace(path('src'), path('dst'))
+ library.python.fs.replace(path('src'), path('dst'))
assert not os.path.isfile(path('src'))
assert os.path.isfile(path('dst'))
assert file_data(path('dst')) == 'SRC'
@@ -337,20 +337,20 @@ def test_replace_file_exists_dir_nonempty(path):
os.mkdir(path('dst'))
mkfile(path('dst/dst_file'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.replace_file(path('src'), path('dst'))
+ library.python.fs.replace_file(path('src'), path('dst'))
assert errinfo.value.errno in (errno.EISDIR, errno.EACCES)
assert os.path.isfile(path('src'))
assert os.path.isdir(path('dst'))
assert os.path.isfile(path('dst/dst_file'))
assert not os.path.isfile(path('dst/src'))
-
-
+
+
@in_env
def test_replace_file_exists_dir_nonempty_overwrite(path):
mkfile(path('src'), 'SRC')
os.mkdir(path('dst'))
mkfile(path('dst/dst_file'))
- library.python.fs.replace(path('src'), path('dst'))
+ library.python.fs.replace(path('src'), path('dst'))
assert not os.path.isfile(path('src'))
assert os.path.isfile(path('dst'))
assert file_data(path('dst')) == 'SRC'
@@ -360,7 +360,7 @@ def test_replace_file_exists_dir_nonempty_overwrite(path):
def test_replace_dir(path):
os.mkdir(path('src'))
mkfile(path('src/src_file'))
- library.python.fs.replace(path('src'), path('dst'))
+ library.python.fs.replace(path('src'), path('dst'))
assert not os.path.isdir(path('src'))
assert os.path.isdir(path('dst'))
assert os.path.isfile(path('dst/src_file'))
@@ -371,7 +371,7 @@ def test_replace_dir_exists_empty(path):
os.mkdir(path('src'))
mkfile(path('src/src_file'))
os.mkdir(path('dst'))
- library.python.fs.replace(path('src'), path('dst'))
+ library.python.fs.replace(path('src'), path('dst'))
assert not os.path.isdir(path('src'))
assert os.path.isdir(path('dst'))
assert os.path.isfile(path('dst/src_file'))
@@ -383,7 +383,7 @@ def test_replace_dir_exists_nonempty(path):
mkfile(path('src/src_file'))
os.mkdir(path('dst'))
mkfile(path('dst/dst_file'))
- library.python.fs.replace(path('src'), path('dst'))
+ library.python.fs.replace(path('src'), path('dst'))
assert not os.path.isdir(path('src'))
assert os.path.isdir(path('dst'))
assert os.path.isfile(path('dst/src_file'))
@@ -395,7 +395,7 @@ def test_replace_dir_exists_file(path):
os.mkdir(path('src'))
mkfile(path('src/src_file'))
mkfile(path('dst'), 'DST')
- library.python.fs.replace(path('src'), path('dst'))
+ library.python.fs.replace(path('src'), path('dst'))
assert not os.path.isdir(path('src'))
assert os.path.isdir(path('dst'))
assert os.path.isfile(path('dst/src_file'))
@@ -404,14 +404,14 @@ def test_replace_dir_exists_file(path):
@in_env
def test_remove_file(path):
mkfile(path('path'))
- library.python.fs.remove_file(path('path'))
+ library.python.fs.remove_file(path('path'))
assert not os.path.exists(path('path'))
@in_env
def test_remove_file_no(path):
with pytest.raises(OSError) as errinfo:
- library.python.fs.remove_file(path('path'))
+ library.python.fs.remove_file(path('path'))
assert errinfo.value.errno == errno.ENOENT
@@ -419,7 +419,7 @@ def test_remove_file_no(path):
def test_remove_file_exists_dir(path):
os.mkdir(path('path'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.remove_file(path('path'))
+ library.python.fs.remove_file(path('path'))
assert errinfo.value.errno in (errno.EISDIR, errno.EACCES)
assert os.path.isdir(path('path'))
@@ -427,14 +427,14 @@ def test_remove_file_exists_dir(path):
@in_env
def test_remove_dir(path):
os.mkdir(path('path'))
- library.python.fs.remove_dir(path('path'))
+ library.python.fs.remove_dir(path('path'))
assert not os.path.exists(path('path'))
@in_env
def test_remove_dir_no(path):
with pytest.raises(OSError) as errinfo:
- library.python.fs.remove_dir(path('path'))
+ library.python.fs.remove_dir(path('path'))
assert errinfo.value.errno == errno.ENOENT
@@ -442,7 +442,7 @@ def test_remove_dir_no(path):
def test_remove_dir_exists_file(path):
mkfile(path('path'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.remove_dir(path('path'))
+ library.python.fs.remove_dir(path('path'))
assert errinfo.value.errno in (errno.ENOTDIR, errno.EINVAL)
assert os.path.isfile(path('path'))
@@ -450,52 +450,52 @@ def test_remove_dir_exists_file(path):
@in_env
def test_remove_tree(path):
mktree_example(path, 'path')
- library.python.fs.remove_tree(path('path'))
+ library.python.fs.remove_tree(path('path'))
assert not os.path.exists(path('path'))
@in_env
def test_remove_tree_empty(path):
os.mkdir(path('path'))
- library.python.fs.remove_tree(path('path'))
+ library.python.fs.remove_tree(path('path'))
assert not os.path.exists(path('path'))
@in_env
def test_remove_tree_file(path):
mkfile(path('path'))
- library.python.fs.remove_tree(path('path'))
+ library.python.fs.remove_tree(path('path'))
assert not os.path.exists(path('path'))
@in_env
def test_remove_tree_no(path):
with pytest.raises(OSError) as errinfo:
- library.python.fs.remove_tree(path('path'))
+ library.python.fs.remove_tree(path('path'))
assert errinfo.value.errno == errno.ENOENT
@in_env
def test_remove_tree_safe(path):
- library.python.fs.remove_tree_safe(path('path'))
+ library.python.fs.remove_tree_safe(path('path'))
@in_env
def test_ensure_removed(path):
- library.python.fs.ensure_removed(path('path'))
+ library.python.fs.ensure_removed(path('path'))
@in_env
def test_ensure_removed_exists(path):
os.makedirs(path('dir/subdir'))
- library.python.fs.ensure_removed(path('dir'))
+ library.python.fs.ensure_removed(path('dir'))
assert not os.path.exists(path('dir'))
@in_env
def test_ensure_removed_exists_precise(path):
os.makedirs(path('dir/subdir'))
- library.python.fs.ensure_removed(path('dir/subdir'))
+ library.python.fs.ensure_removed(path('dir/subdir'))
assert os.path.exists(path('dir'))
assert not os.path.exists(path('dir/subdir'))
@@ -503,7 +503,7 @@ def test_ensure_removed_exists_precise(path):
@in_env
def test_hardlink_file(path):
mkfile(path('src'), 'SRC')
- library.python.fs.hardlink(path('src'), path('dst'))
+ library.python.fs.hardlink(path('src'), path('dst'))
assert os.path.isfile(path('src'))
assert os.path.isfile(path('dst'))
assert file_data(path('dst')) == 'SRC'
@@ -513,7 +513,7 @@ def test_hardlink_file(path):
@in_env
def test_hardlink_file_no_src(path):
with pytest.raises(OSError) as errinfo:
- library.python.fs.hardlink(path('src'), path('dst'))
+ library.python.fs.hardlink(path('src'), path('dst'))
assert errinfo.value.errno == errno.ENOENT
@@ -522,7 +522,7 @@ def test_hardlink_file_exists(path):
mkfile(path('src'), 'SRC')
mkfile(path('dst'), 'DST')
with pytest.raises(OSError) as errinfo:
- library.python.fs.hardlink(path('src'), path('dst'))
+ library.python.fs.hardlink(path('src'), path('dst'))
assert errinfo.value.errno == errno.EEXIST
assert os.path.isfile(path('src'))
assert os.path.isfile(path('dst'))
@@ -535,7 +535,7 @@ def test_hardlink_file_exists_dir(path):
mkfile(path('src'), 'SRC')
os.mkdir(path('dst'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.hardlink(path('src'), path('dst'))
+ library.python.fs.hardlink(path('src'), path('dst'))
assert errinfo.value.errno == errno.EEXIST
assert os.path.isfile(path('src'))
assert os.path.isdir(path('dst'))
@@ -547,7 +547,7 @@ def test_hardlink_dir(path):
os.mkdir(path('src'))
mkfile(path('src/src_file'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.hardlink(path('src'), path('dst'))
+ library.python.fs.hardlink(path('src'), path('dst'))
assert errinfo.value.errno in (errno.EPERM, errno.EACCES)
assert os.path.isdir(path('src'))
assert not os.path.isdir(path('dst'))
@@ -557,7 +557,7 @@ def test_hardlink_dir(path):
@in_env
def test_symlink_file(path):
mkfile(path('src'), 'SRC')
- library.python.fs.symlink(path('src'), path('dst'))
+ library.python.fs.symlink(path('src'), path('dst'))
assert os.path.isfile(path('src'))
assert os.path.isfile(path('dst'))
assert os.path.islink(path('dst'))
@@ -567,7 +567,7 @@ def test_symlink_file(path):
@pytest.mark.skipif(library.python.windows.on_win(), reason='Symlinks disabled on Windows')
@in_env
def test_symlink_file_no_src(path):
- library.python.fs.symlink(path('src'), path('dst'))
+ library.python.fs.symlink(path('src'), path('dst'))
assert not os.path.isfile(path('src'))
assert not os.path.isfile(path('dst'))
assert os.path.islink(path('dst'))
@@ -579,7 +579,7 @@ def test_symlink_file_exists(path):
mkfile(path('src'), 'SRC')
mkfile(path('dst'), 'DST')
with pytest.raises(OSError) as errinfo:
- library.python.fs.symlink(path('src'), path('dst'))
+ library.python.fs.symlink(path('src'), path('dst'))
assert errinfo.value.errno == errno.EEXIST
assert os.path.isfile(path('src'))
assert os.path.isfile(path('dst'))
@@ -593,7 +593,7 @@ def test_symlink_file_exists_dir(path):
mkfile(path('src'), 'SRC')
os.mkdir(path('dst'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.symlink(path('src'), path('dst'))
+ library.python.fs.symlink(path('src'), path('dst'))
assert errinfo.value.errno == errno.EEXIST
assert os.path.isfile(path('src'))
assert os.path.isdir(path('dst'))
@@ -606,7 +606,7 @@ def test_symlink_file_exists_dir(path):
def test_symlink_dir(path):
os.mkdir(path('src'))
mkfile(path('src/src_file'))
- library.python.fs.symlink(path('src'), path('dst'))
+ library.python.fs.symlink(path('src'), path('dst'))
assert os.path.isdir(path('src'))
assert os.path.isdir(path('dst'))
assert os.path.islink(path('dst'))
@@ -616,7 +616,7 @@ def test_symlink_dir(path):
@pytest.mark.skipif(library.python.windows.on_win(), reason='Symlinks disabled on Windows')
@in_env
def test_symlink_dir_no_src(path):
- library.python.fs.symlink(path('src'), path('dst'))
+ library.python.fs.symlink(path('src'), path('dst'))
assert not os.path.isdir(path('src'))
assert not os.path.isdir(path('dst'))
assert os.path.islink(path('dst'))
@@ -629,7 +629,7 @@ def test_symlink_dir_exists(path):
mkfile(path('src/src_file'))
os.mkdir(path('dst'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.symlink(path('src'), path('dst'))
+ library.python.fs.symlink(path('src'), path('dst'))
assert errinfo.value.errno == errno.EEXIST
assert os.path.isdir(path('src'))
assert os.path.isdir(path('dst'))
@@ -644,7 +644,7 @@ def test_symlink_dir_exists_file(path):
mkfile(path('src/src_file'))
mkfile(path('dst'), 'DST')
with pytest.raises(OSError) as errinfo:
- library.python.fs.symlink(path('src'), path('dst'))
+ library.python.fs.symlink(path('src'), path('dst'))
assert errinfo.value.errno == errno.EEXIST
assert os.path.isdir(path('src'))
assert os.path.isfile(path('dst'))
@@ -654,28 +654,28 @@ def test_symlink_dir_exists_file(path):
@in_env
def test_hardlink_tree(path):
mktree_example(path, 'src')
- library.python.fs.hardlink_tree(path('src'), path('dst'))
+ library.python.fs.hardlink_tree(path('src'), path('dst'))
assert trees_equal(path('src'), path('dst'))
@in_env
def test_hardlink_tree_empty(path):
os.mkdir(path('src'))
- library.python.fs.hardlink_tree(path('src'), path('dst'))
+ library.python.fs.hardlink_tree(path('src'), path('dst'))
assert trees_equal(path('src'), path('dst'))
@in_env
def test_hardlink_tree_file(path):
mkfile(path('src'), 'SRC')
- library.python.fs.hardlink_tree(path('src'), path('dst'))
+ library.python.fs.hardlink_tree(path('src'), path('dst'))
assert trees_equal(path('src'), path('dst'))
@in_env
def test_hardlink_tree_no_src(path):
with pytest.raises(OSError) as errinfo:
- library.python.fs.hardlink_tree(path('src'), path('dst'))
+ library.python.fs.hardlink_tree(path('src'), path('dst'))
assert errinfo.value.errno == errno.ENOENT
@@ -684,11 +684,11 @@ def test_hardlink_tree_exists(path):
mktree_example(path, 'src')
os.mkdir(path('dst_dir'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.hardlink_tree(path('src'), path('dst_dir'))
+ library.python.fs.hardlink_tree(path('src'), path('dst_dir'))
assert errinfo.value.errno == errno.EEXIST
mkfile(path('dst_file'), 'DST')
with pytest.raises(OSError) as errinfo:
- library.python.fs.hardlink_tree(path('src'), path('dst_file'))
+ library.python.fs.hardlink_tree(path('src'), path('dst_file'))
assert errinfo.value.errno == errno.EEXIST
@@ -697,18 +697,18 @@ def test_hardlink_tree_file_exists(path):
mkfile(path('src'), 'SRC')
os.mkdir(path('dst_dir'))
with pytest.raises(OSError) as errinfo:
- library.python.fs.hardlink_tree(path('src'), path('dst_dir'))
+ library.python.fs.hardlink_tree(path('src'), path('dst_dir'))
assert errinfo.value.errno == errno.EEXIST
mkfile(path('dst_file'), 'DST')
with pytest.raises(OSError) as errinfo:
- library.python.fs.hardlink_tree(path('src'), path('dst_file'))
+ library.python.fs.hardlink_tree(path('src'), path('dst_file'))
assert errinfo.value.errno == errno.EEXIST
@in_env
def test_copy_file(path):
mkfile(path('src'), 'SRC')
- library.python.fs.copy_file(path('src'), path('dst'))
+ library.python.fs.copy_file(path('src'), path('dst'))
assert os.path.isfile(path('src'))
assert os.path.isfile(path('dst'))
assert file_data(path('dst')) == 'SRC'
@@ -717,14 +717,14 @@ def test_copy_file(path):
@in_env
def test_copy_file_no_src(path):
with pytest.raises(EnvironmentError):
- library.python.fs.copy_file(path('src'), path('dst'))
+ library.python.fs.copy_file(path('src'), path('dst'))
@in_env
def test_copy_file_exists(path):
mkfile(path('src'), 'SRC')
mkfile(path('dst'), 'DST')
- library.python.fs.copy_file(path('src'), path('dst'))
+ library.python.fs.copy_file(path('src'), path('dst'))
assert os.path.isfile(path('src'))
assert os.path.isfile(path('dst'))
assert file_data(path('dst')) == 'SRC'
@@ -735,7 +735,7 @@ def test_copy_file_exists_dir_empty(path):
mkfile(path('src'), 'SRC')
os.mkdir(path('dst'))
with pytest.raises(EnvironmentError):
- library.python.fs.copy_file(path('src'), path('dst'))
+ library.python.fs.copy_file(path('src'), path('dst'))
assert os.path.isfile(path('src'))
assert os.path.isdir(path('dst'))
assert not os.path.isfile(path('dst/src'))
@@ -747,7 +747,7 @@ def test_copy_file_exists_dir_nonempty(path):
os.mkdir(path('dst'))
mkfile(path('dst/dst_file'))
with pytest.raises(EnvironmentError):
- library.python.fs.copy_file(path('src'), path('dst'))
+ library.python.fs.copy_file(path('src'), path('dst'))
assert os.path.isfile(path('src'))
assert os.path.isdir(path('dst'))
assert os.path.isfile(path('dst/dst_file'))
@@ -757,28 +757,28 @@ def test_copy_file_exists_dir_nonempty(path):
@in_env
def test_copy_tree(path):
mktree_example(path, 'src')
- library.python.fs.copy_tree(path('src'), path('dst'))
+ library.python.fs.copy_tree(path('src'), path('dst'))
assert trees_equal(path('src'), path('dst'))
@in_env
def test_copy_tree_empty(path):
os.mkdir(path('src'))
- library.python.fs.copy_tree(path('src'), path('dst'))
+ library.python.fs.copy_tree(path('src'), path('dst'))
assert trees_equal(path('src'), path('dst'))
@in_env
def test_copy_tree_file(path):
mkfile(path('src'), 'SRC')
- library.python.fs.copy_tree(path('src'), path('dst'))
+ library.python.fs.copy_tree(path('src'), path('dst'))
assert trees_equal(path('src'), path('dst'))
@in_env
def test_copy_tree_no_src(path):
with pytest.raises(EnvironmentError):
- library.python.fs.copy_tree(path('src'), path('dst'))
+ library.python.fs.copy_tree(path('src'), path('dst'))
@in_env
@@ -786,10 +786,10 @@ def test_copy_tree_exists(path):
mktree_example(path, 'src')
os.mkdir(path('dst_dir'))
with pytest.raises(EnvironmentError):
- library.python.fs.copy_tree(path('src'), path('dst_dir'))
+ library.python.fs.copy_tree(path('src'), path('dst_dir'))
mkfile(path('dst_file'), 'DST')
with pytest.raises(EnvironmentError):
- library.python.fs.copy_tree(path('src'), path('dst_file'))
+ library.python.fs.copy_tree(path('src'), path('dst_file'))
@in_env
@@ -797,9 +797,9 @@ def test_copy_tree_file_exists(path):
mkfile(path('src'), 'SRC')
os.mkdir(path('dst_dir'))
with pytest.raises(EnvironmentError):
- library.python.fs.copy_tree(path('src'), path('dst_dir'))
+ library.python.fs.copy_tree(path('src'), path('dst_dir'))
mkfile(path('dst_file'), 'DST')
- library.python.fs.copy_tree(path('src'), path('dst_file'))
+ library.python.fs.copy_tree(path('src'), path('dst_file'))
assert trees_equal(path('src'), path('dst_file'))
@@ -807,14 +807,14 @@ def test_copy_tree_file_exists(path):
def test_read_file(path):
mkfile(path('src'), 'SRC')
assert library.python.fs.read_file(path('src')).decode(library.python.strings.fs_encoding()) == 'SRC'
- assert library.python.fs.read_file(path('src'), binary=False) == 'SRC'
+ assert library.python.fs.read_file(path('src'), binary=False) == 'SRC'
@in_env
def test_read_file_empty(path):
mkfile(path('src'))
assert library.python.fs.read_file(path('src')).decode(library.python.strings.fs_encoding()) == ''
- assert library.python.fs.read_file(path('src'), binary=False) == ''
+ assert library.python.fs.read_file(path('src'), binary=False) == ''
@in_env
@@ -824,7 +824,7 @@ def test_read_file_multiline(path):
library.python.fs.read_file(path('src')).decode(library.python.strings.fs_encoding())
== 'SRC line 1\nSRC line 2\n'
)
- assert library.python.fs.read_file(path('src'), binary=False) == 'SRC line 1\nSRC line 2\n'
+ assert library.python.fs.read_file(path('src'), binary=False) == 'SRC line 1\nSRC line 2\n'
@in_env
@@ -835,9 +835,9 @@ def test_read_file_multiline_crlf(path):
== 'SRC line 1\r\nSRC line 2\r\n'
)
if library.python.windows.on_win() or six.PY3: # universal newlines are by default in text mode in python3
- assert library.python.fs.read_file(path('src'), binary=False) == 'SRC line 1\nSRC line 2\n'
+ assert library.python.fs.read_file(path('src'), binary=False) == 'SRC line 1\nSRC line 2\n'
else:
- assert library.python.fs.read_file(path('src'), binary=False) == 'SRC line 1\r\nSRC line 2\r\n'
+ assert library.python.fs.read_file(path('src'), binary=False) == 'SRC line 1\r\nSRC line 2\r\n'
@in_env
@@ -845,20 +845,20 @@ def test_read_file_unicode(path):
s = u'АБВ'
mkfile(path('src'), s.encode('utf-8'))
mkfile(path('src_cp1251'), s.encode('cp1251'))
- assert library.python.fs.read_file_unicode(path('src')) == s
- assert library.python.fs.read_file_unicode(path('src_cp1251'), enc='cp1251') == s
- assert library.python.fs.read_file_unicode(path('src'), binary=False) == s
- assert library.python.fs.read_file_unicode(path('src_cp1251'), binary=False, enc='cp1251') == s
+ assert library.python.fs.read_file_unicode(path('src')) == s
+ assert library.python.fs.read_file_unicode(path('src_cp1251'), enc='cp1251') == s
+ assert library.python.fs.read_file_unicode(path('src'), binary=False) == s
+ assert library.python.fs.read_file_unicode(path('src_cp1251'), binary=False, enc='cp1251') == s
@in_env
def test_read_file_unicode_empty(path):
mkfile(path('src'))
mkfile(path('src_cp1251'))
- assert library.python.fs.read_file_unicode(path('src')) == ''
- assert library.python.fs.read_file_unicode(path('src_cp1251'), enc='cp1251') == ''
- assert library.python.fs.read_file_unicode(path('src'), binary=False) == ''
- assert library.python.fs.read_file_unicode(path('src_cp1251'), binary=False, enc='cp1251') == ''
+ assert library.python.fs.read_file_unicode(path('src')) == ''
+ assert library.python.fs.read_file_unicode(path('src_cp1251'), enc='cp1251') == ''
+ assert library.python.fs.read_file_unicode(path('src'), binary=False) == ''
+ assert library.python.fs.read_file_unicode(path('src_cp1251'), binary=False, enc='cp1251') == ''
@in_env
@@ -866,10 +866,10 @@ def test_read_file_unicode_multiline(path):
s = u'АБВ\nИ еще\n'
mkfile(path('src'), s.encode('utf-8'))
mkfile(path('src_cp1251'), s.encode('cp1251'))
- assert library.python.fs.read_file_unicode(path('src')) == s
- assert library.python.fs.read_file_unicode(path('src_cp1251'), enc='cp1251') == s
- assert library.python.fs.read_file_unicode(path('src'), binary=False) == s
- assert library.python.fs.read_file_unicode(path('src_cp1251'), binary=False, enc='cp1251') == s
+ assert library.python.fs.read_file_unicode(path('src')) == s
+ assert library.python.fs.read_file_unicode(path('src_cp1251'), enc='cp1251') == s
+ assert library.python.fs.read_file_unicode(path('src'), binary=False) == s
+ assert library.python.fs.read_file_unicode(path('src_cp1251'), binary=False, enc='cp1251') == s
@in_env
@@ -877,37 +877,37 @@ def test_read_file_unicode_multiline_crlf(path):
s = u'АБВ\r\nИ еще\r\n'
mkfile(path('src'), s.encode('utf-8'))
mkfile(path('src_cp1251'), s.encode('cp1251'))
- assert library.python.fs.read_file_unicode(path('src')) == s
- assert library.python.fs.read_file_unicode(path('src_cp1251'), enc='cp1251') == s
+ assert library.python.fs.read_file_unicode(path('src')) == s
+ assert library.python.fs.read_file_unicode(path('src_cp1251'), enc='cp1251') == s
if library.python.windows.on_win() or six.PY3: # universal newlines are by default in text mode in python3
- assert library.python.fs.read_file_unicode(path('src'), binary=False) == u'АБВ\nИ еще\n'
- assert library.python.fs.read_file_unicode(path('src_cp1251'), binary=False, enc='cp1251') == u'АБВ\nИ еще\n'
+ assert library.python.fs.read_file_unicode(path('src'), binary=False) == u'АБВ\nИ еще\n'
+ assert library.python.fs.read_file_unicode(path('src_cp1251'), binary=False, enc='cp1251') == u'АБВ\nИ еще\n'
else:
- assert library.python.fs.read_file_unicode(path('src'), binary=False) == s
- assert library.python.fs.read_file_unicode(path('src_cp1251'), binary=False, enc='cp1251') == s
+ assert library.python.fs.read_file_unicode(path('src'), binary=False) == s
+ assert library.python.fs.read_file_unicode(path('src_cp1251'), binary=False, enc='cp1251') == s
@in_env
def test_write_file(path):
- library.python.fs.write_file(path('src'), 'SRC')
+ library.python.fs.write_file(path('src'), 'SRC')
assert file_data(path('src')) == 'SRC'
- library.python.fs.write_file(path('src2'), 'SRC', binary=False)
+ library.python.fs.write_file(path('src2'), 'SRC', binary=False)
assert file_data(path('src2')) == 'SRC'
@in_env
def test_write_file_empty(path):
- library.python.fs.write_file(path('src'), '')
+ library.python.fs.write_file(path('src'), '')
assert file_data(path('src')) == ''
- library.python.fs.write_file(path('src2'), '', binary=False)
+ library.python.fs.write_file(path('src2'), '', binary=False)
assert file_data(path('src2')) == ''
@in_env
def test_write_file_multiline(path):
- library.python.fs.write_file(path('src'), 'SRC line 1\nSRC line 2\n')
+ library.python.fs.write_file(path('src'), 'SRC line 1\nSRC line 2\n')
assert file_data(path('src')) == 'SRC line 1\nSRC line 2\n'
- library.python.fs.write_file(path('src2'), 'SRC line 1\nSRC line 2\n', binary=False)
+ library.python.fs.write_file(path('src2'), 'SRC line 1\nSRC line 2\n', binary=False)
if library.python.windows.on_win():
assert file_data(path('src2')) == 'SRC line 1\r\nSRC line 2\r\n'
else:
@@ -916,9 +916,9 @@ def test_write_file_multiline(path):
@in_env
def test_write_file_multiline_crlf(path):
- library.python.fs.write_file(path('src'), 'SRC line 1\r\nSRC line 2\r\n')
+ library.python.fs.write_file(path('src'), 'SRC line 1\r\nSRC line 2\r\n')
assert file_data(path('src')) == 'SRC line 1\r\nSRC line 2\r\n'
- library.python.fs.write_file(path('src2'), 'SRC line 1\r\nSRC line 2\r\n', binary=False)
+ library.python.fs.write_file(path('src2'), 'SRC line 1\r\nSRC line 2\r\n', binary=False)
if library.python.windows.on_win():
assert file_data(path('src2')) == 'SRC line 1\r\r\nSRC line 2\r\r\n'
else:
@@ -928,13 +928,13 @@ def test_write_file_multiline_crlf(path):
@in_env
def test_get_file_size(path):
mkfile(path('one.txt'), '22')
- assert library.python.fs.get_file_size(path('one.txt')) == 2
-
-
+ assert library.python.fs.get_file_size(path('one.txt')) == 2
+
+
@in_env
def test_get_file_size_empty(path):
mkfile(path('one.txt'))
- assert library.python.fs.get_file_size(path('one.txt')) == 0
+ assert library.python.fs.get_file_size(path('one.txt')) == 0
@in_env
@@ -942,9 +942,9 @@ def test_get_tree_size(path):
os.makedirs(path('deeper'))
mkfile(path('one.txt'), '1')
mkfile(path('deeper/two.txt'), '22')
- assert library.python.fs.get_tree_size(path('one.txt')) == 1
- assert library.python.fs.get_tree_size(path('')) == 1
- assert library.python.fs.get_tree_size(path(''), recursive=True) == 3
+ assert library.python.fs.get_tree_size(path('one.txt')) == 1
+ assert library.python.fs.get_tree_size(path('')) == 1
+ assert library.python.fs.get_tree_size(path(''), recursive=True) == 3
@pytest.mark.skipif(library.python.windows.on_win(), reason='Symlinks disabled on Windows')
@@ -956,61 +956,61 @@ def test_get_tree_size_dangling_symlink(path):
os.symlink(path('deeper/two.txt'), path("deeper/link.txt"))
os.remove(path('deeper/two.txt'))
# does not fail
- assert library.python.fs.get_tree_size(path(''), recursive=True) == 1
-
-
+ assert library.python.fs.get_tree_size(path(''), recursive=True) == 1
+
+
@pytest.mark.skipif(not library.python.windows.on_win(), reason='Test hardlinks on windows')
-def test_hardlink_or_copy():
- max_allowed_hard_links = 1023
-
- def run(hardlink_function, dir):
- src = r"test.txt"
- with open(src, "w") as f:
- f.write("test")
- for i in range(max_allowed_hard_links + 1):
- hardlink_function(src, os.path.join(dir, "{}.txt".format(i)))
-
- dir1 = library.python.fs.create_dirs("one")
- with pytest.raises(WindowsError) as e:
- run(library.python.fs.hardlink, dir1)
- assert e.value.winerror == 1142
- assert len(os.listdir(dir1)) == max_allowed_hard_links
-
- dir2 = library.python.fs.create_dirs("two")
- run(library.python.fs.hardlink_or_copy, dir2)
- assert len(os.listdir(dir2)) == max_allowed_hard_links + 1
-
-
-def test_remove_tree_unicode():
- path = u"test_remove_tree_unicode/русский".encode("utf-8")
- os.makedirs(path)
+def test_hardlink_or_copy():
+ max_allowed_hard_links = 1023
+
+ def run(hardlink_function, dir):
+ src = r"test.txt"
+ with open(src, "w") as f:
+ f.write("test")
+ for i in range(max_allowed_hard_links + 1):
+ hardlink_function(src, os.path.join(dir, "{}.txt".format(i)))
+
+ dir1 = library.python.fs.create_dirs("one")
+ with pytest.raises(WindowsError) as e:
+ run(library.python.fs.hardlink, dir1)
+ assert e.value.winerror == 1142
+ assert len(os.listdir(dir1)) == max_allowed_hard_links
+
+ dir2 = library.python.fs.create_dirs("two")
+ run(library.python.fs.hardlink_or_copy, dir2)
+ assert len(os.listdir(dir2)) == max_allowed_hard_links + 1
+
+
+def test_remove_tree_unicode():
+ path = u"test_remove_tree_unicode/русский".encode("utf-8")
+ os.makedirs(path)
library.python.fs.remove_tree(six.text_type("test_remove_tree_unicode"))
- assert not os.path.exists("test_remove_tree_unicode")
-
-
-def test_remove_tree_safe_unicode():
- path = u"test_remove_tree_safe_unicode/русский".encode("utf-8")
- os.makedirs(path)
+ assert not os.path.exists("test_remove_tree_unicode")
+
+
+def test_remove_tree_safe_unicode():
+ path = u"test_remove_tree_safe_unicode/русский".encode("utf-8")
+ os.makedirs(path)
library.python.fs.remove_tree_safe(six.text_type("test_remove_tree_safe_unicode"))
- assert not os.path.exists("test_remove_tree_safe_unicode")
-
-
-def test_copy_tree_custom_copy_function():
- library.python.fs.create_dirs("test_copy_tree_src/deeper/inner")
- library.python.fs.write_file("test_copy_tree_src/deeper/deeper.txt", "deeper.txt")
- library.python.fs.write_file("test_copy_tree_src/deeper/inner/inner.txt", "inner.txt")
- copied = []
-
- def copy_function(src, dst):
- shutil.copy2(src, dst)
- copied.append(dst)
-
+ assert not os.path.exists("test_remove_tree_safe_unicode")
+
+
+def test_copy_tree_custom_copy_function():
+ library.python.fs.create_dirs("test_copy_tree_src/deeper/inner")
+ library.python.fs.write_file("test_copy_tree_src/deeper/deeper.txt", "deeper.txt")
+ library.python.fs.write_file("test_copy_tree_src/deeper/inner/inner.txt", "inner.txt")
+ copied = []
+
+ def copy_function(src, dst):
+ shutil.copy2(src, dst)
+ copied.append(dst)
+
library.python.fs.copy_tree(
"test_copy_tree_src", yatest.common.work_path("test_copy_tree_dst"), copy_function=copy_function
)
- assert len(copied) == 2
- assert yatest.common.work_path("test_copy_tree_dst/deeper/deeper.txt") in copied
- assert yatest.common.work_path("test_copy_tree_dst/deeper/inner/inner.txt") in copied
+ assert len(copied) == 2
+ assert yatest.common.work_path("test_copy_tree_dst/deeper/deeper.txt") in copied
+ assert yatest.common.work_path("test_copy_tree_dst/deeper/inner/inner.txt") in copied
def test_copy2():
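
test_copy_tree_custom_copy_function above doubles as usage documentation for the copy_function parameter threaded through copy_file, copy_tree and copytree3: any (src, dst) callable is accepted, so instrumenting the copy is one wrapper away. Condensed from the test, with illustrative paths:

    import shutil

    import library.python.fs

    copied = []

    def tracking_copy(src, dst):
        shutil.copy2(src, dst)   # do the real copy
        copied.append(dst)       # ...and record the destination

    library.python.fs.copy_tree('src_dir', 'dst_dir', copy_function=tracking_copy)
    # 'copied' now lists every file (not directory) that copytree3 materialized.
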
diff --git a/library/python/fs/test/ya.make b/library/python/fs/test/ya.make
index c58182bfba..33e3f5b4ff 100644
--- a/library/python/fs/test/ya.make
+++ b/library/python/fs/test/ya.make
@@ -1,14 +1,14 @@
-OWNER(g:yatool)
-
+OWNER(g:yatool)
+
PY23_TEST()
-
-TEST_SRCS(
- test_fs.py
-)
-
-PEERDIR(
- library/python/fs
- library/python/tmp
-)
-
-END()
+
+TEST_SRCS(
+ test_fs.py
+)
+
+PEERDIR(
+ library/python/fs
+ library/python/tmp
+)
+
+END()
diff --git a/library/python/fs/ya.make b/library/python/fs/ya.make
index 2597e4fb8e..b3c5092c71 100644
--- a/library/python/fs/ya.make
+++ b/library/python/fs/ya.make
@@ -1,23 +1,23 @@
-OWNER(g:yatool)
-
-PY23_LIBRARY()
-
+OWNER(g:yatool)
+
+PY23_LIBRARY()
+
PY_SRCS(
__init__.py
)
-
+
IF (OS_DARWIN)
PY_SRCS(
clonefile.pyx
)
ENDIF()
-PEERDIR(
- library/python/func
- library/python/strings
- library/python/windows
-)
-
-END()
-
-RECURSE_FOR_TESTS(test)
+PEERDIR(
+ library/python/func
+ library/python/strings
+ library/python/windows
+)
+
+END()
+
+RECURSE_FOR_TESTS(test)
diff --git a/library/python/pytest/allure/conftest.py b/library/python/pytest/allure/conftest.py
index 451e377cfc..0d5cfda1e5 100644
--- a/library/python/pytest/allure/conftest.py
+++ b/library/python/pytest/allure/conftest.py
@@ -1,8 +1,8 @@
-import os
-import pytest
-
-
-@pytest.mark.tryfirst
-def pytest_configure(config):
- if "ALLURE_REPORT_DIR" in os.environ:
- config.option.allurereportdir = os.environ["ALLURE_REPORT_DIR"]
+import os
+import pytest
+
+
+@pytest.mark.tryfirst
+def pytest_configure(config):
+ if "ALLURE_REPORT_DIR" in os.environ:
+ config.option.allurereportdir = os.environ["ALLURE_REPORT_DIR"]
diff --git a/library/python/pytest/empty/main.c b/library/python/pytest/empty/main.c
index d49fcc7031..9efa08162a 100644
--- a/library/python/pytest/empty/main.c
+++ b/library/python/pytest/empty/main.c
@@ -1,7 +1,7 @@
-/*
-to be used for building python tests into a stub binary when using the system python
-*/
-
-int main(void) {
- return 0;
-}
+/*
+to be used for building python tests into a stub binary when using the system python
+*/
+
+int main(void) {
+ return 0;
+}
diff --git a/library/python/pytest/empty/ya.make b/library/python/pytest/empty/ya.make
index 4394568f15..8f0fa37e2a 100644
--- a/library/python/pytest/empty/ya.make
+++ b/library/python/pytest/empty/ya.make
@@ -1,12 +1,12 @@
-LIBRARY()
-
+LIBRARY()
+
OWNER(
g:yatool
dmitko
)
-
+
SRCS(
main.c
)
-
-END()
+
+END()
diff --git a/library/python/pytest/main.py b/library/python/pytest/main.py
index df3513031f..6296bd6f0f 100644
--- a/library/python/pytest/main.py
+++ b/library/python/pytest/main.py
@@ -1,25 +1,25 @@
import os
-import sys
+import sys
import time
import __res
-
+
FORCE_EXIT_TESTSFAILED_ENV = 'FORCE_EXIT_TESTSFAILED'
-def main():
+def main():
import library.python.pytest.context as context
context.Ctx["YA_PYTEST_START_TIMESTAMP"] = time.time()
profile = None
if '--profile-pytest' in sys.argv:
sys.argv.remove('--profile-pytest')
-
+
import pstats
import cProfile
profile = cProfile.Profile()
profile.enable()
-
+
# Reset influencing env. vars
# For more info see library/python/testing/yatest_common/yatest/common/errors.py
if FORCE_EXIT_TESTSFAILED_ENV in os.environ:
@@ -46,12 +46,12 @@ def main():
m.setattr(_pytest.assertion.rewrite, "AssertionRewritingHook", rewrite.AssertionRewritingHook)
prefix = '__tests__.'
-
+
test_modules = [
name[len(prefix):] for name in sys.extra_modules
if name.startswith(prefix) and not name.endswith('.conftest')
]
-
+
doctest_packages = __res.find("PY_DOCTEST_PACKAGES") or ""
if isinstance(doctest_packages, bytes):
doctest_packages = doctest_packages.decode('utf-8')
@@ -70,20 +70,20 @@ def main():
def remove_user_site(paths):
site_paths = ('site-packages', 'site-python')
-
+
def is_site_path(path):
for p in site_paths:
if path.find(p) != -1:
return True
return False
-
+
new_paths = list(paths)
for p in paths:
if is_site_path(p):
new_paths.remove(p)
-
+
return new_paths
-
+
sys.path = remove_user_site(sys.path)
rc = pytest.main(plugins=[
collection.CollectionPlugin(test_modules, doctest_modules),
@@ -91,10 +91,10 @@ def main():
conftests,
])
- if rc == 5:
- # don't care about EXIT_NOTESTSCOLLECTED
- rc = 0
-
+ if rc == 5:
+ # don't care about EXIT_NOTESTSCOLLECTED
+ rc = 0
+
if rc == 1 and yatest_runner and not listing_mode and not os.environ.get(FORCE_EXIT_TESTSFAILED_ENV) == '1':
        # XXX this is a place for future improvements
# Test wrapper should terminate with 0 exit code if there are common test failures
@@ -110,7 +110,7 @@ def main():
ps.print_stats()
sys.exit(rc)
-
-
-if __name__ == '__main__':
- main()
+
+
+if __name__ == '__main__':
+ main()
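
For reference, the exit-code handling above relies on pytest's documented convention that return code 5 means no tests were collected; a hedged standalone sketch of the same wrapper shape:

    import sys

    import pytest

    def run():
        # plugins=[...] accepts plain objects that expose pytest hook methods
        rc = pytest.main(plugins=[])
        if rc == 5:
            # EXIT_NOTESTSCOLLECTED: treat an empty test set as success
            rc = 0
        sys.exit(rc)

    if __name__ == '__main__':
        run()
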
diff --git a/library/python/pytest/plugins/collection.py b/library/python/pytest/plugins/collection.py
index 1535da686c..e36f47a78f 100644
--- a/library/python/pytest/plugins/collection.py
+++ b/library/python/pytest/plugins/collection.py
@@ -1,26 +1,26 @@
import os
-import sys
+import sys
from six import reraise
-
-import py
+
+import py
import pytest # noqa
-import _pytest.python
-import _pytest.doctest
+import _pytest.python
+import _pytest.doctest
import json
import library.python.testing.filter.filter as test_filter
-
-
-class LoadedModule(_pytest.python.Module):
+
+
+class LoadedModule(_pytest.python.Module):
def __init__(self, parent, name, **kwargs):
self.name = name + '.py'
self.session = parent
self.parent = parent
self.config = parent.config
- self.keywords = {}
+ self.keywords = {}
self.own_markers = []
self.fspath = py.path.local()
-
+
@classmethod
def from_parent(cls, **kwargs):
namespace = kwargs.pop('namespace', True)
@@ -31,7 +31,7 @@ class LoadedModule(_pytest.python.Module):
return loaded_module
- @property
+ @property
def _nodeid(self):
if os.getenv('CONFTEST_LOAD_POLICY') == 'LOCAL':
return self._getobj().__file__
@@ -41,25 +41,25 @@ class LoadedModule(_pytest.python.Module):
@property
def nodeid(self):
return self._nodeid
-
- def _getobj(self):
+
+ def _getobj(self):
module_name = self.name[:-len('.py')]
if self.namespace:
module_name = '__tests__.' + module_name
- __import__(module_name)
- return sys.modules[module_name]
-
-
-class DoctestModule(LoadedModule):
-
- def collect(self):
- import doctest
+ __import__(module_name)
+ return sys.modules[module_name]
+
+
+class DoctestModule(LoadedModule):
+
+ def collect(self):
+ import doctest
module = self._getobj()
# uses internal doctest module parsing mechanism
- finder = doctest.DocTestFinder()
- optionflags = _pytest.doctest.get_optionflags(self)
- runner = doctest.DebugRunner(verbose=0, optionflags=optionflags)
+ finder = doctest.DocTestFinder()
+ optionflags = _pytest.doctest.get_optionflags(self)
+ runner = doctest.DebugRunner(verbose=0, optionflags=optionflags)
try:
for test in finder.find(module, self.name[:-len('.py')]):
@@ -75,8 +75,8 @@ class DoctestModule(LoadedModule):
etype, exc, tb = sys.exc_info()
            msg = 'DoctestModule failed; you can probably add the NO_DOCTESTS() macro to ya.make'
reraise(etype, type(exc)('{}\n{}'.format(exc, msg)), tb)
-
-
+
+
# NOTE: Since we are overriding collect method of pytest session, pytest hooks are not invoked during collection.
def pytest_ignore_collect(module, session, filenames_from_full_filters, accept_filename_predicate):
if session.config.option.mode == 'list':
@@ -93,14 +93,14 @@ def pytest_ignore_collect(module, session, filenames_from_full_filters, accept_f
return False
-class CollectionPlugin(object):
+class CollectionPlugin(object):
def __init__(self, test_modules, doctest_modules):
- self._test_modules = test_modules
+ self._test_modules = test_modules
self._doctest_modules = doctest_modules
-
- def pytest_sessionstart(self, session):
-
- def collect(*args, **kwargs):
+
+ def pytest_sessionstart(self, session):
+
+ def collect(*args, **kwargs):
accept_filename_predicate = test_filter.make_py_file_filter(session.config.option.test_filter)
full_test_names_file_path = session.config.option.test_list_path
filenames_filter = None
@@ -111,7 +111,7 @@ class CollectionPlugin(object):
full_names_filter = set(json.load(afile)[int(session.config.option.modulo_index)])
filenames_filter = set(map(lambda x: x.split('::')[0], full_names_filter))
- for test_module in self._test_modules:
+ for test_module in self._test_modules:
module = LoadedModule.from_parent(name=test_module, parent=session)
if not pytest_ignore_collect(module, session, filenames_filter, accept_filename_predicate):
yield module
@@ -120,9 +120,9 @@ class CollectionPlugin(object):
module = DoctestModule.from_parent(name=test_module, parent=session)
if not pytest_ignore_collect(module, session, filenames_filter, accept_filename_predicate):
yield module
-
+
if os.environ.get('YA_PYTEST_DISABLE_DOCTEST', 'no') == 'no':
for doctest_module in self._doctest_modules:
yield DoctestModule.from_parent(name=doctest_module, parent=session, namespace=False)
- session.collect = collect
+ session.collect = collect
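
As the NOTE above warns, assigning session.collect sidesteps pytest's regular collection hooks entirely; a simplified sketch of the technique, reusing the LoadedModule class defined earlier (module names are illustrative):

    class PrebuiltCollectionPlugin(object):
        """Yield pre-built module nodes instead of scanning the filesystem."""

        def __init__(self, module_names):
            self._module_names = module_names  # e.g. names baked into the test binary

        def pytest_sessionstart(self, session):
            def collect():
                for name in self._module_names:
                    yield LoadedModule.from_parent(name=name, parent=session)

            # from here on, pytest_ignore_collect and friends no longer fire
            session.collect = collect
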
diff --git a/library/python/pytest/plugins/conftests.py b/library/python/pytest/plugins/conftests.py
index dfae771ef8..522041f5a7 100644
--- a/library/python/pytest/plugins/conftests.py
+++ b/library/python/pytest/plugins/conftests.py
@@ -1,26 +1,26 @@
import os
-import importlib
+import importlib
import sys
-import inspect
+import inspect
from pytest import hookimpl
from .fixtures import metrics, links # noqa
-
-orig_getfile = inspect.getfile
-
-
-def getfile(object):
- res = orig_getfile(object)
- if inspect.ismodule(object):
-        if not res and getattr(object, '__orig_file__', None):
- res = object.__orig_file__
- return res
-
-inspect.getfile = getfile
+
+orig_getfile = inspect.getfile
+
+
+def getfile(object):
+ res = orig_getfile(object)
+ if inspect.ismodule(object):
+        if not res and getattr(object, '__orig_file__', None):
+ res = object.__orig_file__
+ return res
+
+inspect.getfile = getfile
conftest_modules = []
-
-
+
+
@hookimpl(trylast=True)
def pytest_load_initial_conftests(early_config, parser, args):
conftests = filter(lambda name: name.endswith(".conftest"), sys.extra_modules)
@@ -45,6 +45,6 @@ def getconftestmodules(*args, **kwargs):
def pytest_sessionstart(session):
- # Override filesystem based relevant conftest discovery on the call path
- assert session.config.pluginmanager
- session.config.pluginmanager._getconftestmodules = getconftestmodules
+ # Override filesystem based relevant conftest discovery on the call path
+ assert session.config.pluginmanager
+ session.config.pluginmanager._getconftestmodules = getconftestmodules
diff --git a/library/python/pytest/plugins/fixtures.py b/library/python/pytest/plugins/fixtures.py
index 9f5fd6ccf1..6f7e0a27e4 100644
--- a/library/python/pytest/plugins/fixtures.py
+++ b/library/python/pytest/plugins/fixtures.py
@@ -1,26 +1,26 @@
-import os
-import pytest
+import os
+import pytest
import six
-
-
-MAX_ALLOWED_LINKS_COUNT = 10
-
-
-@pytest.fixture
-def metrics(request):
-
- class Metrics(object):
- @classmethod
- def set(cls, name, value):
-            assert len(name) <= 128, "Length of the metric name must not exceed 128"
- assert type(value) in [int, float], "Metric value must be of type int or float"
- test_name = request.node.nodeid
- if test_name not in request.config.test_metrics:
- request.config.test_metrics[test_name] = {}
- request.config.test_metrics[test_name][name] = value
-
- @classmethod
- def set_benchmark(cls, benchmark_values):
+
+
+MAX_ALLOWED_LINKS_COUNT = 10
+
+
+@pytest.fixture
+def metrics(request):
+
+ class Metrics(object):
+ @classmethod
+ def set(cls, name, value):
+            assert len(name) <= 128, "Length of the metric name must not exceed 128"
+ assert type(value) in [int, float], "Metric value must be of type int or float"
+ test_name = request.node.nodeid
+ if test_name not in request.config.test_metrics:
+ request.config.test_metrics[test_name] = {}
+ request.config.test_metrics[test_name][name] = value
+
+ @classmethod
+ def set_benchmark(cls, benchmark_values):
# report of google has key 'benchmarks' which is a list of benchmark results
# yandex benchmark has key 'benchmark', which is a list of benchmark results
# use this to differentiate which kind of result it is
@@ -31,12 +31,12 @@ def metrics(request):
@classmethod
def set_ybenchmark(cls, benchmark_values):
- for benchmark in benchmark_values["benchmark"]:
- name = benchmark["name"]
+ for benchmark in benchmark_values["benchmark"]:
+ name = benchmark["name"]
for key, value in six.iteritems(benchmark):
- if key != "name":
- cls.set("{}_{}".format(name, key), value)
-
+ if key != "name":
+ cls.set("{}_{}".format(name, key), value)
+
@classmethod
def set_gbenchmark(cls, benchmark_values):
time_unit_multipliers = {"ns": 1, "us": 1000, "ms": 1000000}
@@ -50,36 +50,36 @@ def metrics(request):
cls.set("{}_{}".format(name, k), v * time_unit_mult)
elif k not in ignore_keys and isinstance(v, (float, int)):
cls.set("{}_{}".format(name, k), v)
- return Metrics
-
-
-@pytest.fixture
-def links(request):
-
- class Links(object):
- @classmethod
- def set(cls, name, path):
-
- if len(request.config.test_logs[request.node.nodeid]) >= MAX_ALLOWED_LINKS_COUNT:
- raise Exception("Cannot add more than {} links to test".format(MAX_ALLOWED_LINKS_COUNT))
-
- reserved_names = ["log", "logsdir", "stdout", "stderr"]
- if name in reserved_names:
- raise Exception("Attachment name should not belong to the reserved list: {}".format(", ".join(reserved_names)))
- output_dir = request.config.ya.output_dir
-
- if not os.path.exists(path):
- raise Exception("Path to be attached does not exist: {}".format(path))
-
- if os.path.isabs(path) and ".." in os.path.relpath(path, output_dir):
- raise Exception("Test attachment must be inside yatest.common.output_path()")
-
- request.config.test_logs[request.node.nodeid][name] = path
-
- @classmethod
- def get(cls, name):
- if name not in request.config.test_logs[request.node.nodeid]:
- raise KeyError("Attachment with name '{}' does not exist".format(name))
- return request.config.test_logs[request.node.nodeid][name]
-
- return Links
+ return Metrics
+
+
+@pytest.fixture
+def links(request):
+
+ class Links(object):
+ @classmethod
+ def set(cls, name, path):
+
+ if len(request.config.test_logs[request.node.nodeid]) >= MAX_ALLOWED_LINKS_COUNT:
+ raise Exception("Cannot add more than {} links to test".format(MAX_ALLOWED_LINKS_COUNT))
+
+ reserved_names = ["log", "logsdir", "stdout", "stderr"]
+ if name in reserved_names:
+ raise Exception("Attachment name should not belong to the reserved list: {}".format(", ".join(reserved_names)))
+ output_dir = request.config.ya.output_dir
+
+ if not os.path.exists(path):
+ raise Exception("Path to be attached does not exist: {}".format(path))
+
+ if os.path.isabs(path) and ".." in os.path.relpath(path, output_dir):
+ raise Exception("Test attachment must be inside yatest.common.output_path()")
+
+ request.config.test_logs[request.node.nodeid][name] = path
+
+ @classmethod
+ def get(cls, name):
+ if name not in request.config.test_logs[request.node.nodeid]:
+ raise KeyError("Attachment with name '{}' does not exist".format(name))
+ return request.config.test_logs[request.node.nodeid][name]
+
+ return Links
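
Both fixtures are consumed like any other pytest fixture; a hedged usage example in which the test name, metric, and attachment are illustrative:

    def test_sort_speed(metrics, links):
        metrics.set("sort_time_ms", 42.0)        # value must be int or float
        with open("profile.txt", "w") as afile:  # links.set requires an existing path
            afile.write("profile data")
        links.set("profile", "profile.txt")      # "log"/"logsdir"/"stdout"/"stderr" are reserved
        assert links.get("profile") == "profile.txt"
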
diff --git a/library/python/pytest/plugins/ya.make b/library/python/pytest/plugins/ya.make
index 07914cf4d3..c15d6f759d 100644
--- a/library/python/pytest/plugins/ya.make
+++ b/library/python/pytest/plugins/ya.make
@@ -1,20 +1,20 @@
OWNER(g:yatest)
-
+
PY23_LIBRARY()
-
-PY_SRCS(
- ya.py
- collection.py
- conftests.py
- fixtures.py
-)
-
-PEERDIR(
+
+PY_SRCS(
+ ya.py
+ collection.py
+ conftests.py
+ fixtures.py
+)
+
+PEERDIR(
library/python/filelock
- library/python/find_root
+ library/python/find_root
library/python/testing/filter
-)
-
+)
+
IF (PYTHON2)
PY_SRCS(
fakeid_py2.py
@@ -29,4 +29,4 @@ ELSE()
)
ENDIF()
-END()
+END()
diff --git a/library/python/pytest/plugins/ya.py b/library/python/pytest/plugins/ya.py
index c6b06478d9..1bde03042d 100644
--- a/library/python/pytest/plugins/ya.py
+++ b/library/python/pytest/plugins/ya.py
@@ -3,101 +3,101 @@
import base64
import errno
import re
-import sys
-import os
-import logging
-import fnmatch
-import json
-import time
+import sys
+import os
+import logging
+import fnmatch
+import json
+import time
import traceback
-import collections
+import collections
import signal
import inspect
import warnings
import attr
import faulthandler
-import py
-import pytest
+import py
+import pytest
import six
-import _pytest
+import _pytest
import _pytest._io
import _pytest.mark
import _pytest.outcomes
import _pytest.skipping
-
+
from _pytest.warning_types import PytestUnhandledCoroutineWarning
from yatest_lib import test_splitter
-try:
- import resource
-except ImportError:
- resource = None
-
-try:
- import library.python.pytest.yatest_tools as tools
-except ImportError:
- # fallback for pytest script mode
- import yatest_tools as tools
-
+try:
+ import resource
+except ImportError:
+ resource = None
+
+try:
+ import library.python.pytest.yatest_tools as tools
+except ImportError:
+ # fallback for pytest script mode
+ import yatest_tools as tools
+
try:
from library.python import filelock
except ImportError:
filelock = None
-import yatest_lib.tools
-
-import yatest_lib.external as canon
-
+import yatest_lib.tools
+
+import yatest_lib.external as canon
+
import yatest_lib.ya
from library.python.pytest import context
-console_logger = logging.getLogger("console")
-yatest_logger = logging.getLogger("ya.test")
-
-
-_pytest.main.EXIT_NOTESTSCOLLECTED = 0
+console_logger = logging.getLogger("console")
+yatest_logger = logging.getLogger("ya.test")
+
+
+_pytest.main.EXIT_NOTESTSCOLLECTED = 0
SHUTDOWN_REQUESTED = False
-
+
pytest_config = None
-
-def configure_pdb_on_demand():
- import signal
-
- if hasattr(signal, "SIGUSR1"):
- def on_signal(*args):
+
+def configure_pdb_on_demand():
+ import signal
+
+ if hasattr(signal, "SIGUSR1"):
+ def on_signal(*args):
import ipdb
ipdb.set_trace()
-
- signal.signal(signal.SIGUSR1, on_signal)
-
-
-class CustomImporter(object):
- def __init__(self, roots):
- self._roots = roots
-
- def find_module(self, fullname, package_path=None):
- for path in self._roots:
- full_path = self._get_module_path(path, fullname)
-
- if os.path.exists(full_path) and os.path.isdir(full_path) and not os.path.exists(os.path.join(full_path, "__init__.py")):
- open(os.path.join(full_path, "__init__.py"), "w").close()
-
- return None
-
- def _get_module_path(self, path, fullname):
- return os.path.join(path, *fullname.split('.'))
-
-
-class YaTestLoggingFileHandler(logging.FileHandler):
- pass
-
-
+
+ signal.signal(signal.SIGUSR1, on_signal)
+
+
+class CustomImporter(object):
+ def __init__(self, roots):
+ self._roots = roots
+
+ def find_module(self, fullname, package_path=None):
+ for path in self._roots:
+ full_path = self._get_module_path(path, fullname)
+
+ if os.path.exists(full_path) and os.path.isdir(full_path) and not os.path.exists(os.path.join(full_path, "__init__.py")):
+ open(os.path.join(full_path, "__init__.py"), "w").close()
+
+ return None
+
+ def _get_module_path(self, path, fullname):
+ return os.path.join(path, *fullname.split('.'))
+
+
+class YaTestLoggingFileHandler(logging.FileHandler):
+ pass
+
+
class _TokenFilterFormatter(logging.Formatter):
def __init__(self, fmt):
super(_TokenFilterFormatter, self).__init__(fmt)
@@ -123,141 +123,141 @@ class _TokenFilterFormatter(logging.Formatter):
return self._filter(super(_TokenFilterFormatter, self).format(record))
-def setup_logging(log_path, level=logging.DEBUG, *other_logs):
- logs = [log_path] + list(other_logs)
- root_logger = logging.getLogger()
- for i in range(len(root_logger.handlers) - 1, -1, -1):
- if isinstance(root_logger.handlers[i], YaTestLoggingFileHandler):
+def setup_logging(log_path, level=logging.DEBUG, *other_logs):
+ logs = [log_path] + list(other_logs)
+ root_logger = logging.getLogger()
+ for i in range(len(root_logger.handlers) - 1, -1, -1):
+ if isinstance(root_logger.handlers[i], YaTestLoggingFileHandler):
root_logger.handlers.pop(i).close()
- root_logger.setLevel(level)
- for log_file in logs:
- file_handler = YaTestLoggingFileHandler(log_file)
- log_format = '%(asctime)s - %(levelname)s - %(name)s - %(funcName)s: %(message)s'
+ root_logger.setLevel(level)
+ for log_file in logs:
+ file_handler = YaTestLoggingFileHandler(log_file)
+ log_format = '%(asctime)s - %(levelname)s - %(name)s - %(funcName)s: %(message)s'
file_handler.setFormatter(_TokenFilterFormatter(log_format))
- file_handler.setLevel(level)
- root_logger.addHandler(file_handler)
-
-
-def pytest_addoption(parser):
- parser.addoption("--build-root", action="store", dest="build_root", default="", help="path to the build root")
- parser.addoption("--dep-root", action="append", dest="dep_roots", default=[], help="path to the dep build roots")
- parser.addoption("--source-root", action="store", dest="source_root", default="", help="path to the source root")
- parser.addoption("--data-root", action="store", dest="data_root", default="", help="path to the arcadia_tests_data root")
- parser.addoption("--output-dir", action="store", dest="output_dir", default="", help="path to the test output dir")
-    parser.addoption("--python-path", action="store", dest="python_path", default="", help="path to the canonical python binary")
-    parser.addoption("--valgrind-path", action="store", dest="valgrind_path", default="", help="path to the canonical valgrind binary")
- parser.addoption("--test-filter", action="append", dest="test_filter", default=None, help="test filter")
+ file_handler.setLevel(level)
+ root_logger.addHandler(file_handler)
+
+
+def pytest_addoption(parser):
+ parser.addoption("--build-root", action="store", dest="build_root", default="", help="path to the build root")
+ parser.addoption("--dep-root", action="append", dest="dep_roots", default=[], help="path to the dep build roots")
+ parser.addoption("--source-root", action="store", dest="source_root", default="", help="path to the source root")
+ parser.addoption("--data-root", action="store", dest="data_root", default="", help="path to the arcadia_tests_data root")
+ parser.addoption("--output-dir", action="store", dest="output_dir", default="", help="path to the test output dir")
+    parser.addoption("--python-path", action="store", dest="python_path", default="", help="path to the canonical python binary")
+    parser.addoption("--valgrind-path", action="store", dest="valgrind_path", default="", help="path to the canonical valgrind binary")
+ parser.addoption("--test-filter", action="append", dest="test_filter", default=None, help="test filter")
parser.addoption("--test-file-filter", action="store", dest="test_file_filter", default=None, help="test file filter")
- parser.addoption("--test-param", action="append", dest="test_params", default=None, help="test parameters")
- parser.addoption("--test-log-level", action="store", dest="test_log_level", choices=["critical", "error", "warning", "info", "debug"], default="debug", help="test log level")
+ parser.addoption("--test-param", action="append", dest="test_params", default=None, help="test parameters")
+ parser.addoption("--test-log-level", action="store", dest="test_log_level", choices=["critical", "error", "warning", "info", "debug"], default="debug", help="test log level")
parser.addoption("--mode", action="store", choices=[yatest_lib.ya.RunMode.List, yatest_lib.ya.RunMode.Run], dest="mode", default=yatest_lib.ya.RunMode.Run, help="testing mode")
parser.addoption("--test-list-file", action="store", dest="test_list_file")
- parser.addoption("--modulo", default=1, type=int)
- parser.addoption("--modulo-index", default=0, type=int)
+ parser.addoption("--modulo", default=1, type=int)
+ parser.addoption("--modulo-index", default=0, type=int)
parser.addoption("--partition-mode", default='SEQUENTIAL', help="Split tests according to partitoin mode")
- parser.addoption("--split-by-tests", action='store_true', help="Split test execution by tests instead of suites", default=False)
- parser.addoption("--project-path", action="store", default="", help="path to CMakeList where test is declared")
- parser.addoption("--build-type", action="store", default="", help="build type")
+ parser.addoption("--split-by-tests", action='store_true', help="Split test execution by tests instead of suites", default=False)
+ parser.addoption("--project-path", action="store", default="", help="path to CMakeList where test is declared")
+ parser.addoption("--build-type", action="store", default="", help="build type")
parser.addoption("--flags", action="append", dest="flags", default=[], help="build flags (-D)")
parser.addoption("--sanitize", action="store", default="", help="sanitize mode")
- parser.addoption("--test-stderr", action="store_true", default=False, help="test stderr")
+ parser.addoption("--test-stderr", action="store_true", default=False, help="test stderr")
parser.addoption("--test-debug", action="store_true", default=False, help="test debug mode")
- parser.addoption("--root-dir", action="store", default=None)
- parser.addoption("--ya-trace", action="store", dest="ya_trace_path", default=None, help="path to ya trace report")
+ parser.addoption("--root-dir", action="store", default=None)
+ parser.addoption("--ya-trace", action="store", dest="ya_trace_path", default=None, help="path to ya trace report")
parser.addoption("--ya-version", action="store", dest="ya_version", default=0, type=int, help="allows to be compatible with ya and the new changes in ya-dev")
- parser.addoption(
- "--test-suffix", action="store", dest="test_suffix", default=None, help="add suffix to every test name"
- )
-    parser.addoption("--gdb-path", action="store", dest="gdb_path", default="", help="path to the canonical gdb binary")
-    parser.addoption("--collect-cores", action="store_true", dest="collect_cores", default=False, help="allows core dump file recovery during tests")
+ parser.addoption(
+ "--test-suffix", action="store", dest="test_suffix", default=None, help="add suffix to every test name"
+ )
+    parser.addoption("--gdb-path", action="store", dest="gdb_path", default="", help="path to the canonical gdb binary")
+    parser.addoption("--collect-cores", action="store_true", dest="collect_cores", default=False, help="allows core dump file recovery during tests")
parser.addoption("--sanitizer-extra-checks", action="store_true", dest="sanitizer_extra_checks", default=False, help="enables extra checks for tests built with sanitizers")
- parser.addoption("--report-deselected", action="store_true", dest="report_deselected", default=False, help="report deselected tests to the trace file")
- parser.addoption("--pdb-on-sigusr1", action="store_true", default=False, help="setup pdb.set_trace on SIGUSR1")
+ parser.addoption("--report-deselected", action="store_true", dest="report_deselected", default=False, help="report deselected tests to the trace file")
+ parser.addoption("--pdb-on-sigusr1", action="store_true", default=False, help="setup pdb.set_trace on SIGUSR1")
parser.addoption("--test-tool-bin", help="Path to test_tool")
parser.addoption("--test-list-path", dest="test_list_path", action="store", help="path to test list", default="")
-
-
+
+
def from_ya_test():
return "YA_TEST_RUNNER" in os.environ
-def pytest_configure(config):
+def pytest_configure(config):
global pytest_config
pytest_config = config
config.option.continue_on_collection_errors = True
-
+
config.addinivalue_line("markers", "ya:external")
config.from_ya_test = from_ya_test()
- config.test_logs = collections.defaultdict(dict)
- config.test_metrics = {}
+ config.test_logs = collections.defaultdict(dict)
+ config.test_metrics = {}
config.suite_metrics = {}
config.configure_timestamp = time.time()
- context = {
- "project_path": config.option.project_path,
- "test_stderr": config.option.test_stderr,
+ context = {
+ "project_path": config.option.project_path,
+ "test_stderr": config.option.test_stderr,
"test_debug": config.option.test_debug,
- "build_type": config.option.build_type,
- "test_traceback": config.option.tbstyle,
+ "build_type": config.option.build_type,
+ "test_traceback": config.option.tbstyle,
"flags": config.option.flags,
"sanitize": config.option.sanitize,
- }
+ }
if config.option.collectonly:
config.option.mode = yatest_lib.ya.RunMode.List
config.ya = yatest_lib.ya.Ya(
- config.option.mode,
- config.option.source_root,
- config.option.build_root,
- config.option.dep_roots,
- config.option.output_dir,
- config.option.test_params,
- context,
- config.option.python_path,
- config.option.valgrind_path,
- config.option.gdb_path,
- config.option.data_root,
- )
- config.option.test_log_level = {
- "critical": logging.CRITICAL,
- "error": logging.ERROR,
- "warning": logging.WARN,
- "info": logging.INFO,
- "debug": logging.DEBUG,
- }[config.option.test_log_level]
-
- if not config.option.collectonly:
- setup_logging(os.path.join(config.ya.output_dir, "run.log"), config.option.test_log_level)
- config.current_item_nodeid = None
- config.current_test_name = None
- config.test_cores_count = 0
- config.collect_cores = config.option.collect_cores
+ config.option.mode,
+ config.option.source_root,
+ config.option.build_root,
+ config.option.dep_roots,
+ config.option.output_dir,
+ config.option.test_params,
+ context,
+ config.option.python_path,
+ config.option.valgrind_path,
+ config.option.gdb_path,
+ config.option.data_root,
+ )
+ config.option.test_log_level = {
+ "critical": logging.CRITICAL,
+ "error": logging.ERROR,
+ "warning": logging.WARN,
+ "info": logging.INFO,
+ "debug": logging.DEBUG,
+ }[config.option.test_log_level]
+
+ if not config.option.collectonly:
+ setup_logging(os.path.join(config.ya.output_dir, "run.log"), config.option.test_log_level)
+ config.current_item_nodeid = None
+ config.current_test_name = None
+ config.test_cores_count = 0
+ config.collect_cores = config.option.collect_cores
config.sanitizer_extra_checks = config.option.sanitizer_extra_checks
try:
config.test_tool_bin = config.option.test_tool_bin
except AttributeError:
logging.info("test_tool_bin not specified")
-
- if config.sanitizer_extra_checks:
+
+ if config.sanitizer_extra_checks:
for envvar in ['LSAN_OPTIONS', 'ASAN_OPTIONS']:
if envvar in os.environ:
os.environ.pop(envvar)
if envvar + '_ORIGINAL' in os.environ:
os.environ[envvar] = os.environ[envvar + '_ORIGINAL']
- if config.option.root_dir:
+ if config.option.root_dir:
config.rootdir = py.path.local(config.option.root_dir)
config.invocation_params = attr.evolve(config.invocation_params, dir=config.rootdir)
-
+
extra_sys_path = []
# Arcadia paths from the test DEPENDS section of ya.make
extra_sys_path.append(os.path.join(config.option.source_root, config.option.project_path))
-    # Build root is required for correct import of protobufs, because imports are relative to the root
-    # (like import devtools.dummy_arcadia.protos.lib.my_proto_pb2)
+    # Build root is required for correct import of protobufs, because imports are relative to the root
+    # (like import devtools.dummy_arcadia.protos.lib.my_proto_pb2)
extra_sys_path.append(config.option.build_root)
-
+
for path in config.option.dep_roots:
if os.path.isabs(path):
extra_sys_path.append(path)
@@ -272,17 +272,17 @@ def pytest_configure(config):
os.environ["PYTHONPATH"] = os.pathsep.join(sys.path)
- if not config.option.collectonly:
- if config.option.ya_trace_path:
- config.ya_trace_reporter = TraceReportGenerator(config.option.ya_trace_path)
- else:
- config.ya_trace_reporter = DryTraceReportGenerator(config.option.ya_trace_path)
+ if not config.option.collectonly:
+ if config.option.ya_trace_path:
+ config.ya_trace_reporter = TraceReportGenerator(config.option.ya_trace_path)
+ else:
+ config.ya_trace_reporter = DryTraceReportGenerator(config.option.ya_trace_path)
config.ya_version = config.option.ya_version
-
- sys.meta_path.append(CustomImporter([config.option.build_root] + [os.path.join(config.option.build_root, dep) for dep in config.option.dep_roots]))
- if config.option.pdb_on_sigusr1:
- configure_pdb_on_demand()
-
+
+ sys.meta_path.append(CustomImporter([config.option.build_root] + [os.path.join(config.option.build_root, dep) for dep in config.option.dep_roots]))
+ if config.option.pdb_on_sigusr1:
+ configure_pdb_on_demand()
+
# Dump python backtrace in case of any errors
faulthandler.enable()
if hasattr(signal, "SIGQUIT"):
@@ -291,7 +291,7 @@ def pytest_configure(config):
if hasattr(signal, "SIGUSR2"):
signal.signal(signal.SIGUSR2, _graceful_shutdown)
-
+
session_should_exit = False
@@ -327,122 +327,122 @@ def _graceful_shutdown(*args):
_graceful_shutdown_on_log(not capman.is_globally_capturing())
-def _get_rusage():
- return resource and resource.getrusage(resource.RUSAGE_SELF)
-
-
-def _collect_test_rusage(item):
- if resource and hasattr(item, "rusage"):
- finish_rusage = _get_rusage()
+def _get_rusage():
+ return resource and resource.getrusage(resource.RUSAGE_SELF)
+
+
+def _collect_test_rusage(item):
+ if resource and hasattr(item, "rusage"):
+ finish_rusage = _get_rusage()
ya_inst = pytest_config.ya
-
- def add_metric(attr_name, metric_name=None, modifier=None):
- if not metric_name:
- metric_name = attr_name
- if not modifier:
- modifier = lambda x: x
- if hasattr(item.rusage, attr_name):
+
+ def add_metric(attr_name, metric_name=None, modifier=None):
+ if not metric_name:
+ metric_name = attr_name
+ if not modifier:
+ modifier = lambda x: x
+ if hasattr(item.rusage, attr_name):
ya_inst.set_metric_value(metric_name, modifier(getattr(finish_rusage, attr_name) - getattr(item.rusage, attr_name)))
-
- for args in [
- ("ru_maxrss", "ru_rss", lambda x: x*1024), # to be the same as in util/system/rusage.cpp
- ("ru_utime",),
- ("ru_stime",),
- ("ru_ixrss", None, lambda x: x*1024),
- ("ru_idrss", None, lambda x: x*1024),
- ("ru_isrss", None, lambda x: x*1024),
- ("ru_majflt", "ru_major_pagefaults"),
- ("ru_minflt", "ru_minor_pagefaults"),
- ("ru_nswap",),
- ("ru_inblock",),
- ("ru_oublock",),
- ("ru_msgsnd",),
- ("ru_msgrcv",),
- ("ru_nsignals",),
- ("ru_nvcsw",),
- ("ru_nivcsw",),
- ]:
- add_metric(*args)
-
-
-def _get_item_tags(item):
- tags = []
- for key, value in item.keywords.items():
+
+ for args in [
+ ("ru_maxrss", "ru_rss", lambda x: x*1024), # to be the same as in util/system/rusage.cpp
+ ("ru_utime",),
+ ("ru_stime",),
+ ("ru_ixrss", None, lambda x: x*1024),
+ ("ru_idrss", None, lambda x: x*1024),
+ ("ru_isrss", None, lambda x: x*1024),
+ ("ru_majflt", "ru_major_pagefaults"),
+ ("ru_minflt", "ru_minor_pagefaults"),
+ ("ru_nswap",),
+ ("ru_inblock",),
+ ("ru_oublock",),
+ ("ru_msgsnd",),
+ ("ru_msgrcv",),
+ ("ru_nsignals",),
+ ("ru_nvcsw",),
+ ("ru_nivcsw",),
+ ]:
+ add_metric(*args)
+
+
+def _get_item_tags(item):
+ tags = []
+ for key, value in item.keywords.items():
if key == 'pytestmark' and isinstance(value, list):
for mark in value:
tags.append(mark.name)
elif isinstance(value, _pytest.mark.MarkDecorator):
- tags.append(key)
- return tags
-
-
-def pytest_runtest_setup(item):
- item.rusage = _get_rusage()
+ tags.append(key)
+ return tags
+
+
+def pytest_runtest_setup(item):
+ item.rusage = _get_rusage()
pytest_config.test_cores_count = 0
pytest_config.current_item_nodeid = item.nodeid
- class_name, test_name = tools.split_node_id(item.nodeid)
+ class_name, test_name = tools.split_node_id(item.nodeid)
test_log_path = tools.get_test_log_file_path(pytest_config.ya.output_dir, class_name, test_name)
- setup_logging(
+ setup_logging(
os.path.join(pytest_config.ya.output_dir, "run.log"),
pytest_config.option.test_log_level,
- test_log_path
- )
+ test_log_path
+ )
pytest_config.test_logs[item.nodeid]['log'] = test_log_path
pytest_config.test_logs[item.nodeid]['logsdir'] = pytest_config.ya.output_dir
pytest_config.current_test_log_path = test_log_path
pytest_config.current_test_name = "{}::{}".format(class_name, test_name)
- separator = "#" * 100
- yatest_logger.info(separator)
- yatest_logger.info(test_name)
- yatest_logger.info(separator)
- yatest_logger.info("Test setup")
-
+ separator = "#" * 100
+ yatest_logger.info(separator)
+ yatest_logger.info(test_name)
+ yatest_logger.info(separator)
+ yatest_logger.info("Test setup")
+
test_item = CrashedTestItem(item.nodeid, pytest_config.option.test_suffix)
pytest_config.ya_trace_reporter.on_start_test_class(test_item)
pytest_config.ya_trace_reporter.on_start_test_case(test_item)
-
-
-def pytest_runtest_teardown(item, nextitem):
- yatest_logger.info("Test teardown")
-
-
-def pytest_runtest_call(item):
+
+
+def pytest_runtest_teardown(item, nextitem):
+ yatest_logger.info("Test teardown")
+
+
+def pytest_runtest_call(item):
class_name, test_name = tools.split_node_id(item.nodeid)
yatest_logger.info("Test call (class_name: %s, test_name: %s)", class_name, test_name)
-
-
-def pytest_deselected(items):
+
+
+def pytest_deselected(items):
config = pytest_config
- if config.option.report_deselected:
- for item in items:
- deselected_item = DeselectedTestItem(item.nodeid, config.option.test_suffix)
- config.ya_trace_reporter.on_start_test_class(deselected_item)
- config.ya_trace_reporter.on_finish_test_case(deselected_item)
- config.ya_trace_reporter.on_finish_test_class(deselected_item)
-
-
-@pytest.mark.trylast
-def pytest_collection_modifyitems(items, config):
-
- def filter_items(filters):
- filtered_items = []
- deselected_items = []
- for item in items:
+ if config.option.report_deselected:
+ for item in items:
+ deselected_item = DeselectedTestItem(item.nodeid, config.option.test_suffix)
+ config.ya_trace_reporter.on_start_test_class(deselected_item)
+ config.ya_trace_reporter.on_finish_test_case(deselected_item)
+ config.ya_trace_reporter.on_finish_test_class(deselected_item)
+
+
+@pytest.mark.trylast
+def pytest_collection_modifyitems(items, config):
+
+ def filter_items(filters):
+ filtered_items = []
+ deselected_items = []
+ for item in items:
canonical_node_id = str(CustomTestItem(item.nodeid, pytest_config.option.test_suffix))
- matched = False
- for flt in filters:
+ matched = False
+ for flt in filters:
if "::" not in flt and "*" not in flt:
- flt += "*" # add support for filtering by module name
- if canonical_node_id.endswith(flt) or fnmatch.fnmatch(tools.escape_for_fnmatch(canonical_node_id), tools.escape_for_fnmatch(flt)):
- matched = True
- if matched:
- filtered_items.append(item)
- else:
- deselected_items.append(item)
-
- config.hook.pytest_deselected(items=deselected_items)
- items[:] = filtered_items
-
+ flt += "*" # add support for filtering by module name
+ if canonical_node_id.endswith(flt) or fnmatch.fnmatch(tools.escape_for_fnmatch(canonical_node_id), tools.escape_for_fnmatch(flt)):
+ matched = True
+ if matched:
+ filtered_items.append(item)
+ else:
+ deselected_items.append(item)
+
+ config.hook.pytest_deselected(items=deselected_items)
+ items[:] = filtered_items
+
def filter_by_full_name(filters):
filter_set = {flt for flt in filters}
filtered_items = []
@@ -456,10 +456,10 @@ def pytest_collection_modifyitems(items, config):
config.hook.pytest_deselected(items=deselected_items)
items[:] = filtered_items
- # XXX - check to be removed when tests for peerdirs don't run
- for item in items:
- if not item.nodeid:
- item._nodeid = os.path.basename(item.location[0])
+ # XXX - check to be removed when tests for peerdirs don't run
+ for item in items:
+ if not item.nodeid:
+ item._nodeid = os.path.basename(item.location[0])
if os.path.exists(config.option.test_list_path):
with open(config.option.test_list_path, 'r') as afile:
chunks = json.load(afile)
@@ -490,39 +490,39 @@ def pytest_collection_modifyitems(items, config):
for item in chunk_items:
items.extend(item)
yatest_logger.info("Modulo %s tests are: %s", modulo_index, chunk_items)
-
+
if config.option.mode == yatest_lib.ya.RunMode.Run:
- for item in items:
- test_item = NotLaunchedTestItem(item.nodeid, config.option.test_suffix)
- config.ya_trace_reporter.on_start_test_class(test_item)
- config.ya_trace_reporter.on_finish_test_case(test_item)
- config.ya_trace_reporter.on_finish_test_class(test_item)
+ for item in items:
+ test_item = NotLaunchedTestItem(item.nodeid, config.option.test_suffix)
+ config.ya_trace_reporter.on_start_test_class(test_item)
+ config.ya_trace_reporter.on_finish_test_case(test_item)
+ config.ya_trace_reporter.on_finish_test_class(test_item)
elif config.option.mode == yatest_lib.ya.RunMode.List:
- tests = []
- for item in items:
+ tests = []
+ for item in items:
item = CustomTestItem(item.nodeid, pytest_config.option.test_suffix, item.keywords)
- record = {
- "class": item.class_name,
- "test": item.test_name,
- "tags": _get_item_tags(item),
- }
- tests.append(record)
+ record = {
+ "class": item.class_name,
+ "test": item.test_name,
+ "tags": _get_item_tags(item),
+ }
+ tests.append(record)
if config.option.test_list_file:
with open(config.option.test_list_file, 'w') as afile:
json.dump(tests, afile)
# TODO prettyboy remove after test_tool release - currently it's required for backward compatibility
- sys.stderr.write(json.dumps(tests))
-
-
-def pytest_collectreport(report):
- if not report.passed:
+ sys.stderr.write(json.dumps(tests))
+
+
+def pytest_collectreport(report):
+ if not report.passed:
if hasattr(pytest_config, 'ya_trace_reporter'):
test_item = TestItem(report, None, pytest_config.option.test_suffix)
pytest_config.ya_trace_reporter.on_error(test_item)
- else:
- sys.stderr.write(yatest_lib.tools.to_utf8(report.longrepr))
-
-
+ else:
+ sys.stderr.write(yatest_lib.tools.to_utf8(report.longrepr))
+
+
@pytest.mark.tryfirst
def pytest_pyfunc_call(pyfuncitem):
testfunction = pyfuncitem.obj
@@ -542,7 +542,7 @@ def pytest_pyfunc_call(pyfuncitem):
@pytest.hookimpl(hookwrapper=True)
-def pytest_runtest_makereport(item, call):
+def pytest_runtest_makereport(item, call):
def logreport(report, result, call):
test_item = TestItem(report, result, pytest_config.option.test_suffix)
if not pytest_config.suite_metrics and context.Ctx.get("YA_PYTEST_START_TIMESTAMP"):
@@ -554,24 +554,24 @@ def pytest_runtest_makereport(item, call):
if report.outcome == "failed":
yatest_logger.error(report.longrepr)
- if report.when == "call":
- _collect_test_rusage(item)
+ if report.when == "call":
+ _collect_test_rusage(item)
pytest_config.ya_trace_reporter.on_finish_test_case(test_item)
- elif report.when == "setup":
+ elif report.when == "setup":
pytest_config.ya_trace_reporter.on_start_test_class(test_item)
- if report.outcome != "passed":
+ if report.outcome != "passed":
pytest_config.ya_trace_reporter.on_start_test_case(test_item)
pytest_config.ya_trace_reporter.on_finish_test_case(test_item)
- else:
+ else:
pytest_config.ya_trace_reporter.on_start_test_case(test_item)
- elif report.when == "teardown":
- if report.outcome == "failed":
+ elif report.when == "teardown":
+ if report.outcome == "failed":
pytest_config.ya_trace_reporter.on_start_test_case(test_item)
pytest_config.ya_trace_reporter.on_finish_test_case(test_item)
else:
pytest_config.ya_trace_reporter.on_finish_test_case(test_item, duration_only=True)
pytest_config.ya_trace_reporter.on_finish_test_class(test_item)
-
+
outcome = yield
rep = outcome.get_result()
result = None
@@ -580,10 +580,10 @@ def pytest_runtest_makereport(item, call):
if not pytest_config.from_ya_test:
ti = TestItem(rep, result, pytest_config.option.test_suffix)
tr = pytest_config.pluginmanager.getplugin('terminalreporter')
- tr.write_line("{} - Validating canonical data is not supported when running standalone binary".format(ti), yellow=True, bold=True)
+ tr.write_line("{} - Validating canonical data is not supported when running standalone binary".format(ti), yellow=True, bold=True)
logreport(rep, result, call)
-
-
+
+
def pytest_make_parametrize_id(config, val, argname):
# Avoid <, > symbols in canondata file names
if inspect.isfunction(val) and val.__name__ == "<lambda>":
@@ -598,7 +598,7 @@ def get_formatted_error(report):
text += colorize(entry)
else:
text = colorize(report.longrepr)
- text = yatest_lib.tools.to_utf8(text)
+ text = yatest_lib.tools.to_utf8(text)
return text
@@ -616,9 +616,9 @@ def colorize(longrepr):
if hasattr(longrepr, 'reprtraceback') and hasattr(longrepr.reprtraceback, 'toterminal'):
longrepr.reprtraceback.toterminal(writer)
return io.getvalue().strip()
- return yatest_lib.tools.to_utf8(longrepr)
+ return yatest_lib.tools.to_utf8(longrepr)
- text = yatest_lib.tools.to_utf8(longrepr)
+ text = yatest_lib.tools.to_utf8(longrepr)
pos = text.find("E ")
if pos == -1:
return text
@@ -633,25 +633,25 @@ def colorize(longrepr):
return "{}[[bad]]{}".format(bt, error)
-class TestItem(object):
-
- def __init__(self, report, result, test_suffix):
- self._result = result
- self.nodeid = report.nodeid
- self._class_name, self._test_name = tools.split_node_id(self.nodeid, test_suffix)
- self._error = None
- self._status = None
- self._process_report(report)
- self._duration = hasattr(report, 'duration') and report.duration or 0
- self._keywords = getattr(report, "keywords", {})
-
- def _process_report(self, report):
- if report.longrepr:
- self.set_error(report)
- if hasattr(report, 'when') and report.when != "call":
- self.set_error(report.when + " failed:\n" + self._error)
- else:
- self.set_error("")
+class TestItem(object):
+
+ def __init__(self, report, result, test_suffix):
+ self._result = result
+ self.nodeid = report.nodeid
+ self._class_name, self._test_name = tools.split_node_id(self.nodeid, test_suffix)
+ self._error = None
+ self._status = None
+ self._process_report(report)
+ self._duration = hasattr(report, 'duration') and report.duration or 0
+ self._keywords = getattr(report, "keywords", {})
+
+ def _process_report(self, report):
+ if report.longrepr:
+ self.set_error(report)
+ if hasattr(report, 'when') and report.when != "call":
+ self.set_error(report.when + " failed:\n" + self._error)
+ else:
+ self.set_error("")
report_teststatus = _pytest.skipping.pytest_report_teststatus(report)
if report_teststatus is not None:
@@ -667,89 +667,89 @@ class TestItem(object):
self._status = 'skipped'
self.set_error(yatest_lib.tools.to_utf8(report.longrepr[-1]))
elif report.passed:
- self._status = 'good'
- self.set_error("")
+ self._status = 'good'
+ self.set_error("")
else:
self._status = 'fail'
-
- @property
- def status(self):
- return self._status
-
- def set_status(self, status):
- self._status = status
-
- @property
- def test_name(self):
- return tools.normalize_name(self._test_name)
-
- @property
- def class_name(self):
- return tools.normalize_name(self._class_name)
-
- @property
- def error(self):
- return self._error
-
+
+ @property
+ def status(self):
+ return self._status
+
+ def set_status(self, status):
+ self._status = status
+
+ @property
+ def test_name(self):
+ return tools.normalize_name(self._test_name)
+
+ @property
+ def class_name(self):
+ return tools.normalize_name(self._class_name)
+
+ @property
+ def error(self):
+ return self._error
+
def set_error(self, entry, marker='bad'):
if isinstance(entry, _pytest.reports.BaseReport):
- self._error = get_formatted_error(entry)
- else:
+ self._error = get_formatted_error(entry)
+ else:
self._error = "[[{}]]{}".format(yatest_lib.tools.to_str(marker), yatest_lib.tools.to_str(entry))
-
- @property
- def duration(self):
- return self._duration
-
- @property
- def result(self):
- if 'not_canonize' in self._keywords:
- return None
- return self._result
-
- @property
- def keywords(self):
- return self._keywords
-
- def __str__(self):
- return "{}::{}".format(self.class_name, self.test_name)
-
-
-class CustomTestItem(TestItem):
-
- def __init__(self, nodeid, test_suffix, keywords=None):
- self._result = None
- self.nodeid = nodeid
- self._class_name, self._test_name = tools.split_node_id(nodeid, test_suffix)
- self._duration = 0
- self._error = ""
- self._keywords = keywords if keywords is not None else {}
-
-
-class NotLaunchedTestItem(CustomTestItem):
-
- def __init__(self, nodeid, test_suffix):
- super(NotLaunchedTestItem, self).__init__(nodeid, test_suffix)
- self._status = "not_launched"
-
-
-class CrashedTestItem(CustomTestItem):
-
- def __init__(self, nodeid, test_suffix):
- super(CrashedTestItem, self).__init__(nodeid, test_suffix)
- self._status = "crashed"
-
-
-class DeselectedTestItem(CustomTestItem):
-
- def __init__(self, nodeid, test_suffix):
- super(DeselectedTestItem, self).__init__(nodeid, test_suffix)
- self._status = "deselected"
-
-
-class TraceReportGenerator(object):
-
- def __init__(self, out_file_path):
+
+ @property
+ def duration(self):
+ return self._duration
+
+ @property
+ def result(self):
+ if 'not_canonize' in self._keywords:
+ return None
+ return self._result
+
+ @property
+ def keywords(self):
+ return self._keywords
+
+ def __str__(self):
+ return "{}::{}".format(self.class_name, self.test_name)
+
+
+class CustomTestItem(TestItem):
+
+ def __init__(self, nodeid, test_suffix, keywords=None):
+ self._result = None
+ self.nodeid = nodeid
+ self._class_name, self._test_name = tools.split_node_id(nodeid, test_suffix)
+ self._duration = 0
+ self._error = ""
+ self._keywords = keywords if keywords is not None else {}
+
+
+class NotLaunchedTestItem(CustomTestItem):
+
+ def __init__(self, nodeid, test_suffix):
+ super(NotLaunchedTestItem, self).__init__(nodeid, test_suffix)
+ self._status = "not_launched"
+
+
+class CrashedTestItem(CustomTestItem):
+
+ def __init__(self, nodeid, test_suffix):
+ super(CrashedTestItem, self).__init__(nodeid, test_suffix)
+ self._status = "crashed"
+
+
+class DeselectedTestItem(CustomTestItem):
+
+ def __init__(self, nodeid, test_suffix):
+ super(DeselectedTestItem, self).__init__(nodeid, test_suffix)
+ self._status = "deselected"
+
+
+class TraceReportGenerator(object):
+
+ def __init__(self, out_file_path):
self._filename = out_file_path
self._file = open(out_file_path, 'w')
self._wreckage_filename = out_file_path + '.wreckage'
@@ -759,7 +759,7 @@ class TraceReportGenerator(object):
self._current_test = (None, None)
self._pid = os.getpid()
self._check_intricate_respawn()
-
+
def _check_intricate_respawn(self):
pid_file = self._filename + '.pid'
try:
@@ -803,40 +803,40 @@ class TraceReportGenerator(object):
            # Test binary is launched without `ya make -t`'s testing machinery - don't rely on a clean environment
pass
- def on_start_test_class(self, test_item):
+ def on_start_test_class(self, test_item):
pytest_config.ya.set_test_item_node_id(test_item.nodeid)
class_name = test_item.class_name.decode('utf-8') if sys.version_info[0] < 3 else test_item.class_name
self._current_test = (class_name, None)
self.trace('test-started', {'class': class_name})
-
- def on_finish_test_class(self, test_item):
+
+ def on_finish_test_class(self, test_item):
pytest_config.ya.set_test_item_node_id(test_item.nodeid)
self.trace('test-finished', {'class': test_item.class_name.decode('utf-8') if sys.version_info[0] < 3 else test_item.class_name})
-
- def on_start_test_case(self, test_item):
+
+ def on_start_test_case(self, test_item):
class_name = yatest_lib.tools.to_utf8(test_item.class_name)
subtest_name = yatest_lib.tools.to_utf8(test_item.test_name)
- message = {
+ message = {
'class': class_name,
'subtest': subtest_name,
- }
+ }
if test_item.nodeid in pytest_config.test_logs:
message['logs'] = pytest_config.test_logs[test_item.nodeid]
pytest_config.ya.set_test_item_node_id(test_item.nodeid)
self._current_test = (class_name, subtest_name)
- self.trace('subtest-started', message)
-
+ self.trace('subtest-started', message)
+
def on_finish_test_case(self, test_item, duration_only=False):
if test_item.result is not None:
- try:
+ try:
result = canon.serialize(test_item.result)
- except Exception as e:
- yatest_logger.exception("Error while serializing test results")
- test_item.set_error("Invalid test result: {}".format(e))
- test_item.set_status("fail")
- result = None
- else:
- result = None
+ except Exception as e:
+ yatest_logger.exception("Error while serializing test results")
+ test_item.set_error("Invalid test result: {}".format(e))
+ test_item.set_status("fail")
+ result = None
+ else:
+ result = None
if duration_only and test_item.nodeid in self._test_messages: # add teardown time
message = self._test_messages[test_item.nodeid]
@@ -860,7 +860,7 @@ class TraceReportGenerator(object):
self.trace('subtest-finished', message)
self._test_messages[test_item.nodeid] = message
-
+
def dump_suite_metrics(self):
message = {"metrics": pytest_config.suite_metrics}
self.trace("suite-event", message)
@@ -874,28 +874,28 @@ class TraceReportGenerator(object):
else:
self._test_duration[test_item.nodeid] = test_item._duration
- @staticmethod
- def _get_comment(test_item):
- msg = yatest_lib.tools.to_utf8(test_item.error)
- if not msg:
- return ""
+ @staticmethod
+ def _get_comment(test_item):
+ msg = yatest_lib.tools.to_utf8(test_item.error)
+ if not msg:
+ return ""
return msg + "[[rst]]"
-
+
def _dump_trace(self, name, value):
- event = {
- 'timestamp': time.time(),
- 'value': value,
- 'name': name
- }
+ event = {
+ 'timestamp': time.time(),
+ 'value': value,
+ 'name': name
+ }
data = yatest_lib.tools.to_str(json.dumps(event, ensure_ascii=False))
self._file.write(data + '\n')
self._file.flush()
-
+
def _check_sloppy_fork(self, name, value):
if self._pid == os.getpid():
return
-
+
yatest_logger.error("Skip tracing to avoid data corruption, name = %s, value = %s", name, value)
try:
@@ -950,14 +950,14 @@ class TraceReportGenerator(object):
self._dump_trace(name, value)
-class DryTraceReportGenerator(TraceReportGenerator):
- """
- Generator does not write any information.
- """
-
- def __init__(self, *args, **kwargs):
+class DryTraceReportGenerator(TraceReportGenerator):
+ """
+ Generator does not write any information.
+ """
+
+ def __init__(self, *args, **kwargs):
self._test_messages = {}
self._test_duration = {}
-
- def trace(self, name, value):
- pass
+
+ def trace(self, name, value):
+ pass
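
Every record that _dump_trace emits is a single JSON object per line with timestamp, name, and value keys; a minimal sketch of reading such a trace back, using the TRACE_FILE_NAME defined further down:

    import json

    def read_trace(path):
        # one event per line: {"timestamp": ..., "name": ..., "value": {...}}
        with open(path) as afile:
            return [json.loads(line) for line in afile if line.strip()]

    events = read_trace("ytest.report.trace")
    finished = [e for e in events if e["name"] == "subtest-finished"]
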
diff --git a/library/python/pytest/pytest.yatest.ini b/library/python/pytest/pytest.yatest.ini
index 554a1eb84f..70d6c98516 100644
--- a/library/python/pytest/pytest.yatest.ini
+++ b/library/python/pytest/pytest.yatest.ini
@@ -1,7 +1,7 @@
-[pytest]
+[pytest]
pep8maxlinelength = 200
norecursedirs = *
-pep8ignore = E127 E123 E226 E24
+pep8ignore = E127 E123 E226 E24
filterwarnings =
ignore::pytest.RemovedInPytest4Warning
addopts = -p no:warnings
diff --git a/library/python/pytest/ya.make b/library/python/pytest/ya.make
index ee3c47dccb..060c92c313 100644
--- a/library/python/pytest/ya.make
+++ b/library/python/pytest/ya.make
@@ -1,32 +1,32 @@
PY23_LIBRARY()
-
+
OWNER(
g:yatool
dmitko
)
-
-PY_SRCS(
+
+PY_SRCS(
__init__.py
- main.py
+ main.py
rewrite.py
- yatest_tools.py
+ yatest_tools.py
context.py
-)
-
-PEERDIR(
+)
+
+PEERDIR(
contrib/python/dateutil
contrib/python/ipdb
contrib/python/py
contrib/python/pytest
contrib/python/requests
- library/python/pytest/plugins
- library/python/testing/yatest_common
- library/python/testing/yatest_lib
-)
-
+ library/python/pytest/plugins
+ library/python/testing/yatest_common
+ library/python/testing/yatest_lib
+)
+
RESOURCE_FILES(
PREFIX library/python/pytest/
pytest.yatest.ini
)
-END()
+END()
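
RESOURCE_FILES embeds pytest.yatest.ini into the test binary; a hedged sketch of reading it back through the same __res interface used in main.py above. The resfs/file key layout is an assumption and may differ between toolchain versions:

    import __res

    # assumed key layout for RESOURCE_FILES with PREFIX: "resfs/file/<PREFIX><name>"
    data = __res.find("resfs/file/library/python/pytest/pytest.yatest.ini")
    if isinstance(data, bytes):
        data = data.decode("utf-8")
    print(data)
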
diff --git a/library/python/pytest/yatest_tools.py b/library/python/pytest/yatest_tools.py
index c618f8ff07..6b8b896394 100644
--- a/library/python/pytest/yatest_tools.py
+++ b/library/python/pytest/yatest_tools.py
@@ -3,13 +3,13 @@
import collections
import functools
import math
-import os
-import re
+import os
+import re
import sys
-
-import yatest_lib.tools
-
-
+
+import yatest_lib.tools
+
+
class Subtest(object):
def __init__(self, name, test_name, status, comment, elapsed, result=None, test_type=None, logs=None, cwd=None, metrics=None):
self._name = name
@@ -17,103 +17,103 @@ class Subtest(object):
self.status = status
self.elapsed = elapsed
self.comment = comment
- self.result = result
- self.test_type = test_type
+ self.result = result
+ self.test_type = test_type
self.logs = logs or {}
- self.cwd = cwd
- self.metrics = metrics
+ self.cwd = cwd
+ self.metrics = metrics
- def __eq__(self, other):
- if not isinstance(other, Subtest):
- return False
- return self.name == other.name and self.test_name == other.test_name
+ def __eq__(self, other):
+ if not isinstance(other, Subtest):
+ return False
+ return self.name == other.name and self.test_name == other.test_name
+
+ def __str__(self):
+ return yatest_lib.tools.to_utf8(unicode(self))
- def __str__(self):
- return yatest_lib.tools.to_utf8(unicode(self))
-
def __unicode__(self):
return u"{}::{}".format(self.test_name, self.test_name)
@property
def name(self):
- return yatest_lib.tools.to_utf8(self._name)
+ return yatest_lib.tools.to_utf8(self._name)
@property
def test_name(self):
- return yatest_lib.tools.to_utf8(self._test_name)
-
- def __repr__(self):
- return "Subtest [{}::{} - {}[{}]: {}]".format(self.name, self.test_name, self.status, self.elapsed, self.comment)
-
- def __hash__(self):
- return hash(str(self))
-
-
-class SubtestInfo(object):
-
+ return yatest_lib.tools.to_utf8(self._test_name)
+
+ def __repr__(self):
+ return "Subtest [{}::{} - {}[{}]: {}]".format(self.name, self.test_name, self.status, self.elapsed, self.comment)
+
+ def __hash__(self):
+ return hash(str(self))
+
+
+class SubtestInfo(object):
+
skipped_prefix = '[SKIPPED] '
- @classmethod
- def from_str(cls, s):
+ @classmethod
+ def from_str(cls, s):
if s.startswith(SubtestInfo.skipped_prefix):
s = s[len(SubtestInfo.skipped_prefix):]
skipped = True
-
+
else:
skipped = False
return SubtestInfo(*s.rsplit(TEST_SUBTEST_SEPARATOR, 1), skipped=skipped)
def __init__(self, test, subtest="", skipped=False, **kwargs):
- self.test = test
- self.subtest = subtest
+ self.test = test
+ self.subtest = subtest
self.skipped = skipped
-        for key, value in kwargs.items():
- setattr(self, key, value)
-
- def __str__(self):
+        for key, value in kwargs.items():
+ setattr(self, key, value)
+
+ def __str__(self):
s = ''
-
+
if self.skipped:
s += SubtestInfo.skipped_prefix
return s + TEST_SUBTEST_SEPARATOR.join([self.test, self.subtest])
- def __repr__(self):
- return str(self)
-
-
+ def __repr__(self):
+ return str(self)
+
+
class Status(object):
GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(7)
- SKIPPED = -100
- NOT_LAUNCHED = -200
- CANON_DIFF = -300
- FLAKY = -1
- BY_NAME = {'good': GOOD, 'fail': FAIL, 'xfail': XFAIL, 'xpass': XPASS, 'missing': MISSING, 'crashed': CRASHED,
- 'skipped': SKIPPED, 'flaky': FLAKY, 'not_launched': NOT_LAUNCHED, 'timeout': TIMEOUT, 'diff': CANON_DIFF}
- TO_STR = {GOOD: 'good', FAIL: 'fail', XFAIL: 'xfail', XPASS: 'xpass', MISSING: 'missing', CRASHED: 'crashed',
- SKIPPED: 'skipped', FLAKY: 'flaky', NOT_LAUNCHED: 'not_launched', TIMEOUT: 'timeout', CANON_DIFF: 'diff'}
+ SKIPPED = -100
+ NOT_LAUNCHED = -200
+ CANON_DIFF = -300
+ FLAKY = -1
+ BY_NAME = {'good': GOOD, 'fail': FAIL, 'xfail': XFAIL, 'xpass': XPASS, 'missing': MISSING, 'crashed': CRASHED,
+ 'skipped': SKIPPED, 'flaky': FLAKY, 'not_launched': NOT_LAUNCHED, 'timeout': TIMEOUT, 'diff': CANON_DIFF}
+ TO_STR = {GOOD: 'good', FAIL: 'fail', XFAIL: 'xfail', XPASS: 'xpass', MISSING: 'missing', CRASHED: 'crashed',
+ SKIPPED: 'skipped', FLAKY: 'flaky', NOT_LAUNCHED: 'not_launched', TIMEOUT: 'timeout', CANON_DIFF: 'diff'}
class Test(object):
- def __init__(self, name, path, status=None, comment=None, subtests=None):
+ def __init__(self, name, path, status=None, comment=None, subtests=None):
self.name = name
self.path = path
- self.status = status
- self.comment = comment
- self.subtests = subtests or []
-
- def __eq__(self, other):
- if not isinstance(other, Test):
- return False
- return self.name == other.name and self.path == other.path
-
- def __str__(self):
- return "Test [{} {}] - {} - {}".format(self.name, self.path, self.status, self.comment)
-
- def __repr__(self):
- return str(self)
-
+ self.status = status
+ self.comment = comment
+ self.subtests = subtests or []
+
+ def __eq__(self, other):
+ if not isinstance(other, Test):
+ return False
+ return self.name == other.name and self.path == other.path
+
+ def __str__(self):
+ return "Test [{} {}] - {} - {}".format(self.name, self.path, self.status, self.comment)
+
+ def __repr__(self):
+ return str(self)
+
def add_subtest(self, subtest):
self.subtests.append(subtest)
@@ -148,10 +148,10 @@ class YaCtx(object):
pass
ya_ctx = YaCtx()
-
-TRACE_FILE_NAME = "ytest.report.trace"
-
-
+
+TRACE_FILE_NAME = "ytest.report.trace"
+
+
def lazy(func):
mem = {}
@@ -174,7 +174,7 @@ def _get_mtab():
def get_max_filename_length(dirname):
- """
+ """
Return the maximum filename length for the filesystem
:return: maximum filename length
"""
@@ -194,10 +194,10 @@ def get_unique_file_path(dir_path, filename, cache=collections.defaultdict(set))
"""
Get a unique filename in the dir with a proper filename length, using the given filename/dir.
The file/dir won't be created (not thread-safe)
- :param dir_path: path to dir
+ :param dir_path: path to dir
:param filename: original filename
:return: unique filename
- """
+ """
max_suffix = 10000
# + 1 symbol for dot before suffix
tail_length = int(round(math.log(max_suffix, 10))) + 1
@@ -222,83 +222,83 @@ def get_unique_file_path(dir_path, filename, cache=collections.defaultdict(set))
assert counter < max_suffix
candidate = os.path.join(dir_path, filename + ".{}".format(counter))
return candidate
-
-
-def escape_for_fnmatch(s):
- return s.replace("[", "&#91;").replace("]", "&#93;")
-
-
-def get_python_cmd(opts=None, use_huge=True, suite=None):
+
+
+def escape_for_fnmatch(s):
+ return s.replace("[", "&#91;").replace("]", "&#93;")
+
+
+def get_python_cmd(opts=None, use_huge=True, suite=None):
if opts and getattr(opts, 'flags', {}).get("USE_ARCADIA_PYTHON") == "no":
- return ["python"]
- if suite and not suite._use_arcadia_python:
- return ["python"]
+ return ["python"]
+ if suite and not suite._use_arcadia_python:
+ return ["python"]
if use_huge:
return ["$(PYTHON)/python"]
ymake_path = opts.ymake_bin if opts and getattr(opts, 'ymake_bin', None) else "$(YMAKE)/ymake"
return [ymake_path, "--python"]
-
-
-def normalize_name(name):
- replacements = [
- ("\\", "\\\\"),
- ("\n", "\\n"),
- ("\t", "\\t"),
- ("\r", "\\r"),
- ]
- for l, r in replacements:
- name = name.replace(l, r)
- return name
-
-
+
+
+def normalize_name(name):
+ replacements = [
+ ("\\", "\\\\"),
+ ("\n", "\\n"),
+ ("\t", "\\t"),
+ ("\r", "\\r"),
+ ]
+ for l, r in replacements:
+ name = name.replace(l, r)
+ return name
+
+
def normalize_filename(filename):
- """
- Replace characters that are invalid in file names with string equivalents
- :param filename: string to be converted to a valid file name
- :return: valid file name
- """
+ """
+ Replace characters that are invalid in file names with string equivalents
+ :param filename: string to be converted to a valid file name
+ :return: valid file name
+ """
not_allowed_pattern = r"[\[\]\/:*?\"\'<>|+\0\\\s\x0b\x0c]"
filename = re.sub(not_allowed_pattern, ".", filename)
return re.sub(r"\.{2,}", ".", filename)
-
-
+
+
def get_test_log_file_path(output_dir, class_name, test_name, extension="log"):
- """
- Get the test log file path (platform dependent)
- :param output_dir: dir where log file should be placed
- :param class_name: test class name
- :param test_name: test name
- :return: test log file name
- """
- if os.name == "nt":
+ """
+ Get the test log file path (platform dependent)
+ :param output_dir: dir where log file should be placed
+ :param class_name: test class name
+ :param test_name: test name
+ :return: test log file name
+ """
+ if os.name == "nt":
# don't add class name to the log's filename
# to reduce its length on Windows
filename = test_name
- else:
+ else:
filename = "{}.{}".format(class_name, test_name)
if not filename:
filename = "test"
filename += "." + extension
filename = normalize_filename(filename)
return get_unique_file_path(output_dir, filename)
-
-
-def split_node_id(nodeid, test_suffix=None):
+
+
+def split_node_id(nodeid, test_suffix=None):
path, possible_open_bracket, params = nodeid.partition('[')
- separator = "::"
+ separator = "::"
if separator in path:
path, test_name = path.split(separator, 1)
- else:
- test_name = os.path.basename(path)
- if test_suffix:
- test_name += "::" + test_suffix
- class_name = os.path.basename(path.strip())
- if separator in test_name:
- klass_name, test_name = test_name.split(separator, 1)
- if not test_suffix:
- # test suffix is used for flakes and pep8; no need to append klass_name, as it is the same as class_name
- class_name += separator + klass_name
- if separator in test_name:
- test_name = test_name.split(separator)[-1]
+ else:
+ test_name = os.path.basename(path)
+ if test_suffix:
+ test_name += "::" + test_suffix
+ class_name = os.path.basename(path.strip())
+ if separator in test_name:
+ klass_name, test_name = test_name.split(separator, 1)
+ if not test_suffix:
+ # test suffix is used for flakes and pep8; no need to append klass_name, as it is the same as class_name
+ class_name += separator + klass_name
+ if separator in test_name:
+ test_name = test_name.split(separator)[-1]
test_name += possible_open_bracket + params
- return yatest_lib.tools.to_utf8(class_name), yatest_lib.tools.to_utf8(test_name)
+ return yatest_lib.tools.to_utf8(class_name), yatest_lib.tools.to_utf8(test_name)
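
For orientation, a small sketch of how split_node_id above decomposes typical pytest node ids (assumptions: the module is importable as yatest_tools, and TEST_SUBTEST_SEPARATOR is "::"):

    # Sketch only; the import path and node ids below are assumptions.
    from yatest_tools import split_node_id

    # A module-level test: the class name falls back to the file name.
    split_node_id("test_math.py::test_add")
    # -> ("test_math.py", "test_add")

    # A parametrized test inside a class: the class is appended to the
    # file name and the parameters are re-attached to the test name.
    split_node_id("test_math.py::TestOps::test_add[1-2]")
    # -> ("test_math.py::TestOps", "test_add[1-2]")
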
diff --git a/library/python/strings/strings.py b/library/python/strings/strings.py
index 83856db4c4..5bfddfe78a 100644
--- a/library/python/strings/strings.py
+++ b/library/python/strings/strings.py
@@ -2,24 +2,24 @@ import locale
import logging
import six
import sys
-import codecs
+import codecs
import library.python.func
-logger = logging.getLogger(__name__)
+logger = logging.getLogger(__name__)
+
-
DEFAULT_ENCODING = 'utf-8'
ENCODING_ERRORS_POLICY = 'replace'
-def left_strip(el, prefix):
- """
- Strips prefix at the left of el
- """
- if el.startswith(prefix):
- return el[len(prefix):]
- return el
+def left_strip(el, prefix):
+ """
+ Strips prefix at the left of el
+ """
+ if el.startswith(prefix):
+ return el[len(prefix):]
+ return el
# Explicit to-text conversion
@@ -49,8 +49,8 @@ def to_unicode(value, from_enc=DEFAULT_ENCODING):
else:
return value.decode(from_enc, errors=ENCODING_ERRORS_POLICY)
return six.text_type(value)
-
-
+
+
# Optional from_enc enables transcoding
def to_str(value, to_enc=DEFAULT_ENCODING, from_enc=None):
if isinstance(value, six.binary_type):
@@ -68,7 +68,7 @@ def _convert_deep(x, enc, convert, relaxed=True):
return None
if isinstance(x, (six.text_type, six.binary_type)):
return convert(x, enc)
- if isinstance(x, dict):
+ if isinstance(x, dict):
return {convert(k, enc): _convert_deep(v, enc, convert, relaxed) for k, v in six.iteritems(x)}
if isinstance(x, list):
return [_convert_deep(e, enc, convert, relaxed) for e in x]
@@ -77,9 +77,9 @@ def _convert_deep(x, enc, convert, relaxed=True):
if relaxed:
return x
- raise TypeError('unsupported type')
-
-
+ raise TypeError('unsupported type')
+
+
def unicodize_deep(x, enc=DEFAULT_ENCODING, relaxed=True):
return _convert_deep(x, enc, to_unicode, relaxed)
@@ -99,7 +99,7 @@ def locale_encoding():
logger.debug('Cannot get system locale: %s', e)
return None
except ValueError as e:
- logger.warning('Cannot get system locale: %s', e)
+ logger.warning('Cannot get system locale: %s', e)
return None
@@ -110,16 +110,16 @@ def fs_encoding():
def guess_default_encoding():
enc = locale_encoding()
return enc if enc else DEFAULT_ENCODING
-
-
+
+
@library.python.func.memoize()
-def get_stream_encoding(stream):
- if stream.encoding:
- try:
- codecs.lookup(stream.encoding)
- return stream.encoding
- except LookupError:
- pass
+def get_stream_encoding(stream):
+ if stream.encoding:
+ try:
+ codecs.lookup(stream.encoding)
+ return stream.encoding
+ except LookupError:
+ pass
return DEFAULT_ENCODING
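
A brief usage sketch for get_stream_encoding (assuming the module is importable as library.python.strings, per the diff header): it trusts a stream's reported encoding only if codecs can resolve it, otherwise it falls back to DEFAULT_ENCODING.

    import sys
    from library.python.strings import get_stream_encoding  # import path per the diff above

    # Returns sys.stdout.encoding if it names a codec known to `codecs`,
    # otherwise "utf-8"; the result is memoized per stream object.
    print(get_stream_encoding(sys.stdout))
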
diff --git a/library/python/testing/import_test/import_test.py b/library/python/testing/import_test/import_test.py
index 5b002f7aea..3e3b7234ef 100644
--- a/library/python/testing/import_test/import_test.py
+++ b/library/python/testing/import_test/import_test.py
@@ -2,7 +2,7 @@ from __future__ import print_function
import os
import re
-import sys
+import sys
import time
import traceback
@@ -65,8 +65,8 @@ def check_imports(no_check=(), extra=(), skip_func=None, py_main=None):
s = time.time()
if module == '__main__':
importer.load_module('__main__', '__main__py')
- elif module.endswith('.__init__'):
- __import__(module[:-len('.__init__')])
+ elif module.endswith('.__init__'):
+ __import__(module[:-len('.__init__')])
else:
__import__(module)
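
The branch above distinguishes package __init__ modules from plain modules; a minimal sketch of the same dispatch (module names here are hypothetical):

    # Sketch of the import dispatch used by check_imports; "foo.*" is made up.
    for module in ("foo.bar.__init__", "foo.baz"):
        if module.endswith(".__init__"):
            # importing the package itself executes its __init__
            __import__(module[: -len(".__init__")])
        else:
            __import__(module)
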
diff --git a/library/python/testing/recipe/__init__.py b/library/python/testing/recipe/__init__.py
index 8bc3b8a40b..5ef9c5c189 100644
--- a/library/python/testing/recipe/__init__.py
+++ b/library/python/testing/recipe/__init__.py
@@ -2,8 +2,8 @@ from __future__ import print_function
import os
import sys
-import json
-import logging
+import json
+import logging
import argparse
from yatest_lib.ya import Ya
@@ -17,22 +17,22 @@ sanitizer_extra_checks = None
def _setup_logging(level=logging.DEBUG):
- root_logger = logging.getLogger()
- root_logger.setLevel(level)
-
- log_format = '%(asctime)s - %(levelname)s - %(name)s - %(funcName)s: %(message)s'
-
- stdout_handler = logging.StreamHandler(sys.stdout)
- stdout_handler.setLevel(logging.DEBUG)
- formatter = logging.Formatter(log_format)
- stdout_handler.setFormatter(formatter)
- root_logger.addHandler(stdout_handler)
-
-
+ root_logger = logging.getLogger()
+ root_logger.setLevel(level)
+
+ log_format = '%(asctime)s - %(levelname)s - %(name)s - %(funcName)s: %(message)s'
+
+ stdout_handler = logging.StreamHandler(sys.stdout)
+ stdout_handler.setLevel(logging.DEBUG)
+ formatter = logging.Formatter(log_format)
+ stdout_handler.setFormatter(formatter)
+ root_logger.addHandler(stdout_handler)
+
+
def get_options():
parser = argparse.ArgumentParser()
parser.add_argument("--show-cwd", action="store_true", dest="show_cwd", default=False, help="show recipe cwd")
- parser.add_argument("--test-debug", action="store_true", dest="test_debug", default=False, help="test debug mode")
+ parser.add_argument("--test-debug", action="store_true", dest="test_debug", default=False, help="test debug mode")
parser.add_argument("--test-stderr", action="store_true", dest="test_stderr", default=False, help="test stderr")
parser.add_argument("--pdb", action="store_true", dest="pdb", default=False, help="run pdb on error")
parser.add_argument("--sanitizer-extra-checks", dest="sanitizer_extra_checks", action="store_true", default=False, help="enables extra checks for tests built with sanitizers")
@@ -42,7 +42,7 @@ def get_options():
global ya, sanitizer_extra_checks, collect_cores
_setup_logging()
-
+
context = {
"test_stderr": args.test_stderr,
}
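
Given the format string configured in _setup_logging above, an emitted record would render roughly as follows (sketch; the timestamp is illustrative and _setup_logging is a private helper of this module):

    import logging
    from library.python.testing.recipe import _setup_logging  # module per the diff above

    _setup_logging(logging.INFO)  # stdout handler with the format above
    logging.getLogger("recipe").info("started")
    # prints e.g.: 2022-02-10 16:48:18,123 - INFO - recipe - <module>: started
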
diff --git a/library/python/testing/ya.make b/library/python/testing/ya.make
index 2672c6ca6e..883bc8d7ab 100644
--- a/library/python/testing/ya.make
+++ b/library/python/testing/ya.make
@@ -1,22 +1,22 @@
OWNER(g:yatest)
-
-RECURSE(
- behave
+
+RECURSE(
+ behave
deprecated
fake_ya_package
filter
- gtest
- gtest/test
- gtest/test/gtest
+ gtest
+ gtest/test
+ gtest/test/gtest
pyremock
- pytest_runner
- pytest_runner/example
- pytest_runner/test
+ pytest_runner
+ pytest_runner/example
+ pytest_runner/test
recipe
system_info
types_test
yapackage
yapackage/test
- yatest_common
- yatest_lib
-)
+ yatest_common
+ yatest_lib
+)
diff --git a/library/python/testing/yatest_common/ya.make b/library/python/testing/yatest_common/ya.make
index 0cd9922c06..5662db4c5d 100644
--- a/library/python/testing/yatest_common/ya.make
+++ b/library/python/testing/yatest_common/ya.make
@@ -1,40 +1,40 @@
OWNER(g:yatest)
-
+
PY23_LIBRARY()
-
+
OWNER(g:yatest)
-
+
NO_EXTENDED_SOURCE_SEARCH()
-PY_SRCS(
- TOP_LEVEL
+PY_SRCS(
+ TOP_LEVEL
yatest/__init__.py
- yatest/common/__init__.py
- yatest/common/benchmark.py
- yatest/common/canonical.py
- yatest/common/environment.py
- yatest/common/errors.py
- yatest/common/legacy.py
+ yatest/common/__init__.py
+ yatest/common/benchmark.py
+ yatest/common/canonical.py
+ yatest/common/environment.py
+ yatest/common/errors.py
+ yatest/common/legacy.py
yatest/common/misc.py
- yatest/common/network.py
- yatest/common/path.py
- yatest/common/process.py
- yatest/common/runtime.py
- yatest/common/runtime_java.py
- yatest/common/tags.py
+ yatest/common/network.py
+ yatest/common/path.py
+ yatest/common/process.py
+ yatest/common/runtime.py
+ yatest/common/runtime_java.py
+ yatest/common/tags.py
)
-
+
PEERDIR(
contrib/python/six
library/python/cores
library/python/filelock
library/python/fs
-)
-
+)
+
IF (NOT CATBOOST_OPENSOURCE)
PEERDIR(
library/python/coredump_filter
)
ENDIF()
-END()
+END()
diff --git a/library/python/testing/yatest_common/yatest/__init__.py b/library/python/testing/yatest_common/yatest/__init__.py
index 19593ff8d2..b846b3317a 100644
--- a/library/python/testing/yatest_common/yatest/__init__.py
+++ b/library/python/testing/yatest_common/yatest/__init__.py
@@ -1,3 +1,3 @@
-__all__ = ["common"]
-
+__all__ = ["common"]
+
from . import common
diff --git a/library/python/testing/yatest_common/yatest/common/benchmark.py b/library/python/testing/yatest_common/yatest/common/benchmark.py
index 500dfaca7e..c3784cbe4c 100644
--- a/library/python/testing/yatest_common/yatest/common/benchmark.py
+++ b/library/python/testing/yatest_common/yatest/common/benchmark.py
@@ -1,22 +1,22 @@
-import json
-
+import json
+
from . import process
from . import runtime
-
-
-def execute_benchmark(path, budget=None, threads=None):
- """
- Run benchmark and return values
- :param path: path to benchmark binary
+
+
+def execute_benchmark(path, budget=None, threads=None):
+ """
+ Run benchmark and return values
+ :param path: path to benchmark binary
:param budget: time budget, sec (supported only by ybenchmark)
:param threads: number of threads to run benchmark (supported only by ybenchmark)
- :return: map of benchmark values
- """
- benchmark_path = runtime.binary_path(path)
+ :return: map of benchmark values
+ """
+ benchmark_path = runtime.binary_path(path)
cmd = [benchmark_path, "--benchmark_format=json"]
- if budget is not None:
- cmd += ["-b", str(budget)]
- if threads is not None:
- cmd += ["-t", str(threads)]
- res = process.execute(cmd)
- return json.loads(res.std_out)
+ if budget is not None:
+ cmd += ["-b", str(budget)]
+ if threads is not None:
+ cmd += ["-t", str(threads)]
+ res = process.execute(cmd)
+ return json.loads(res.std_out)
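
A hypothetical call to execute_benchmark (the binary path is an assumption; budget/threads are honored only by ybenchmark, per the docstring):

    from yatest.common.benchmark import execute_benchmark  # module path per the diff above

    values = execute_benchmark("devtools/benchmarks/my_bench/my_bench", budget=10, threads=2)
    # `values` is the dict parsed from the binary's --benchmark_format=json output
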
diff --git a/library/python/testing/yatest_common/yatest/common/canonical.py b/library/python/testing/yatest_common/yatest/common/canonical.py
index 9c02fe1c11..b6a136d3e9 100644
--- a/library/python/testing/yatest_common/yatest/common/canonical.py
+++ b/library/python/testing/yatest_common/yatest/common/canonical.py
@@ -1,17 +1,17 @@
-import os
-import logging
-import shutil
-import tempfile
-
+import os
+import logging
+import shutil
+import tempfile
+
import six
from . import process
from . import runtime
from . import path
-
-yatest_logger = logging.getLogger("ya.test")
-
-
+
+yatest_logger = logging.getLogger("ya.test")
+
+
def _copy(src, dst, universal_lines=False):
if universal_lines:
with open(dst, "wb") as f:
@@ -21,21 +21,21 @@ def _copy(src, dst, universal_lines=False):
shutil.copy(src, dst)
-def canonical_file(path, diff_tool=None, local=False, universal_lines=False, diff_file_name=None, diff_tool_timeout=None):
- """
- Create canonical file that can be returned from a test
- :param path: path to the file
- :param diff_tool: custom diff tool to use for comparison with the canonical one, if None - default will be used
- :param local: save file locally, otherwise move to sandbox
+def canonical_file(path, diff_tool=None, local=False, universal_lines=False, diff_file_name=None, diff_tool_timeout=None):
+ """
+ Create canonical file that can be returned from a test
+ :param path: path to the file
+ :param diff_tool: custom diff tool to use for comparison with the canonical one, if None - default will be used
+ :param local: save file locally, otherwise move to sandbox
:param universal_lines: normalize EOL
- :param diff_tool_timeout: timeout for running diff tool
- :return: object that can be canonized
- """
- abs_path = os.path.abspath(path)
- assert os.path.exists(abs_path), "Canonical path {} does not exist".format(path)
- tempdir = tempfile.mkdtemp(prefix="canon_tmp", dir=runtime.build_path())
- safe_path = os.path.join(tempdir, os.path.basename(abs_path))
- # if the created file is in output_path, we copy it so that it will be available when the test finishes
+ :param diff_tool_timeout: timeout for running diff tool
+ :return: object that can be canonized
+ """
+ abs_path = os.path.abspath(path)
+ assert os.path.exists(abs_path), "Canonical path {} does not exist".format(path)
+ tempdir = tempfile.mkdtemp(prefix="canon_tmp", dir=runtime.build_path())
+ safe_path = os.path.join(tempdir, os.path.basename(abs_path))
+ # if the created file is in output_path, we copy it so that it will be available when the test finishes
_copy(path, safe_path, universal_lines=universal_lines)
if diff_tool:
if not isinstance(diff_tool, six.string_types):
@@ -44,133 +44,133 @@ def canonical_file(path, diff_tool=None, local=False, universal_lines=False, dif
raise Exception("Invalid custom diff-tool: not cmd")
except:
raise Exception("Invalid custom diff-tool: not binary path")
- return runtime._get_ya_plugin_instance().file(safe_path, diff_tool=diff_tool, local=local, diff_file_name=diff_file_name, diff_tool_timeout=diff_tool_timeout)
-
-
-def canonical_dir(path, diff_tool=None, diff_file_name=None, diff_tool_timeout=None):
- abs_path = os.path.abspath(path)
- assert os.path.exists(abs_path), "Canonical path {} does not exist".format(path)
- assert os.path.isdir(abs_path), "Path {} is not a directory".format(path)
- if diff_file_name and not diff_tool:
- raise Exception("diff_file_name can be only be used with diff_tool for canonical_dir")
- tempdir = tempfile.mkdtemp()
- safe_path = os.path.join(tempdir, os.path.basename(abs_path))
- shutil.copytree(abs_path, safe_path)
- return runtime._get_ya_plugin_instance().file(safe_path, diff_tool=diff_tool, diff_file_name=diff_file_name, diff_tool_timeout=diff_tool_timeout)
-
-
-def canonical_execute(
- binary, args=None, check_exit_code=True,
- shell=False, timeout=None, cwd=None,
- env=None, stdin=None, stderr=None, creationflags=0,
+ return runtime._get_ya_plugin_instance().file(safe_path, diff_tool=diff_tool, local=local, diff_file_name=diff_file_name, diff_tool_timeout=diff_tool_timeout)
+
+
+def canonical_dir(path, diff_tool=None, diff_file_name=None, diff_tool_timeout=None):
+ abs_path = os.path.abspath(path)
+ assert os.path.exists(abs_path), "Canonical path {} does not exist".format(path)
+ assert os.path.isdir(abs_path), "Path {} is not a directory".format(path)
+ if diff_file_name and not diff_tool:
+ raise Exception("diff_file_name can be only be used with diff_tool for canonical_dir")
+ tempdir = tempfile.mkdtemp()
+ safe_path = os.path.join(tempdir, os.path.basename(abs_path))
+ shutil.copytree(abs_path, safe_path)
+ return runtime._get_ya_plugin_instance().file(safe_path, diff_tool=diff_tool, diff_file_name=diff_file_name, diff_tool_timeout=diff_tool_timeout)
+
+
+def canonical_execute(
+ binary, args=None, check_exit_code=True,
+ shell=False, timeout=None, cwd=None,
+ env=None, stdin=None, stderr=None, creationflags=0,
file_name=None, save_locally=False, close_fds=False,
diff_tool=None, diff_file_name=None, diff_tool_timeout=None,
-):
- """
- Shortcut to execute a binary and canonize its stdout
+):
+ """
+ Shortcut to execute a binary and canonize its stdout
:param binary: absolute path to the binary
- :param args: binary arguments
- :param check_exit_code: will raise ExecutionError if the command exits with non zero code
- :param shell: use shell to run the command
- :param timeout: execution timeout
- :param cwd: working directory
- :param env: command environment
- :param stdin: command stdin
- :param stderr: command stderr
- :param creationflags: command creation flags
+ :param args: binary arguments
+ :param check_exit_code: will raise ExecutionError if the command exits with non zero code
+ :param shell: use shell to run the command
+ :param timeout: execution timeout
+ :param cwd: working directory
+ :param env: command environment
+ :param stdin: command stdin
+ :param stderr: command stderr
+ :param creationflags: command creation flags
:param file_name: output file name. If not specified, the program name will be used
- :param diff_tool: path to custom diff tool
- :param diff_file_name: custom diff file name to create when diff is found
+ :param diff_tool: path to custom diff tool
+ :param diff_file_name: custom diff file name to create when diff is found
:param diff_tool_timeout: timeout for running diff tool
- :return: object that can be canonized
- """
- if type(binary) == list:
- command = binary
- else:
- command = [binary]
- command += _prepare_args(args)
- if shell:
- command = " ".join(command)
- execute_args = locals()
- del execute_args["binary"]
- del execute_args["args"]
+ :return: object that can be canonized
+ """
+ if type(binary) == list:
+ command = binary
+ else:
+ command = [binary]
+ command += _prepare_args(args)
+ if shell:
+ command = " ".join(command)
+ execute_args = locals()
+ del execute_args["binary"]
+ del execute_args["args"]
del execute_args["file_name"]
- del execute_args["save_locally"]
- del execute_args["diff_tool"]
- del execute_args["diff_file_name"]
+ del execute_args["save_locally"]
+ del execute_args["diff_tool"]
+ del execute_args["diff_file_name"]
del execute_args["diff_tool_timeout"]
- if not file_name and stdin:
- file_name = os.path.basename(stdin.name)
+ if not file_name and stdin:
+ file_name = os.path.basename(stdin.name)
return _canonical_execute(process.execute, execute_args, file_name, save_locally, diff_tool, diff_file_name, diff_tool_timeout)
-
-
-def canonical_py_execute(
- script_path, args=None, check_exit_code=True,
- shell=False, timeout=None, cwd=None, env=None,
- stdin=None, stderr=None, creationflags=0,
- file_name=None, save_locally=False, close_fds=False,
+
+
+def canonical_py_execute(
+ script_path, args=None, check_exit_code=True,
+ shell=False, timeout=None, cwd=None, env=None,
+ stdin=None, stderr=None, creationflags=0,
+ file_name=None, save_locally=False, close_fds=False,
diff_tool=None, diff_file_name=None, diff_tool_timeout=None,
-):
- """
- Shortcut to execute a python script and canonize its stdout
- :param script_path: arcadia-relative path to the script
- :param args: script arguments
- :param check_exit_code: will raise ExecutionError if the command exits with non zero code
- :param shell: use shell to run the command
- :param timeout: execution timeout
- :param cwd: working directory
- :param env: command environment
- :param stdin: command stdin
- :param stderr: command stderr
- :param creationflags: command creation flags
+):
+ """
+ Shortcut to execute a python script and canonize its stdout
+ :param script_path: arcadia-relative path to the script
+ :param args: script arguments
+ :param check_exit_code: will raise ExecutionError if the command exits with non zero code
+ :param shell: use shell to run the command
+ :param timeout: execution timeout
+ :param cwd: working directory
+ :param env: command environment
+ :param stdin: command stdin
+ :param stderr: command stderr
+ :param creationflags: command creation flags
:param file_name: output file name. If not specified, the program name will be used
- :param diff_tool: path to custom diff tool
- :param diff_file_name: custom diff file name to create when diff is found
+ :param diff_tool: path to custom diff tool
+ :param diff_file_name: custom diff file name to create when diff is found
:param diff_tool_timeout: timeout for running diff tool
- :return: object that can be canonized
- """
- command = [runtime.source_path(script_path)] + _prepare_args(args)
- if shell:
- command = " ".join(command)
- execute_args = locals()
- del execute_args["script_path"]
- del execute_args["args"]
+ :return: object that can be canonized
+ """
+ command = [runtime.source_path(script_path)] + _prepare_args(args)
+ if shell:
+ command = " ".join(command)
+ execute_args = locals()
+ del execute_args["script_path"]
+ del execute_args["args"]
del execute_args["file_name"]
- del execute_args["save_locally"]
- del execute_args["diff_tool"]
- del execute_args["diff_file_name"]
+ del execute_args["save_locally"]
+ del execute_args["diff_tool"]
+ del execute_args["diff_file_name"]
del execute_args["diff_tool_timeout"]
return _canonical_execute(process.py_execute, execute_args, file_name, save_locally, diff_tool, diff_file_name, diff_tool_timeout)
-
-
-def _prepare_args(args):
- if args is None:
- args = []
+
+
+def _prepare_args(args):
+ if args is None:
+ args = []
if isinstance(args, six.string_types):
- args = map(lambda a: a.strip(), args.split())
- return args
-
-
+ args = map(lambda a: a.strip(), args.split())
+ return args
+
+
def _canonical_execute(executor, kwargs, file_name, save_locally, diff_tool, diff_file_name, diff_tool_timeout):
- res = executor(**kwargs)
- command = kwargs["command"]
- file_name = file_name or process.get_command_name(command)
- if file_name.endswith(".exe"):
- file_name = os.path.splitext(file_name)[0] # don't want to bring windows stuff in file names
- out_file_path = path.get_unique_file_path(runtime.output_path(), "{}.out.txt".format(file_name))
- err_file_path = path.get_unique_file_path(runtime.output_path(), "{}.err.txt".format(file_name))
-
- try:
- os.makedirs(os.path.dirname(out_file_path))
- except OSError:
- pass
-
+ res = executor(**kwargs)
+ command = kwargs["command"]
+ file_name = file_name or process.get_command_name(command)
+ if file_name.endswith(".exe"):
+ file_name = os.path.splitext(file_name)[0] # don't want to bring windows stuff in file names
+ out_file_path = path.get_unique_file_path(runtime.output_path(), "{}.out.txt".format(file_name))
+ err_file_path = path.get_unique_file_path(runtime.output_path(), "{}.err.txt".format(file_name))
+
+ try:
+ os.makedirs(os.path.dirname(out_file_path))
+ except OSError:
+ pass
+
with open(out_file_path, "wb") as out_file:
- yatest_logger.debug("Will store file in %s", out_file_path)
- out_file.write(res.std_out)
-
- if res.std_err:
+ yatest_logger.debug("Will store file in %s", out_file_path)
+ out_file.write(res.std_out)
+
+ if res.std_err:
with open(err_file_path, "wb") as err_file:
- err_file.write(res.std_err)
-
+ err_file.write(res.std_err)
+
return canonical_file(out_file_path, local=save_locally, diff_tool=diff_tool, diff_file_name=diff_file_name, diff_tool_timeout=diff_tool_timeout)
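
A minimal sketch of a test that canonizes a tool's stdout via canonical_execute (the binary path is an assumption):

    import yatest.common

    def test_tool_output():
        # stdout is written to <name>.out.txt under output_path and canonized
        return yatest.common.canonical_execute(
            yatest.common.binary_path("devtools/mytool/mytool"),
            args=["--dump"],
        )
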
diff --git a/library/python/testing/yatest_common/yatest/common/environment.py b/library/python/testing/yatest_common/yatest/common/environment.py
index f3d55058e4..43f48d0958 100644
--- a/library/python/testing/yatest_common/yatest/common/environment.py
+++ b/library/python/testing/yatest_common/yatest/common/environment.py
@@ -1,5 +1,5 @@
# coding: utf-8
-
+
def extend_env_var(env, name, value, sep=":"):
return sep.join(filter(None, [env.get(name), value]))
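
extend_env_var simply joins the existing value and the new one, skipping empty parts, e.g. (sketch):

    from yatest.common.environment import extend_env_var  # module per the diff above

    env = {"PATH": "/usr/bin"}
    extend_env_var(env, "PATH", "/opt/tool/bin")  # -> "/usr/bin:/opt/tool/bin"
    extend_env_var({}, "PATH", "/opt/tool/bin")   # -> "/opt/tool/bin" (no leading separator)
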
diff --git a/library/python/testing/yatest_common/yatest/common/errors.py b/library/python/testing/yatest_common/yatest/common/errors.py
index 5cdd62c6d0..8c038fc381 100644
--- a/library/python/testing/yatest_common/yatest/common/errors.py
+++ b/library/python/testing/yatest_common/yatest/common/errors.py
@@ -1,20 +1,20 @@
import os
-import sys
-
-
-class RestartTestException(Exception):
-
- def __init__(self, *args, **kwargs):
+import sys
+
+
+class RestartTestException(Exception):
+
+ def __init__(self, *args, **kwargs):
super(RestartTestException, self).__init__(*args, **kwargs)
- sys.stderr.write("##restart-test##\n")
- sys.stderr.flush()
+ sys.stderr.write("##restart-test##\n")
+ sys.stderr.flush()
os.environ["FORCE_EXIT_TESTSFAILED"] = "1"
-
-
-class InfrastructureException(Exception):
-
- def __init__(self, *args, **kwargs):
+
+
+class InfrastructureException(Exception):
+
+ def __init__(self, *args, **kwargs):
super(InfrastructureException, self).__init__(*args, **kwargs)
- sys.stderr.write("##infrastructure-error##\n")
- sys.stderr.flush()
+ sys.stderr.write("##infrastructure-error##\n")
+ sys.stderr.flush()
os.environ["FORCE_EXIT_TESTSFAILED"] = "1"
diff --git a/library/python/testing/yatest_common/yatest/common/legacy.py b/library/python/testing/yatest_common/yatest/common/legacy.py
index cfb6975802..459972d253 100644
--- a/library/python/testing/yatest_common/yatest/common/legacy.py
+++ b/library/python/testing/yatest_common/yatest/common/legacy.py
@@ -1,12 +1,12 @@
from . import canonical
-
-
-def old_canonical_file(output_file_name, storage_md5):
- import yalibrary.svn
- yalibrary.svn.run_svn([
- 'export',
- 'svn+ssh://arcadia.yandex.ru/arc/trunk/arcadia_tests_data/tests_canonical_output/' + storage_md5,
- output_file_name,
- "--force"
- ])
- return canonical.canonical_file(output_file_name)
+
+
+def old_canonical_file(output_file_name, storage_md5):
+ import yalibrary.svn
+ yalibrary.svn.run_svn([
+ 'export',
+ 'svn+ssh://arcadia.yandex.ru/arc/trunk/arcadia_tests_data/tests_canonical_output/' + storage_md5,
+ output_file_name,
+ "--force"
+ ])
+ return canonical.canonical_file(output_file_name)
diff --git a/library/python/testing/yatest_common/yatest/common/path.py b/library/python/testing/yatest_common/yatest/common/path.py
index d31df2d278..6fed7dda8a 100644
--- a/library/python/testing/yatest_common/yatest/common/path.py
+++ b/library/python/testing/yatest_common/yatest/common/path.py
@@ -1,5 +1,5 @@
# coding=utf-8
-
+
import errno
import os
import shutil
@@ -8,19 +8,19 @@ import contextlib
import library.python.fs as lpf
-def replace_in_file(path, old, new):
- """
- Replace text occurrences in a file
- :param path: path to the file
- :param old: text to replace
- :param new: replacement
- """
- with open(path) as fp:
- content = fp.read()
-
+def replace_in_file(path, old, new):
+ """
+ Replace text occurrences in a file
+ :param path: path to the file
+ :param old: text to replace
+ :param new: replacement
+ """
+ with open(path) as fp:
+ content = fp.read()
+
lpf.ensure_removed(path)
- with open(path, 'w') as fp:
- fp.write(content.replace(old, new))
+ with open(path, 'w') as fp:
+ fp.write(content.replace(old, new))
@contextlib.contextmanager
@@ -38,8 +38,8 @@ def copytree(src, dst, symlinks=False, ignore=None, postprocessing=None):
Copy an entire directory of files into an existing directory
instead of raising an Exception, as shutil.copytree does
'''
- if not os.path.exists(dst) and os.path.isdir(src):
- os.makedirs(dst)
+ if not os.path.exists(dst) and os.path.isdir(src):
+ os.makedirs(dst)
for item in os.listdir(src):
s = os.path.join(src, item)
d = os.path.join(dst, item)
@@ -54,8 +54,8 @@ def copytree(src, dst, symlinks=False, ignore=None, postprocessing=None):
postprocessing(os.path.join(root, path), False)
for path in files:
postprocessing(os.path.join(root, path), True)
-
-
+
+
def get_unique_file_path(dir_path, file_pattern, create_file=True, max_suffix=10000):
def atomic_file_create(path):
try:
@@ -79,12 +79,12 @@ def get_unique_file_path(dir_path, file_pattern, create_file=True, max_suffix=10
return False
raise e
- file_path = os.path.join(dir_path, file_pattern)
+ file_path = os.path.join(dir_path, file_pattern)
lpf.ensure_dir(os.path.dirname(file_path))
- file_counter = 0
+ file_counter = 0
handler = atomic_file_create if create_file else atomic_dir_create
while os.path.exists(file_path) or not handler(file_path):
- file_path = os.path.join(dir_path, file_pattern + ".{}".format(file_counter))
- file_counter += 1
+ file_path = os.path.join(dir_path, file_pattern + ".{}".format(file_counter))
+ file_counter += 1
assert file_counter < max_suffix
- return file_path
+ return file_path
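
A sketch of get_unique_file_path's naming scheme (the directory is hypothetical): the first call returns the pattern as-is, later calls append a numeric suffix, and with create_file=True each path is claimed atomically.

    from yatest.common.path import get_unique_file_path  # module path per the diff above

    p1 = get_unique_file_path("/tmp/out", "report.txt")  # -> /tmp/out/report.txt
    p2 = get_unique_file_path("/tmp/out", "report.txt")  # -> /tmp/out/report.txt.0
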
diff --git a/library/python/testing/yatest_common/yatest/common/process.py b/library/python/testing/yatest_common/yatest/common/process.py
index 672ebe0823..a8bcc21f51 100644
--- a/library/python/testing/yatest_common/yatest/common/process.py
+++ b/library/python/testing/yatest_common/yatest/common/process.py
@@ -1,16 +1,16 @@
# coding: utf-8
-import os
+import os
import re
-import time
-import signal
+import time
+import signal
import shutil
-import logging
-import tempfile
-import subprocess
-import errno
+import logging
+import tempfile
+import subprocess
+import errno
import distutils.version
-
+
import six
try:
@@ -22,13 +22,13 @@ except ImportError:
from . import runtime
from . import path
from . import environment
-
-
+
+
MAX_OUT_LEN = 1000 * 1000 # 1 mb
MAX_MESSAGE_LEN = 1500
SANITIZER_ERROR_PATTERN = br": ([A-Z][\w]+Sanitizer)"
GLIBC_PATTERN = re.compile(r"\S+@GLIBC_([0-9.]+)")
-yatest_logger = logging.getLogger("ya.test")
+yatest_logger = logging.getLogger("ya.test")
def truncate(s, size):
@@ -40,45 +40,45 @@ def truncate(s, size):
return (b'...' if isinstance(s, bytes) else '...') + s[-(size - 3):]
-def get_command_name(command):
+def get_command_name(command):
return os.path.basename(command.split()[0] if isinstance(command, six.string_types) else command[0])
-
-
-class ExecutionError(Exception):
-
- def __init__(self, execution_result):
+
+
+class ExecutionError(Exception):
+
+ def __init__(self, execution_result):
if not isinstance(execution_result.command, six.string_types):
command = " ".join(str(arg) for arg in execution_result.command)
else:
command = execution_result.command
- message = "Command '{command}' has failed with code {code}.\nErrors:\n{err}\n".format(
+ message = "Command '{command}' has failed with code {code}.\nErrors:\n{err}\n".format(
command=command,
- code=execution_result.exit_code,
- err=_format_error(execution_result.std_err))
+ code=execution_result.exit_code,
+ err=_format_error(execution_result.std_err))
if cores:
if execution_result.backtrace:
message += "Backtrace:\n[[rst]]{}[[bad]]\n".format(cores.colorize_backtrace(execution_result._backtrace))
else:
message += "Backtrace is not available: module cores isn't available"
- super(ExecutionError, self).__init__(message)
+ super(ExecutionError, self).__init__(message)
+ self.execution_result = execution_result
+
+
+class TimeoutError(Exception):
+ pass
+
+
+class ExecutionTimeoutError(TimeoutError):
+ def __init__(self, execution_result, *args, **kwargs):
+ super(ExecutionTimeoutError, self).__init__(args, kwargs)
self.execution_result = execution_result
-
-
-class TimeoutError(Exception):
- pass
-
-
-class ExecutionTimeoutError(TimeoutError):
- def __init__(self, execution_result, *args, **kwargs):
- super(ExecutionTimeoutError, self).__init__(args, kwargs)
- self.execution_result = execution_result
-
-
-class InvalidExecutionStateError(Exception):
- pass
-
-
+
+
+class InvalidExecutionStateError(Exception):
+ pass
+
+
class SignalInterruptionError(Exception):
def __init__(self, message=None):
super(SignalInterruptionError, self).__init__(message)
@@ -89,39 +89,39 @@ class InvalidCommandError(Exception):
pass
-class _Execution(object):
-
+class _Execution(object):
+
def __init__(self, command, process, out_file, err_file, process_progress_listener=None, cwd=None, collect_cores=True, check_sanitizer=True, started=0, user_stdout=False, user_stderr=False):
- self._command = command
- self._process = process
- self._out_file = out_file
- self._err_file = err_file
- self._std_out = None
- self._std_err = None
+ self._command = command
+ self._process = process
+ self._out_file = out_file
+ self._err_file = err_file
+ self._std_out = None
+ self._std_err = None
self._elapsed = None
self._start = time.time()
- self._process_progress_listener = process_progress_listener
+ self._process_progress_listener = process_progress_listener
self._cwd = cwd or os.getcwd()
self._collect_cores = collect_cores
self._backtrace = ''
self._check_sanitizer = check_sanitizer
- self._metrics = {}
+ self._metrics = {}
self._started = started
self._user_stdout = bool(user_stdout)
self._user_stderr = bool(user_stderr)
self._exit_code = None
- if process_progress_listener:
- process_progress_listener.open(command, process, out_file, err_file)
-
- @property
- def running(self):
- return self._process.poll() is None
-
- def kill(self):
- if self.running:
- self._save_outputs(False)
- _kill_process_tree(self._process.pid)
- self._clean_files()
+ if process_progress_listener:
+ process_progress_listener.open(command, process, out_file, err_file)
+
+ @property
+ def running(self):
+ return self._process.poll() is None
+
+ def kill(self):
+ if self.running:
+ self._save_outputs(False)
+ _kill_process_tree(self._process.pid)
+ self._clean_files()
# DEVTOOLS-2347
yatest_logger.debug("Process status before wait_for: %s", self.running)
try:
@@ -130,64 +130,64 @@ class _Execution(object):
yatest_logger.debug("Process status after wait_for: %s", self.running)
yatest_logger.debug("Process %d info: %s", self._process.pid, _get_proc_tree_info([self._process.pid]))
raise
- else:
- raise InvalidExecutionStateError("Cannot kill a stopped process")
-
+ else:
+ raise InvalidExecutionStateError("Cannot kill a stopped process")
+
def terminate(self):
if self.running:
self._process.terminate()
- @property
- def process(self):
- return self._process
-
- @property
- def command(self):
- return self._command
-
- @property
+ @property
+ def process(self):
+ return self._process
+
+ @property
+ def command(self):
+ return self._command
+
+ @property
def returncode(self):
return self.exit_code
@property
- def exit_code(self):
+ def exit_code(self):
"""
Deprecated, use returncode
"""
if self._exit_code is None:
self._exit_code = self._process.returncode
return self._exit_code
-
- @property
+
+ @property
def stdout(self):
return self.std_out
@property
- def std_out(self):
+ def std_out(self):
"""
Deprecated, use stdout
"""
- if self._std_out is not None:
- return self._std_out
+ if self._std_out is not None:
+ return self._std_out
if self._process.stdout and not self._user_stdout:
self._std_out = self._process.stdout.read()
return self._std_out
-
- @property
+
+ @property
def stderr(self):
return self.std_err
@property
- def std_err(self):
+ def std_err(self):
"""
Deprecated, use stderr
"""
- if self._std_err is not None:
- return self._std_err
+ if self._std_err is not None:
+ return self._std_err
if self._process.stderr and not self._user_stderr:
self._std_err = self._process.stderr.read()
return self._std_err
-
+
@property
def elapsed(self):
return self._elapsed
@@ -196,14 +196,14 @@ class _Execution(object):
def backtrace(self):
return self._backtrace
- @property
- def metrics(self):
- return self._metrics
-
- def _save_outputs(self, clean_files=True):
- if self._process_progress_listener:
- self._process_progress_listener()
- self._process_progress_listener.close()
+ @property
+ def metrics(self):
+ return self._metrics
+
+ def _save_outputs(self, clean_files=True):
+ if self._process_progress_listener:
+ self._process_progress_listener()
+ self._process_progress_listener.close()
if not self._user_stdout:
if self._out_file is None:
pass
@@ -223,14 +223,14 @@ class _Execution(object):
else:
self._std_err = self._process.stderr.read()
- if clean_files:
- self._clean_files()
- yatest_logger.debug("Command (pid %s) rc: %s", self._process.pid, self.exit_code)
- yatest_logger.debug("Command (pid %s) elapsed time (sec): %s", self._process.pid, self.elapsed)
- if self._metrics:
+ if clean_files:
+ self._clean_files()
+ yatest_logger.debug("Command (pid %s) rc: %s", self._process.pid, self.exit_code)
+ yatest_logger.debug("Command (pid %s) elapsed time (sec): %s", self._process.pid, self.elapsed)
+ if self._metrics:
for key, value in six.iteritems(self._metrics):
- yatest_logger.debug("Command (pid %s) %s: %s", self._process.pid, key, value)
-
+ yatest_logger.debug("Command (pid %s) %s: %s", self._process.pid, key, value)
+
# Since this code is Python2/3 compatible, we don't know whether _std_out/_std_err is real bytes or a str.
printable_std_out, err = _try_convert_bytes_to_string(self._std_out)
if err:
@@ -244,14 +244,14 @@ class _Execution(object):
yatest_logger.debug("Command (pid %s) output:\n%s", self._process.pid, truncate(printable_std_out, MAX_OUT_LEN))
yatest_logger.debug("Command (pid %s) errors:\n%s", self._process.pid, truncate(printable_std_err, MAX_OUT_LEN))
- def _clean_files(self):
+ def _clean_files(self):
if self._err_file and not self._user_stderr and self._err_file != subprocess.PIPE:
self._err_file.close()
self._err_file = None
if self._out_file and not self._user_stdout and self._out_file != subprocess.PIPE:
self._out_file.close()
self._out_file = None
-
+
def _recover_core(self):
core_path = cores.recover_core_dump_file(self.command[0], self._cwd, self.process.pid)
if core_path:
@@ -260,7 +260,7 @@ class _Execution(object):
if store_cores:
new_core_path = path.get_unique_file_path(runtime.output_path(), "{}.{}.core".format(os.path.basename(self.command[0]), self._process.pid))
# Copy core dump file, because it may be overwritten
- yatest_logger.debug("Coping core dump file from '%s' to the '%s'", core_path, new_core_path)
+ yatest_logger.debug("Coping core dump file from '%s' to the '%s'", core_path, new_core_path)
shutil.copyfile(core_path, new_core_path)
core_path = new_core_path
@@ -281,92 +281,92 @@ class _Execution(object):
else:
runtime._register_core(os.path.basename(self.command[0]), None, None, bt_filename, pbt_filename)
- def wait(self, check_exit_code=True, timeout=None, on_timeout=None):
- def _wait():
+ def wait(self, check_exit_code=True, timeout=None, on_timeout=None):
+ def _wait():
finished = None
interrupted = False
- try:
- if hasattr(os, "wait4"):
- try:
+ try:
+ if hasattr(os, "wait4"):
+ try:
if hasattr(subprocess, "_eintr_retry_call"):
pid, sts, rusage = subprocess._eintr_retry_call(os.wait4, self._process.pid, 0)
else:
# PEP 475
pid, sts, rusage = os.wait4(self._process.pid, 0)
finished = time.time()
- self._process._handle_exitstatus(sts)
- for field in [
- "ru_idrss",
- "ru_inblock",
- "ru_isrss",
- "ru_ixrss",
- "ru_majflt",
- "ru_maxrss",
- "ru_minflt",
- "ru_msgrcv",
- "ru_msgsnd",
- "ru_nivcsw",
- "ru_nsignals",
- "ru_nswap",
- "ru_nvcsw",
- "ru_oublock",
- "ru_stime",
- "ru_utime",
- ]:
- if hasattr(rusage, field):
- self._metrics[field.replace("ru_", "")] = getattr(rusage, field)
- except OSError as exc:
-
- if exc.errno == errno.ECHILD:
- yatest_logger.debug("Process resource usage is not available as process finished before wait4 was called")
- else:
- raise
+ self._process._handle_exitstatus(sts)
+ for field in [
+ "ru_idrss",
+ "ru_inblock",
+ "ru_isrss",
+ "ru_ixrss",
+ "ru_majflt",
+ "ru_maxrss",
+ "ru_minflt",
+ "ru_msgrcv",
+ "ru_msgsnd",
+ "ru_nivcsw",
+ "ru_nsignals",
+ "ru_nswap",
+ "ru_nvcsw",
+ "ru_oublock",
+ "ru_stime",
+ "ru_utime",
+ ]:
+ if hasattr(rusage, field):
+ self._metrics[field.replace("ru_", "")] = getattr(rusage, field)
+ except OSError as exc:
+
+ if exc.errno == errno.ECHILD:
+ yatest_logger.debug("Process resource usage is not available as process finished before wait4 was called")
+ else:
+ raise
except SignalInterruptionError:
interrupted = True
raise
- finally:
+ finally:
if not interrupted:
self._process.wait() # this has to be here unconditionally, so that all process properties are set
-
+
if not finished:
finished = time.time()
self._metrics["wtime"] = round(finished - self._started, 3)
- try:
+ try:
if timeout:
process_is_finished = lambda: not self.running
fail_message = "Command '%s' stopped by %d seconds timeout" % (self._command, timeout)
- try:
- wait_for(process_is_finished, timeout, fail_message, sleep_time=0.1, on_check_condition=self._process_progress_listener)
- except TimeoutError as e:
- if on_timeout:
+ try:
+ wait_for(process_is_finished, timeout, fail_message, sleep_time=0.1, on_check_condition=self._process_progress_listener)
+ except TimeoutError as e:
+ if on_timeout:
yatest_logger.debug("Calling user specified on_timeout function")
- try:
- on_timeout(self, timeout)
- except Exception:
- yatest_logger.exception("Exception while calling on_timeout")
- raise ExecutionTimeoutError(self, str(e))
+ try:
+ on_timeout(self, timeout)
+ except Exception:
+ yatest_logger.exception("Exception while calling on_timeout")
+ raise ExecutionTimeoutError(self, str(e))
# Wait should always be called here; it finalizes the internal state of the process and sets the return code
- _wait()
+ _wait()
except BaseException as e:
- _kill_process_tree(self._process.pid)
- _wait()
- yatest_logger.debug("Command exception: %s", e)
+ _kill_process_tree(self._process.pid)
+ _wait()
+ yatest_logger.debug("Command exception: %s", e)
raise
- finally:
+ finally:
self._elapsed = time.time() - self._start
self._save_outputs()
self.verify_no_coredumps()
-
+
self._finalise(check_exit_code)
def _finalise(self, check_exit_code):
- # Set the signal (negative number) which caused the process to exit
- if check_exit_code and self.exit_code != 0:
- yatest_logger.error("Execution failed with exit code: %s\n\t,std_out:%s\n\tstd_err:%s\n",
- self.exit_code, truncate(self.std_out, MAX_OUT_LEN), truncate(self.std_err, MAX_OUT_LEN))
- raise ExecutionError(self)
-
+ # Set the signal (negative number) which caused the process to exit
+ if check_exit_code and self.exit_code != 0:
+ yatest_logger.error("Execution failed with exit code: %s\n\t,std_out:%s\n\tstd_err:%s\n",
+ self.exit_code, truncate(self.std_out, MAX_OUT_LEN), truncate(self.std_err, MAX_OUT_LEN))
+ raise ExecutionError(self)
+
# Don't search for sanitize errors if stderr was redirected
self.verify_sanitize_errors()
@@ -394,11 +394,11 @@ class _Execution(object):
if match:
yatest_logger.error("%s sanitizer found errors:\n\tstd_err:%s\n", match.group(1), truncate(self.std_err, MAX_OUT_LEN))
raise ExecutionError(self)
- else:
- yatest_logger.debug("No sanitizer errors found")
+ else:
+ yatest_logger.debug("No sanitizer errors found")
else:
- yatest_logger.debug("'%s' doesn't belong to '%s' - no check for sanitize errors", self.command[0], build_path)
-
+ yatest_logger.debug("'%s' doesn't belong to '%s' - no check for sanitize errors", self.command[0], build_path)
+
def on_timeout_gen_coredump(exec_obj, _):
"""
@@ -413,38 +413,38 @@ def on_timeout_gen_coredump(exec_obj, _):
pass
-def execute(
- command, check_exit_code=True,
- shell=False, timeout=None,
- cwd=None, env=None,
- stdin=None, stdout=None, stderr=None,
- creationflags=0, wait=True,
- process_progress_listener=None, close_fds=False,
- collect_cores=True, check_sanitizer=True, preexec_fn=None, on_timeout=None,
+def execute(
+ command, check_exit_code=True,
+ shell=False, timeout=None,
+ cwd=None, env=None,
+ stdin=None, stdout=None, stderr=None,
+ creationflags=0, wait=True,
+ process_progress_listener=None, close_fds=False,
+ collect_cores=True, check_sanitizer=True, preexec_fn=None, on_timeout=None,
executor=_Execution,
-):
- """
- Executes a command
- :param command: command: can be a list of arguments or a string
- :param check_exit_code: will raise ExecutionError if the command exits with non zero code
- :param shell: use shell to run the command
- :param timeout: execution timeout
- :param cwd: working directory
- :param env: command environment
- :param stdin: command stdin
- :param stdout: command stdout
- :param stderr: command stderr
- :param creationflags: command creation flags
- :param wait: should wait until the command finishes
- :param process_progress_listener: object that is polled while execution is in progress
- :param close_fds: subprocess.Popen close_fds arg
+):
+ """
+ Executes a command
+ :param command: command: can be a list of arguments or a string
+ :param check_exit_code: will raise ExecutionError if the command exits with non zero code
+ :param shell: use shell to run the command
+ :param timeout: execution timeout
+ :param cwd: working directory
+ :param env: command environment
+ :param stdin: command stdin
+ :param stdout: command stdout
+ :param stderr: command stderr
+ :param creationflags: command creation flags
+ :param wait: should wait until the command finishes
+ :param process_progress_listener: object that is polled while execution is in progress
+ :param close_fds: subprocess.Popen close_fds arg
:param collect_cores: recover core dump files if shell == False
:param check_sanitizer: raise ExecutionError if stderr contains sanitize errors
- :param preexec_fn: subprocess.Popen preexec_fn arg
- :param on_timeout: on_timeout(<execution object>, <timeout value>) callback
-
+ :param preexec_fn: subprocess.Popen preexec_fn arg
+ :param on_timeout: on_timeout(<execution object>, <timeout value>) callback
+
:return _Execution: Execution object
- """
+ """
if env is None:
env = os.environ.copy()
else:
@@ -462,9 +462,9 @@ def execute(
if var not in env and var in os.environ:
env[var] = os.environ[var]
- if not wait and timeout is not None:
- raise ValueError("Incompatible arguments 'timeout' and wait=False")
-
+ if not wait and timeout is not None:
+ raise ValueError("Incompatible arguments 'timeout' and wait=False")
+
# if subprocess.PIPE in [stdout, stderr]:
# raise ValueError("Don't use pipe to obtain stream data - it may leads to the deadlock")
@@ -481,15 +481,15 @@ def execute(
is_pipe = stream == subprocess.PIPE
return stream, not is_pipe
- # to be able to capture stdout/stderr and track the process execution time, we don't use subprocess.PIPE,
- # as it can cause the process to hang; we use temp files instead
+ # to be able to capture stdout/stderr and track the process execution time, we don't use subprocess.PIPE,
+ # as it can cause the process to hang; we use temp files instead
out_file, user_stdout = get_out_stream(stdout, 'out')
err_file, user_stderr = get_out_stream(stderr, 'err')
- in_file = stdin
-
- if shell and type(command) == list:
- command = " ".join(command)
-
+ in_file = stdin
+
+ if shell and type(command) == list:
+ command = " ".join(command)
+
if shell:
collect_cores = False
check_sanitizer = False
@@ -516,7 +516,7 @@ def execute(
name = "PIPE" if stdin == subprocess.PIPE else stdin.name
yatest_logger.debug("Executing '%s' with input '%s' in '%s'", command, name, cwd)
else:
- yatest_logger.debug("Executing '%s' in '%s'", command, cwd)
+ yatest_logger.debug("Executing '%s' in '%s'", command, cwd)
# XXX
started = time.time()
@@ -528,11 +528,11 @@ def execute(
yatest_logger.debug("Command pid: %s", process.pid)
res = executor(command, process, out_file, err_file, process_progress_listener, cwd, collect_cores, check_sanitizer, started, user_stdout=user_stdout, user_stderr=user_stderr)
- if wait:
- res.wait(check_exit_code, timeout, on_timeout)
- return res
-
-
+ if wait:
+ res.wait(check_exit_code, timeout, on_timeout)
+ return res
+
+
def _get_command_output_file(cmd, ext):
parts = [get_command_name(cmd)]
if 'YA_RETRY_INDEX' in os.environ:
@@ -570,74 +570,74 @@ def py_execute(
creationflags=0, wait=True,
process_progress_listener=None, close_fds=False
):
- """
- Executes a command with the arcadia python
- :param command: command to pass to python
- :param check_exit_code: will raise ExecutionError if the command exits with non zero code
- :param shell: use shell to run the command
- :param timeout: execution timeout
- :param cwd: working directory
- :param env: command environment
- :param stdin: command stdin
- :param stdout: command stdout
- :param stderr: command stderr
- :param creationflags: command creation flags
- :param wait: should wait until the command finishes
- :param process_progress_listener: object that is polled while execution is in progress
+ """
+ Executes a command with the arcadia python
+ :param command: command to pass to python
+ :param check_exit_code: will raise ExecutionError if the command exits with non zero code
+ :param shell: use shell to run the command
+ :param timeout: execution timeout
+ :param cwd: working directory
+ :param env: command environment
+ :param stdin: command stdin
+ :param stdout: command stdout
+ :param stderr: command stderr
+ :param creationflags: command creation flags
+ :param wait: should wait until the command finishes
+ :param process_progress_listener: object that is polled while execution is in progress
:return _Execution: Execution object
- """
+ """
if isinstance(command, six.string_types):
- command = [command]
- command = [runtime.python_path()] + command
- if shell:
- command = " ".join(command)
- return execute(**locals())
-
-
-def _format_error(error):
- return truncate(error, MAX_MESSAGE_LEN)
-
-
-def wait_for(check_function, timeout, fail_message="", sleep_time=1.0, on_check_condition=None):
- """
- Tries to execute `check_function` for `timeout` seconds.
- Continues until the function returns a non-false value.
- If the function doesn't return a non-false value within `timeout` seconds,
- TimeoutError is raised.
- Returns the first non-false result returned by `check_function`.
- """
- if sleep_time <= 0:
- raise ValueError("Incorrect sleep time value {}".format(sleep_time))
- if timeout < 0:
- raise ValueError("Incorrect timeout value {}".format(timeout))
- start = time.time()
- while start + timeout > time.time():
- if on_check_condition:
- on_check_condition()
-
- res = check_function()
- if res:
- return res
- time.sleep(sleep_time)
-
- message = "{} second(s) wait timeout has expired".format(timeout)
- if fail_message:
- message += ": {}".format(fail_message)
+ command = [command]
+ command = [runtime.python_path()] + command
+ if shell:
+ command = " ".join(command)
+ return execute(**locals())
+
+
+def _format_error(error):
+ return truncate(error, MAX_MESSAGE_LEN)
+
+
+def wait_for(check_function, timeout, fail_message="", sleep_time=1.0, on_check_condition=None):
+ """
+ Tries to execute `check_function` for `timeout` seconds.
+ Continues until the function returns a non-false value.
+ If the function doesn't return a non-false value within `timeout` seconds,
+ TimeoutError is raised.
+ Returns the first non-false result returned by `check_function`.
+ """
+ if sleep_time <= 0:
+ raise ValueError("Incorrect sleep time value {}".format(sleep_time))
+ if timeout < 0:
+ raise ValueError("Incorrect timeout value {}".format(timeout))
+ start = time.time()
+ while start + timeout > time.time():
+ if on_check_condition:
+ on_check_condition()
+
+ res = check_function()
+ if res:
+ return res
+ time.sleep(sleep_time)
+
+ message = "{} second(s) wait timeout has expired".format(timeout)
+ if fail_message:
+ message += ": {}".format(fail_message)
raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
-
+
def _kill_process_tree(process_pid, target_pid_signal=None):
- """
- Kills child processes recursively; note that psutil should be installed
- @param process_pid: parent id to search for descendants
- """
- yatest_logger.debug("Killing process %s", process_pid)
- if os.name == 'nt':
- _win_kill_process_tree(process_pid)
- else:
+ """
+ Kills child processes recursively; note that psutil should be installed
+ @param process_pid: parent id to search for descendants
+ """
+ yatest_logger.debug("Killing process %s", process_pid)
+ if os.name == 'nt':
+ _win_kill_process_tree(process_pid)
+ else:
_nix_kill_process_tree(process_pid, target_pid_signal)
-
-
+
+
def _nix_get_proc_children(pid):
try:
cmd = ["pgrep", "-P", str(pid)]
@@ -654,21 +654,21 @@ def _get_binname(pid):
def _nix_kill_process_tree(pid, target_pid_signal=None):
- """
- Kills the process tree.
- """
+ """
+ Kills the process tree.
+ """
yatest_logger.debug("Killing process tree for pid {} (bin:'{}')".format(pid, _get_binname(pid)))
-
- def try_to_send_signal(pid, sig):
- try:
- os.kill(pid, sig)
+
+ def try_to_send_signal(pid, sig):
+ try:
+ os.kill(pid, sig)
yatest_logger.debug("Sent signal %d to the pid %d", sig, pid)
- except Exception as exc:
+ except Exception as exc:
yatest_logger.debug("Error while sending signal {sig} to pid {pid}: {error}".format(sig=sig, pid=pid, error=str(exc)))
-
- try_to_send_signal(pid, signal.SIGSTOP) # Stop the process to prevent it from starting any child processes.
-
- # Get the child process PID list.
+
+ try_to_send_signal(pid, signal.SIGSTOP) # Stop the process to prevent it from starting any child processes.
+
+ # Get the child process PID list.
child_pids = _nix_get_proc_children(pid)
# Stop the child processes.
for child_pid in child_pids:
@@ -679,16 +679,16 @@ def _nix_kill_process_tree(pid, target_pid_signal=None):
# Skip the error and continue killing.
yatest_logger.debug("Killing child pid {pid} failed: {error}".format(pid=child_pid, error=e))
continue
-
+
try_to_send_signal(pid, target_pid_signal or signal.SIGKILL) # Kill the root process.
-
- # sometimes on freebsd sigkill cannot kill the process and either sigkill or sigcont should be sent
- # https://www.mail-archive.com/freebsd-hackers@freebsd.org/msg159646.html
- try_to_send_signal(pid, signal.SIGCONT)
-
-
-def _win_kill_process_tree(pid):
- subprocess.call(['taskkill', '/F', '/T', '/PID', str(pid)])
+
+ # sometimes on freebsd sigkill cannot kill the process and either sigkill or sigcont should be sent
+ # https://www.mail-archive.com/freebsd-hackers@freebsd.org/msg159646.html
+ try_to_send_signal(pid, signal.SIGCONT)
+
+
+def _win_kill_process_tree(pid):
+ subprocess.call(['taskkill', '/F', '/T', '/PID', str(pid)])
def _run_readelf(binary_path):
diff --git a/library/python/testing/yatest_common/yatest/common/runtime.py b/library/python/testing/yatest_common/yatest/common/runtime.py
index aa9161aead..e55e193446 100644
--- a/library/python/testing/yatest_common/yatest/common/runtime.py
+++ b/library/python/testing/yatest_common/yatest/common/runtime.py
@@ -3,49 +3,49 @@ import functools
import json
import os
import threading
-
+
import six
-
+
_lock = threading.Lock()
def _get_ya_config():
- try:
+ try:
import library.python.pytest.plugins.ya as ya_plugin
if ya_plugin.pytest_config is not None:
return ya_plugin.pytest_config
- import pytest
+ import pytest
return pytest.config
- except (ImportError, AttributeError):
+ except (ImportError, AttributeError):
try:
import library.python.testing.recipe
if library.python.testing.recipe.ya:
return library.python.testing.recipe
except (ImportError, AttributeError):
pass
- raise NotImplementedError("yatest.common.* is only available from the testing runtime")
-
-
+ raise NotImplementedError("yatest.common.* is only available from the testing runtime")
+
+
def _get_ya_plugin_instance():
return _get_ya_config().ya
-def _norm_path(path):
- if path is None:
- return None
+def _norm_path(path):
+ if path is None:
+ return None
assert isinstance(path, six.string_types)
- if "\\" in path:
- raise AssertionError("path {} contains Windows seprators \\ - replace them with '/'".format(path))
- return os.path.normpath(path)
-
-
-def _join_path(main_path, path):
- if not path:
- return main_path
- return os.path.join(main_path, _norm_path(path))
-
-
+ if "\\" in path:
+        raise AssertionError("path {} contains Windows separators \\ - replace them with '/'".format(path))
+ return os.path.normpath(path)
+
+
+def _join_path(main_path, path):
+ if not path:
+ return main_path
+ return os.path.join(main_path, _norm_path(path))
+
+
def not_test(func):
"""
Mark any function as not a test for py.test
@@ -60,70 +60,70 @@ def not_test(func):
def source_path(path=None):
- """
- Get source path inside arcadia
+ """
+ Get source path inside arcadia
    :param path: arcadia-relative path, e.g. yatest.common.source_path('devtools/ya')
- :return: absolute path to the source folder
- """
+ :return: absolute path to the source folder
+ """
return _join_path(_get_ya_plugin_instance().source_root, path)
-
-
+
+
def build_path(path=None):
- """
- Get path inside build directory
+ """
+ Get path inside build directory
:param path: path relative to the build directory, e.g. yatest.common.build_path('devtools/ya/bin')
- :return: absolute path inside build directory
- """
+ :return: absolute path inside build directory
+ """
return _join_path(_get_ya_plugin_instance().build_root, path)
-
-
+
+
def java_path():
"""
- [DEPRECATED] Get path to java
+ [DEPRECATED] Get path to java
:return: absolute path to java
"""
from . import runtime_java
return runtime_java.get_java_path(binary_path(os.path.join('contrib', 'tools', 'jdk')))
-def java_home():
- """
- Get jdk directory path
- """
+def java_home():
+ """
+ Get jdk directory path
+ """
from . import runtime_java
- jdk_dir = runtime_java.get_build_java_dir(binary_path('jdk'))
- if not jdk_dir:
- raise Exception("Cannot find jdk - make sure 'jdk' is added to the DEPENDS section and exists for the current platform")
- return jdk_dir
-
-
-def java_bin():
- """
- Get path to the java binary
+ jdk_dir = runtime_java.get_build_java_dir(binary_path('jdk'))
+ if not jdk_dir:
+ raise Exception("Cannot find jdk - make sure 'jdk' is added to the DEPENDS section and exists for the current platform")
+ return jdk_dir
+
+
+def java_bin():
+ """
+ Get path to the java binary
Requires DEPENDS(jdk)
- """
- return os.path.join(java_home(), "bin", "java")
-
-
+ """
+ return os.path.join(java_home(), "bin", "java")
+
+
def data_path(path=None):
- """
- Get path inside arcadia_tests_data directory
+ """
+ Get path inside arcadia_tests_data directory
:param path: path relative to the arcadia_tests_data directory, e.g. yatest.common.data_path("pers/rerank_service")
- :return: absolute path inside arcadia_tests_data
- """
+ :return: absolute path inside arcadia_tests_data
+ """
return _join_path(_get_ya_plugin_instance().data_root, path)
-
-
+
+
def output_path(path=None):
- """
- Get path inside the current test suite output dir.
- Placing files to this dir guarantees that files will be accessible after the test suite execution.
- :param path: path relative to the test suite output dir
- :return: absolute path inside the test suite output dir
- """
+ """
+ Get path inside the current test suite output dir.
+ Placing files to this dir guarantees that files will be accessible after the test suite execution.
+ :param path: path relative to the test suite output dir
+ :return: absolute path inside the test suite output dir
+ """
return _join_path(_get_ya_plugin_instance().output_dir, path)
-
-
+
+
def ram_drive_path(path=None):
"""
:param path: path relative to the ram drive.
@@ -144,60 +144,60 @@ def output_ram_drive_path(path=None):
def binary_path(path=None):
- """
- Get path to the built binary
+ """
+ Get path to the built binary
:param path: path to the binary relative to the build directory e.g. yatest.common.binary_path('devtools/ya/bin/ya-bin')
- :return: absolute path to the binary
- """
+ :return: absolute path to the binary
+ """
path = _norm_path(path)
return _get_ya_plugin_instance().get_binary(path)
-
-
+
+
def work_path(path=None):
- """
- Get path inside the current test suite working directory. Creating files in the work directory does not guarantee
- that files will be accessible after the test suite execution
- :param path: path relative to the test suite working dir
- :return: absolute path inside the test suite working dir
- """
+ """
+ Get path inside the current test suite working directory. Creating files in the work directory does not guarantee
+ that files will be accessible after the test suite execution
+ :param path: path relative to the test suite working dir
+ :return: absolute path inside the test suite working dir
+ """
return _join_path(
os.environ.get("TEST_WORK_PATH") or
_get_ya_plugin_instance().get_context("work_path") or
os.getcwd(),
path)
-
-
-def python_path():
- """
+
+
+def python_path():
+ """
Get path to the arcadia python.
    Warning: if you are using a build with system python (-DUSE_SYSTEM_PYTHON=X), beware that some python bundles
    are built in a stripped-down form that is suitable for building but not for running tests.
See comments in the file below to find out which version of python is compatible with tests.
https://a.yandex-team.ru/arc/trunk/arcadia/build/platform/python/resources.inc
- :return: absolute path to python
- """
- return _get_ya_plugin_instance().python_path
-
-
-def valgrind_path():
- """
- Get path to valgrind
- :return: absolute path to valgrind
- """
- return _get_ya_plugin_instance().valgrind_path
-
-
-def get_param(key, default=None):
- """
- Get arbitrary parameter passed via command line
- :param key: key
- :param default: default value
- :return: parameter value or the default
- """
- return _get_ya_plugin_instance().get_param(key, default)
-
-
+ :return: absolute path to python
+ """
+ return _get_ya_plugin_instance().python_path
+
+
+def valgrind_path():
+ """
+ Get path to valgrind
+ :return: absolute path to valgrind
+ """
+ return _get_ya_plugin_instance().valgrind_path
+
+
+def get_param(key, default=None):
+ """
+ Get arbitrary parameter passed via command line
+    :param key: parameter name
+    :param default: value to return if the parameter is not set
+ :return: parameter value or the default
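+
+    Illustrative example (the key name is hypothetical):
+        host = get_param("db_host", default="localhost")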
+ """
+ return _get_ya_plugin_instance().get_param(key, default)
+
+
def get_param_dict_copy():
"""
Return copy of dictionary with all parameters. Changes to this dictionary do *not* change parameters.
@@ -209,25 +209,25 @@ def get_param_dict_copy():
@not_test
def test_output_path(path=None):
- """
- Get dir in the suite output_path for the current test case
- """
+ """
+    Get a directory inside the suite output_path for the current test case
+ """
test_out_dir = os.path.splitext(_get_ya_config().current_test_log_path)[0]
try:
- os.makedirs(test_out_dir)
+ os.makedirs(test_out_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
return _join_path(test_out_dir, path)
-
-
-def project_path(path=None):
- """
- Get path in build root relating to build_root/project path
- """
- return _join_path(os.path.join(build_path(), context.project_path), path)
-
-
+
+
+def project_path(path=None):
+ """
+    Get a path inside the build root relative to the project path (build_root/project_path)
+ """
+ return _join_path(os.path.join(build_path(), context.project_path), path)
+
+
def gdb_path():
"""
Get path to the gdb
@@ -235,25 +235,25 @@ def gdb_path():
return _get_ya_plugin_instance().gdb_path
-def c_compiler_path():
- """
- Get path to the gdb
- """
- return os.environ.get("YA_CC")
-
-
+def c_compiler_path():
+ """
+    Get path to the C compiler
+ """
+ return os.environ.get("YA_CC")
+
+
def get_yt_hdd_path(path=None):
if 'HDD_PATH' in os.environ:
return _join_path(os.environ['HDD_PATH'], path)
-def cxx_compiler_path():
- """
- Get path to the gdb
- """
- return os.environ.get("YA_CXX")
-
-
+def cxx_compiler_path():
+ """
+    Get path to the C++ compiler
+ """
+ return os.environ.get("YA_CXX")
+
+
def global_resources():
try:
return json.loads(os.environ.get("YA_GLOBAL_RESOURCES"))
@@ -282,39 +282,39 @@ def _register_core(name, binary_path, core_path, bt_path, pbt_path):
@not_test
-def test_source_path(path=None):
- return _join_path(os.path.join(source_path(), context.project_path), path)
-
-
-class Context(object):
- """
- Runtime context
- """
-
- @property
- def build_type(self):
- return _get_ya_plugin_instance().get_context("build_type")
-
- @property
- def project_path(self):
- return _get_ya_plugin_instance().get_context("project_path")
-
- @property
- def test_stderr(self):
- return _get_ya_plugin_instance().get_context("test_stderr")
-
- @property
+def test_source_path(path=None):
+ return _join_path(os.path.join(source_path(), context.project_path), path)
+
+
+class Context(object):
+ """
+ Runtime context
+ """
+
+ @property
+ def build_type(self):
+ return _get_ya_plugin_instance().get_context("build_type")
+
+ @property
+ def project_path(self):
+ return _get_ya_plugin_instance().get_context("project_path")
+
+ @property
+ def test_stderr(self):
+ return _get_ya_plugin_instance().get_context("test_stderr")
+
+ @property
def test_debug(self):
return _get_ya_plugin_instance().get_context("test_debug")
@property
- def test_traceback(self):
- return _get_ya_plugin_instance().get_context("test_traceback")
-
- @property
- def test_name(self):
+ def test_traceback(self):
+ return _get_ya_plugin_instance().get_context("test_traceback")
+
+ @property
+ def test_name(self):
return _get_ya_config().current_test_name
-
+
@property
def sanitize(self):
"""
@@ -340,4 +340,4 @@ class Context(object):
return _get_ya_plugin_instance().get_context(key)
-context = Context()
+context = Context()
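+
+# Illustrative usage (testing runtime only; attribute values depend on the run):
+#   context.project_path  # arcadia-relative path of the project under test
+#   context.build_type    # e.g. "release" or "debug"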
diff --git a/library/python/testing/yatest_common/yatest/common/runtime_java.py b/library/python/testing/yatest_common/yatest/common/runtime_java.py
index 1aa7b04827..39bbb45570 100644
--- a/library/python/testing/yatest_common/yatest/common/runtime_java.py
+++ b/library/python/testing/yatest_common/yatest/common/runtime_java.py
@@ -8,7 +8,7 @@ _JAVA_DIR = []
def get_java_path(jdk_dir):
- # deprecated - to be deleted
+ # deprecated - to be deleted
java_paths = (os.path.join(jdk_dir, 'bin', 'java'), os.path.join(jdk_dir, 'bin', 'java.exe'))
for p in java_paths:
@@ -25,11 +25,11 @@ def get_java_path(jdk_dir):
return p
return ''
-
-
-def get_build_java_dir(jdk_dir):
+
+
+def get_build_java_dir(jdk_dir):
versions = [8, 10, 11, 12, 13, 14, 15]
-
+
if not _JAVA_DIR:
for version in versions:
jdk_tar_path = os.path.join(jdk_dir, "jdk{}.tar".format(version))
diff --git a/library/python/testing/yatest_common/yatest/common/tags.py b/library/python/testing/yatest_common/yatest/common/tags.py
index c5401bd2ed..9e7a74cdf5 100644
--- a/library/python/testing/yatest_common/yatest/common/tags.py
+++ b/library/python/testing/yatest_common/yatest/common/tags.py
@@ -1,5 +1,5 @@
-try:
- import pytest
- ya_external = getattr(pytest.mark, "ya:external")
-except ImportError:
- ya_external = None
+try:
+ import pytest
+ ya_external = getattr(pytest.mark, "ya:external")
+except ImportError:
+ ya_external = None
diff --git a/library/python/testing/yatest_lib/external.py b/library/python/testing/yatest_lib/external.py
index 459d12c878..39113230d9 100644
--- a/library/python/testing/yatest_lib/external.py
+++ b/library/python/testing/yatest_lib/external.py
@@ -1,147 +1,147 @@
-from __future__ import absolute_import
-
+from __future__ import absolute_import
+
import re
import sys
-import copy
+import copy
import logging
-
-from . import tools
+
+from . import tools
from datetime import date, datetime
-
+
import enum
import six
logger = logging.getLogger(__name__)
MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
-
+
def apply(func, value, apply_to_keys=False):
- """
- Applies func to every possible member of value
- :param value: could be either a primitive object or a complex one (list, dicts)
- :param func: func to be applied
- :return:
- """
- def _apply(func, value, value_path):
- if value_path is None:
- value_path = []
-
- if isinstance(value, list) or isinstance(value, tuple):
- res = []
- for ind, item in enumerate(value):
- path = copy.copy(value_path)
- path.append(ind)
- res.append(_apply(func, item, path))
- elif isinstance(value, dict):
- if is_external(value):
- # this is a special serialized object pointing to some external place
- res = func(value, value_path)
- else:
- res = {}
- for key, val in sorted(value.items(), key=lambda dict_item: dict_item[0]):
- path = copy.copy(value_path)
- path.append(key)
+ """
+    Applies func to every possible member of value
+    :param value: either a primitive object or a complex one (lists, dicts)
+    :param func: function to be applied; called as func(value, value_path)
+    :return: value with func applied to all of its members
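+
+    Illustrative example (the data is hypothetical):
+        apply(lambda val, path: val * 2 if isinstance(val, int) else val,
+              {"a": 1, "b": [2, 3]})
+        # -> {"a": 2, "b": [4, 6]}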
+ """
+ def _apply(func, value, value_path):
+ if value_path is None:
+ value_path = []
+
+ if isinstance(value, list) or isinstance(value, tuple):
+ res = []
+ for ind, item in enumerate(value):
+ path = copy.copy(value_path)
+ path.append(ind)
+ res.append(_apply(func, item, path))
+ elif isinstance(value, dict):
+ if is_external(value):
+ # this is a special serialized object pointing to some external place
+ res = func(value, value_path)
+ else:
+ res = {}
+ for key, val in sorted(value.items(), key=lambda dict_item: dict_item[0]):
+ path = copy.copy(value_path)
+ path.append(key)
res[_apply(func, key, path) if apply_to_keys else key] = _apply(func, val, path)
- else:
- res = func(value, value_path)
- return res
- return _apply(func, value, None)
-
-
+ else:
+ res = func(value, value_path)
+ return res
+ return _apply(func, value, None)
+
+
def is_coroutine(val):
if sys.version_info[0] < 3:
return False
else:
import asyncio
- return asyncio.iscoroutinefunction(val) or asyncio.iscoroutine(val)
-
-
-def serialize(value):
- """
- Serialize value to json-convertible object
- Ensures that all components of value can be serialized to json
- :param value: object to be serialized
- """
- def _serialize(val, _):
- if val is None:
- return val
+ return asyncio.iscoroutinefunction(val) or asyncio.iscoroutine(val)
+
+
+def serialize(value):
+ """
+    Serialize value to a json-convertible object
+ Ensures that all components of value can be serialized to json
+ :param value: object to be serialized
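+
+    Illustrative example (the data is hypothetical):
+        serialize({"name": u"test", "count": 1, "ratio": 0.5})
+        # -> {"name": "test", "count": 1, "ratio": 0.5}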
+ """
+ def _serialize(val, _):
+ if val is None:
+ return val
if isinstance(val, six.string_types) or isinstance(val, bytes):
- return tools.to_utf8(val)
+ return tools.to_utf8(val)
if isinstance(val, enum.Enum):
return str(val)
if isinstance(val, six.integer_types) or type(val) in [float, bool]:
- return val
- if is_external(val):
- return dict(val)
+ return val
+ if is_external(val):
+ return dict(val)
if isinstance(val, (date, datetime)):
return repr(val)
if is_coroutine(val):
- return None
- raise ValueError("Cannot serialize value '{}' of type {}".format(val, type(val)))
+ return None
+ raise ValueError("Cannot serialize value '{}' of type {}".format(val, type(val)))
return apply(_serialize, value, apply_to_keys=True)
-
-
-def is_external(value):
- return isinstance(value, dict) and "uri" in value.keys()
-
-
-class ExternalSchema(object):
- File = "file"
- SandboxResource = "sbr"
- Delayed = "delayed"
+
+
+def is_external(value):
+ return isinstance(value, dict) and "uri" in value.keys()
+
+
+class ExternalSchema(object):
+ File = "file"
+ SandboxResource = "sbr"
+ Delayed = "delayed"
HTTP = "http"
-
-
-class CanonicalObject(dict):
- def __iter__(self):
- raise TypeError("Iterating canonical object is not implemented")
-
-
-class ExternalDataInfo(object):
-
- def __init__(self, data):
- assert is_external(data)
- self._data = data
-
- def __str__(self):
- type_str = "File" if self.is_file else "Sandbox resource"
- return "{}({})".format(type_str, self.path)
-
- def __repr__(self):
- return str(self)
-
- @property
- def uri(self):
- return self._data["uri"]
-
- @property
- def checksum(self):
- return self._data.get("checksum")
-
- @property
- def is_file(self):
- return self.uri.startswith(ExternalSchema.File)
-
- @property
- def is_sandbox_resource(self):
- return self.uri.startswith(ExternalSchema.SandboxResource)
-
- @property
- def is_delayed(self):
- return self.uri.startswith(ExternalSchema.Delayed)
-
- @property
+
+
+class CanonicalObject(dict):
+ def __iter__(self):
+ raise TypeError("Iterating canonical object is not implemented")
+
+
+class ExternalDataInfo(object):
+
+ def __init__(self, data):
+ assert is_external(data)
+ self._data = data
+
+ def __str__(self):
+ type_str = "File" if self.is_file else "Sandbox resource"
+ return "{}({})".format(type_str, self.path)
+
+ def __repr__(self):
+ return str(self)
+
+ @property
+ def uri(self):
+ return self._data["uri"]
+
+ @property
+ def checksum(self):
+ return self._data.get("checksum")
+
+ @property
+ def is_file(self):
+ return self.uri.startswith(ExternalSchema.File)
+
+ @property
+ def is_sandbox_resource(self):
+ return self.uri.startswith(ExternalSchema.SandboxResource)
+
+ @property
+ def is_delayed(self):
+ return self.uri.startswith(ExternalSchema.Delayed)
+
+ @property
def is_http(self):
return self.uri.startswith(ExternalSchema.HTTP)
@property
- def path(self):
+ def path(self):
if self.uri.count("://") != 1:
logger.error("Invalid external data uri: '%s'", self.uri)
return self.uri
- _, path = self.uri.split("://")
- return path
-
+ _, path = self.uri.split("://")
+ return path
+
def get_mds_key(self):
assert self.is_http
m = re.match(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)', self.uri)
@@ -149,44 +149,44 @@ class ExternalDataInfo(object):
return m.group(1)
raise AssertionError("Failed to extract mds key properly from '{}'".format(self.uri))
- @property
- def size(self):
- return self._data.get("size")
-
- def serialize(self):
- return self._data
-
- @classmethod
- def _serialize(cls, schema, path, checksum=None, attrs=None):
- res = CanonicalObject({"uri": "{}://{}".format(schema, path)})
- if checksum:
- res["checksum"] = checksum
- if attrs:
- res.update(attrs)
- return res
-
- @classmethod
- def serialize_file(cls, path, checksum=None, diff_tool=None, local=False, diff_file_name=None, diff_tool_timeout=None, size=None):
- attrs = {}
- if diff_tool:
- attrs["diff_tool"] = diff_tool
- if local:
- attrs["local"] = local
- if diff_file_name:
- attrs["diff_file_name"] = diff_file_name
- if diff_tool_timeout:
- attrs["diff_tool_timeout"] = diff_tool_timeout
- if size is not None:
- attrs["size"] = size
- return cls._serialize(ExternalSchema.File, path, checksum, attrs=attrs)
-
- @classmethod
- def serialize_resource(cls, id, checksum=None):
- return cls._serialize(ExternalSchema.SandboxResource, id, checksum)
-
- @classmethod
- def serialize_delayed(cls, upload_id, checksum):
- return cls._serialize(ExternalSchema.Delayed, upload_id, checksum)
-
- def get(self, key, default=None):
- return self._data.get(key, default)
+ @property
+ def size(self):
+ return self._data.get("size")
+
+ def serialize(self):
+ return self._data
+
+ @classmethod
+ def _serialize(cls, schema, path, checksum=None, attrs=None):
+ res = CanonicalObject({"uri": "{}://{}".format(schema, path)})
+ if checksum:
+ res["checksum"] = checksum
+ if attrs:
+ res.update(attrs)
+ return res
+
+ @classmethod
+ def serialize_file(cls, path, checksum=None, diff_tool=None, local=False, diff_file_name=None, diff_tool_timeout=None, size=None):
+ attrs = {}
+ if diff_tool:
+ attrs["diff_tool"] = diff_tool
+ if local:
+ attrs["local"] = local
+ if diff_file_name:
+ attrs["diff_file_name"] = diff_file_name
+ if diff_tool_timeout:
+ attrs["diff_tool_timeout"] = diff_tool_timeout
+ if size is not None:
+ attrs["size"] = size
+ return cls._serialize(ExternalSchema.File, path, checksum, attrs=attrs)
+
+ @classmethod
+ def serialize_resource(cls, id, checksum=None):
+ return cls._serialize(ExternalSchema.SandboxResource, id, checksum)
+
+ @classmethod
+ def serialize_delayed(cls, upload_id, checksum):
+ return cls._serialize(ExternalSchema.Delayed, upload_id, checksum)
+
+ def get(self, key, default=None):
+ return self._data.get(key, default)
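+
+
+# Illustrative usage (the resource id and checksum are hypothetical):
+#   info = ExternalDataInfo(ExternalDataInfo.serialize_resource(123456, checksum="0123abcd"))
+#   info.is_sandbox_resource  # -> True
+#   info.path                 # -> "123456"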
diff --git a/library/python/testing/yatest_lib/tools.py b/library/python/testing/yatest_lib/tools.py
index 602eaeefcc..b72d79c162 100644
--- a/library/python/testing/yatest_lib/tools.py
+++ b/library/python/testing/yatest_lib/tools.py
@@ -1,13 +1,13 @@
import six
import sys
-
-def to_utf8(value):
- """
- Converts value to string encoded into utf-8
- :param value:
- :return:
- """
+
+def to_utf8(value):
+ """
+    Converts value to a string encoded into utf-8
+    :param value: value to convert
+    :return: utf-8 encoded string
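+
+    Illustrative example: to_utf8(u"abc") == "abc"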
+ """
if sys.version_info[0] < 3:
if not isinstance(value, basestring): # noqa
value = unicode(value) # noqa
diff --git a/library/python/testing/yatest_lib/ya.make b/library/python/testing/yatest_lib/ya.make
index d115846230..342bae82ba 100644
--- a/library/python/testing/yatest_lib/ya.make
+++ b/library/python/testing/yatest_lib/ya.make
@@ -1,16 +1,16 @@
OWNER(g:yatest)
-
+
PY23_LIBRARY()
-
-PY_SRCS(
+
+PY_SRCS(
NAMESPACE
yatest_lib
- external.py
+ external.py
test_splitter.py
- tools.py
+ tools.py
ya.py
-)
-
+)
+
PEERDIR(
contrib/python/six
)
@@ -21,6 +21,6 @@ IF(PYTHON2)
)
ENDIF()
-END()
+END()
RECURSE_FOR_TESTS(tests)
diff --git a/library/python/windows/__init__.py b/library/python/windows/__init__.py
index 9966cb8c83..62861b3309 100644
--- a/library/python/windows/__init__.py
+++ b/library/python/windows/__init__.py
@@ -3,7 +3,7 @@
import os
import stat
import sys
-import shutil
+import shutil
import logging
from six import reraise
@@ -11,22 +11,22 @@ from six import reraise
import library.python.func
import library.python.strings
-logger = logging.getLogger(__name__)
+logger = logging.getLogger(__name__)
+
-
ERRORS = {
'SUCCESS': 0,
- 'PATH_NOT_FOUND': 3,
- 'ACCESS_DENIED': 5,
- 'SHARING_VIOLATION': 32,
+ 'PATH_NOT_FOUND': 3,
+ 'ACCESS_DENIED': 5,
+ 'SHARING_VIOLATION': 32,
'INSUFFICIENT_BUFFER': 122,
'DIR_NOT_EMPTY': 145,
}
-RETRIABLE_FILE_ERRORS = (ERRORS['ACCESS_DENIED'], ERRORS['SHARING_VIOLATION'])
-RETRIABLE_DIR_ERRORS = (ERRORS['ACCESS_DENIED'], ERRORS['DIR_NOT_EMPTY'], ERRORS['SHARING_VIOLATION'])
+RETRIABLE_FILE_ERRORS = (ERRORS['ACCESS_DENIED'], ERRORS['SHARING_VIOLATION'])
+RETRIABLE_DIR_ERRORS = (ERRORS['ACCESS_DENIED'], ERRORS['DIR_NOT_EMPTY'], ERRORS['SHARING_VIOLATION'])
+
-
# Check if on Windows
@library.python.func.lazy
def on_win():
@@ -145,7 +145,7 @@ if on_win():
ei = None
for t in xrange(tries):
if t:
- logger.debug('Diehard [errs %s]: try #%d in %s', ','.join(str(x) for x in winerrors), t, f)
+ logger.debug('Diehard [errs %s]: try #%d in %s', ','.join(str(x) for x in winerrors), t, f)
try:
return f(*args, **kwargs)
except WindowsError as e:
@@ -285,14 +285,14 @@ if on_win():
def set_error_mode(mode):
return ctypes.windll.kernel32.SetErrorMode(mode)
- @win_only
- def rmtree(path):
- def error_handler(func, handling_path, execinfo):
- e = execinfo[1]
- if e.winerror == ERRORS['PATH_NOT_FOUND']:
- handling_path = "\\\\?\\" + handling_path # handle path over 256 symbols
- if os.path.exists(path):
- return func(handling_path)
+ @win_only
+ def rmtree(path):
+ def error_handler(func, handling_path, execinfo):
+ e = execinfo[1]
+ if e.winerror == ERRORS['PATH_NOT_FOUND']:
+            handling_path = "\\\\?\\" + handling_path  # handle paths longer than 256 characters
+ if os.path.exists(path):
+ return func(handling_path)
if e.winerror == ERRORS['ACCESS_DENIED']:
try:
# removing of r/w directory with read-only files in it yields ACCESS_DENIED
@@ -303,9 +303,9 @@ if on_win():
else:
# propagate true last error if this attempt fails
return func(handling_path)
- raise e
- shutil.rmtree(path, onerror=error_handler)
-
+ raise e
+ shutil.rmtree(path, onerror=error_handler)
+
# Don't display the Windows GPF dialog if the invoked program dies.
# http://msdn.microsoft.com/en-us/library/windows/desktop/ms680621.aspx
@win_only
diff --git a/library/python/ya.make b/library/python/ya.make
index f0f7ebc6a8..2e1eb6e0e1 100644
--- a/library/python/ya.make
+++ b/library/python/ya.make
@@ -3,10 +3,10 @@ OWNER(g:python-contrib)
RECURSE(
aho_corasick
aho_corasick/ut
- archive
- archive/benchmark
- archive/test
- archive/test/data
+ archive
+ archive/benchmark
+ archive/test
+ archive/test/data
asgi_yauth
async_clients
auth_client_parser
@@ -19,14 +19,14 @@ RECURSE(
blackbox/tests
blackbox/tvm2
bloom
- boost_test
+ boost_test
bstr
build_info
build_info/ut
capabilities
celery_dashboard
certifi
- cgroups
+ cgroups
charset
charts_notes
charts_notes/example
@@ -72,7 +72,7 @@ RECURSE(
filelock/ut
filesys
filesys/ut
- find_root
+ find_root
flask
flask_passport
fnvhash
@@ -81,7 +81,7 @@ RECURSE(
framing/ut
func
func/ut
- fs
+ fs
geolocation
geolocation/ut
geohash
@@ -184,8 +184,8 @@ RECURSE(
svn_version
svn_version/ut
symbols
- testing
- tmp
+ testing
+ tmp
toloka_client
toloka-kit
toloka-airflow
@@ -199,7 +199,7 @@ RECURSE(
tvm2/tests
type_info
type_info/test
- unique_id
+ unique_id
vault_client
watch_dog
watch_dog/example
diff --git a/tools/archiver/tests/ya.make b/tools/archiver/tests/ya.make
index 60d6c7f459..445b4d3e70 100644
--- a/tools/archiver/tests/ya.make
+++ b/tools/archiver/tests/ya.make
@@ -1,11 +1,11 @@
-OWNER(mvel)
+OWNER(mvel)
PY2TEST()
-TEST_SRCS(test.py)
+TEST_SRCS(test.py)
-DATA(arcadia/tools/archiver/tests)
-
-DEPENDS(tools/archiver)
-
-END()
+DATA(arcadia/tools/archiver/tests)
+
+DEPENDS(tools/archiver)
+
+END()
diff --git a/tools/ya.make b/tools/ya.make
index cad28af957..51a6b8b426 100644
--- a/tools/ya.make
+++ b/tools/ya.make
@@ -12,7 +12,7 @@ RECURSE(
c++filt
calendar_extractor
check_formula_md5
- check_json
+ check_json
check_yaml
checktrigrams
clustermaster
diff --git a/util/system/ut/ya.make b/util/system/ut/ya.make
index 77be13efbc..127e7c261e 100644
--- a/util/system/ut/ya.make
+++ b/util/system/ut/ya.make
@@ -11,8 +11,8 @@ SPLIT_FACTOR(40)
TIMEOUT(300)
-SIZE(MEDIUM)
-
+SIZE(MEDIUM)
+
IF (OS_DARWIN)
SIZE(LARGE)
TAG(ya:fat ya:force_sandbox ya:exotic_platform)
diff --git a/ydb/library/yql/parser/proto_ast/gen_parser.sh b/ydb/library/yql/parser/proto_ast/gen_parser.sh
index ed910905cb..8c911497f6 100755
--- a/ydb/library/yql/parser/proto_ast/gen_parser.sh
+++ b/ydb/library/yql/parser/proto_ast/gen_parser.sh
@@ -12,5 +12,5 @@ fi
ANTLR3="./antlr3/antlr-3.5.2-complete-no-st3.jar"
-../../../../ya tool java -d64 -jar $ANTLR3 -lib ./ -language protobuf $1
-../../../../ya tool java -d64 -jar $ANTLR3 -lib ./ $1
+../../../../ya tool java -d64 -jar $ANTLR3 -lib ./ -language protobuf $1
+../../../../ya tool java -d64 -jar $ANTLR3 -lib ./ $1
diff --git a/ydb/tests/functional/hive/ya.make b/ydb/tests/functional/hive/ya.make
index f54ed2db4a..60c5fdbf13 100644
--- a/ydb/tests/functional/hive/ya.make
+++ b/ydb/tests/functional/hive/ya.make
@@ -33,12 +33,12 @@ ENDIF()
DEPENDS(
ydb/apps/ydbd
)
-
+
PEERDIR(
ydb/tests/library
)
FORK_SUBTESTS()
FORK_TEST_FILES()
-
+
END()