path: root/build
authorAleksandr <ivansduck@gmail.com>2022-02-10 16:47:52 +0300
committerDaniil Cherednik <dcherednik@yandex-team.ru>2022-02-10 16:47:52 +0300
commitea6c5b7f172becca389cacaff7d5f45f6adccbe6 (patch)
treed16cef493ac1e092b4a03ab9437ec06ffe3d188f /build
parent37de222addabbef336dcaaea5f7c7645a629fc6d (diff)
downloadydb-ea6c5b7f172becca389cacaff7d5f45f6adccbe6.tar.gz
Restoring authorship annotation for Aleksandr <ivansduck@gmail.com>. Commit 1 of 2.
Diffstat (limited to 'build')
-rw-r--r--  build/config/tests/clang_tidy/config.yaml | 2
-rw-r--r--  build/config/tests/clang_tidy/ya.make | 8
-rw-r--r--  build/config/tests/flake8/flake8.conf | 18
-rw-r--r--  build/config/tests/flake8/ya.make | 8
-rw-r--r--  build/config/tests/ya.make | 6
-rw-r--r--  build/config/ya.make | 6
-rw-r--r--  build/platform/python/python27/ya.make | 30
-rw-r--r--  build/platform/python/python34/ya.make | 30
-rw-r--r--  build/platform/python/python35/ya.make | 30
-rw-r--r--  build/platform/python/python36/ya.make | 30
-rw-r--r--  build/platform/python/python37/ya.make | 30
-rw-r--r--  build/platform/python/python38/ya.make | 30
-rw-r--r--  build/platform/python/resources.inc | 4
-rw-r--r--  build/platform/python/tests/test_common.py | 36
-rw-r--r--  build/platform/python/tests/testlib.py | 36
-rw-r--r--  build/platform/python/tests/ya.make | 68
-rw-r--r--  build/platform/python/ya.make | 20
-rw-r--r--  build/platform/test_tool/a.yaml | 28
-rw-r--r--  build/platform/test_tool/ya.make | 8
-rw-r--r--  build/plugins/_common.py | 14
-rw-r--r--  build/plugins/_requirements.py | 24
-rw-r--r--  build/plugins/_test_const.py | 282
-rw-r--r--  build/plugins/code_generator.py | 2
-rw-r--r--  build/plugins/macros_with_error.py | 34
-rw-r--r--  build/plugins/pybuild.py | 282
-rw-r--r--  build/plugins/res.py | 16
-rw-r--r--  build/plugins/suppressions.py | 38
-rw-r--r--  build/plugins/tests/test_requirements.py | 2
-rw-r--r--  build/plugins/ytest.py | 648
-rw-r--r--  build/rules/contrib_deps.policy | 2
-rw-r--r--  build/rules/flake8/migrations.yaml | 4
-rw-r--r--  build/scripts/append_file.py | 4
-rw-r--r--  build/scripts/clang_tidy.py | 136
-rw-r--r--  build/scripts/compile_cuda.py | 18
-rwxr-xr-x  build/scripts/configure_file.py | 2
-rw-r--r--  build/scripts/copy_to_dir.py | 2
-rw-r--r--  build/scripts/coverage-info.py | 18
-rw-r--r--  build/scripts/create_jcoverage_report.py | 70
-rw-r--r--  build/scripts/error.py | 58
-rwxr-xr-x  build/scripts/fetch_from.py | 114
-rw-r--r--  build/scripts/fetch_from_archive.py | 6
-rw-r--r--  build/scripts/fetch_from_external.py | 6
-rw-r--r--  build/scripts/fetch_from_mds.py | 6
-rwxr-xr-x  build/scripts/fetch_from_sandbox.py | 24
-rw-r--r--  build/scripts/go_tool.py | 2
-rw-r--r--  build/scripts/java_pack_to_file.py | 56
-rw-r--r--  build/scripts/link_dyn_lib.py | 4
-rw-r--r--  build/scripts/link_exe.py | 54
-rw-r--r--  build/scripts/link_fat_obj.py | 8
-rw-r--r--  build/scripts/retry.py | 58
-rw-r--r--  build/scripts/run_junit.py | 250
-rw-r--r--  build/scripts/unpacking_jtest_runner.py | 82
-rw-r--r--  build/scripts/with_coverage.py | 4
-rw-r--r--  build/scripts/with_crash_on_timeout.py | 4
-rw-r--r--  build/scripts/ya.make | 22
-rw-r--r--  build/ya.conf.json | 144
-rw-r--r--  build/ya.make | 18
-rw-r--r--  build/ymake.core.conf | 180
-rwxr-xr-x  build/ymake_conf.py | 62
59 files changed, 1594 insertions(+), 1594 deletions(-)
diff --git a/build/config/tests/clang_tidy/config.yaml b/build/config/tests/clang_tidy/config.yaml
index d55707592c..fba2fc718b 100644
--- a/build/config/tests/clang_tidy/config.yaml
+++ b/build/config/tests/clang_tidy/config.yaml
@@ -1,6 +1,6 @@
Checks: >
-*,
- arcadia-typeid-name-restriction,
+ arcadia-typeid-name-restriction,
bugprone-use-after-move,
readability-identifier-naming,
CheckOptions:
diff --git a/build/config/tests/clang_tidy/ya.make b/build/config/tests/clang_tidy/ya.make
index fc6544f38f..2c63a7978d 100644
--- a/build/config/tests/clang_tidy/ya.make
+++ b/build/config/tests/clang_tidy/ya.make
@@ -1,4 +1,4 @@
-OWNER(
- g:cpp-committee
- g:yatest
-)
+OWNER(
+ g:cpp-committee
+ g:yatest
+)
diff --git a/build/config/tests/flake8/flake8.conf b/build/config/tests/flake8/flake8.conf
index 8af22b5d4d..2e5516a02f 100644
--- a/build/config/tests/flake8/flake8.conf
+++ b/build/config/tests/flake8/flake8.conf
@@ -11,17 +11,17 @@ select =
#Q, # quotes
ignore =
- E122, # continuation line missing indentation or outdented
+ E122, # continuation line missing indentation or outdented
E123, # closing bracket does not match indentation of opening bracket's line
E127, # continuation line over-indented for visual indent
- E131, # continuation line unaligned for hanging
+ E131, # continuation line unaligned for hanging
E203, # whitespace before ':'
- E225, # missing whitespace around operator
+ E225, # missing whitespace around operator
E226, # missing whitespace around arithmetic operator
E24, # multiple spaces after ',' or tab after ','
- E275, # missing whitespace after keyword
- E305, # expected 2 blank lines after end of function or class
- E306, # expected 1 blank line before a nested definition
+ E275, # missing whitespace after keyword
+ E305, # expected 2 blank lines after end of function or class
+ E306, # expected 1 blank line before a nested definition
E402, # module level import not at top of file
E722, # do not use bare except, specify exception instead
E731, # do not assign a lambda expression, use a def
@@ -29,7 +29,7 @@ ignore =
F722, # syntax error in forward annotation
- W503, # line break before binary operator
- W504, # line break after binary operator
-
+ W503, # line break before binary operator
+ W504, # line break after binary operator
+
max-line-length = 200
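
For context, flake8.conf above is an ini-style config consumed through flake8's --config flag; a minimal sketch of running it against a file (the paths are illustrative, not part of this patch):

```python
import subprocess
import sys

def run_flake8(config_path, *files):
    """Lint files with an explicit flake8 config; True when clean."""
    # --config points flake8 at an ini-style file carrying the
    # select/ignore lists and max-line-length shown above.
    cmd = [sys.executable, "-m", "flake8", "--config", config_path, *files]
    result = subprocess.run(cmd, capture_output=True, text=True)
    if result.stdout:
        print(result.stdout, end="")
    return result.returncode == 0

# Hypothetical usage:
# run_flake8("build/config/tests/flake8/flake8.conf", "example.py")
```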
diff --git a/build/config/tests/flake8/ya.make b/build/config/tests/flake8/ya.make
index c4ba4105e0..6aff16ef0b 100644
--- a/build/config/tests/flake8/ya.make
+++ b/build/config/tests/flake8/ya.make
@@ -1,4 +1,4 @@
-OWNER(
- g:python-committee
- g:yatest
-)
+OWNER(
+ g:python-committee
+ g:yatest
+)
diff --git a/build/config/tests/ya.make b/build/config/tests/ya.make
index 146c3f63ad..fad817f908 100644
--- a/build/config/tests/ya.make
+++ b/build/config/tests/ya.make
@@ -1,4 +1,4 @@
-OWNER(
+OWNER(
shadchin
- g:yatest
-)
+ g:yatest
+)
diff --git a/build/config/ya.make b/build/config/ya.make
index ff93704d5b..8c485fc68e 100644
--- a/build/config/ya.make
+++ b/build/config/ya.make
@@ -1,3 +1,3 @@
-RECURSE(
- tests
-)
+RECURSE(
+ tests
+)
diff --git a/build/platform/python/python27/ya.make b/build/platform/python/python27/ya.make
index 35d2ef8405..f9811d2998 100644
--- a/build/platform/python/python27/ya.make
+++ b/build/platform/python/python27/ya.make
@@ -1,22 +1,22 @@
-RESOURCES_LIBRARY()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
-
-IF (OS_LINUX)
+RESOURCES_LIBRARY()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
+
+IF (OS_LINUX)
IF (ARCH_ARM64 OR ARCH_AARCH64)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON27 ${PYTHON27_LINUX_ARM64})
ELSE()
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON27 ${PYTHON27_LINUX})
ENDIF()
-ELSEIF (OS_DARWIN)
+ELSEIF (OS_DARWIN)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON27 ${PYTHON27_DARWIN})
-ELSEIF (OS_WINDOWS)
+ELSEIF (OS_WINDOWS)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON27 ${PYTHON27_WINDOWS})
-ENDIF()
-
-END()
+ENDIF()
+
+END()
diff --git a/build/platform/python/python34/ya.make b/build/platform/python/python34/ya.make
index 61be788bec..2d9bfa38ae 100644
--- a/build/platform/python/python34/ya.make
+++ b/build/platform/python/python34/ya.make
@@ -1,18 +1,18 @@
-RESOURCES_LIBRARY()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
-
-IF (OS_LINUX)
+RESOURCES_LIBRARY()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
+
+IF (OS_LINUX)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON34 ${PYTHON34_LINUX})
-ELSEIF (OS_DARWIN)
+ELSEIF (OS_DARWIN)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON34 ${PYTHON34_DARWIN})
-ELSEIF (OS_WINDOWS)
+ELSEIF (OS_WINDOWS)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON34 ${PYTHON34_WINDOWS})
-ENDIF()
-
-END()
+ENDIF()
+
+END()
diff --git a/build/platform/python/python35/ya.make b/build/platform/python/python35/ya.make
index 7a6292efa0..9baedcd926 100644
--- a/build/platform/python/python35/ya.make
+++ b/build/platform/python/python35/ya.make
@@ -1,18 +1,18 @@
-RESOURCES_LIBRARY()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
-
-IF (OS_LINUX)
+RESOURCES_LIBRARY()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
+
+IF (OS_LINUX)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON35 ${PYTHON35_LINUX})
-ELSEIF (OS_DARWIN)
+ELSEIF (OS_DARWIN)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON35 ${PYTHON35_DARWIN})
-ELSEIF (OS_WINDOWS)
+ELSEIF (OS_WINDOWS)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON35 ${PYTHON35_WINDOWS})
-ENDIF()
-
-END()
+ENDIF()
+
+END()
diff --git a/build/platform/python/python36/ya.make b/build/platform/python/python36/ya.make
index c9dcf5d88b..2debf2a37d 100644
--- a/build/platform/python/python36/ya.make
+++ b/build/platform/python/python36/ya.make
@@ -1,18 +1,18 @@
-RESOURCES_LIBRARY()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
-
-IF (OS_LINUX)
+RESOURCES_LIBRARY()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
+
+IF (OS_LINUX)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON36 ${PYTHON36_LINUX})
-ELSEIF (OS_DARWIN)
+ELSEIF (OS_DARWIN)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON36 ${PYTHON36_DARWIN})
-ELSEIF (OS_WINDOWS)
+ELSEIF (OS_WINDOWS)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON36 ${PYTHON36_WINDOWS})
-ENDIF()
-
-END()
+ENDIF()
+
+END()
diff --git a/build/platform/python/python37/ya.make b/build/platform/python/python37/ya.make
index 724c3f5b76..ed50e55bb9 100644
--- a/build/platform/python/python37/ya.make
+++ b/build/platform/python/python37/ya.make
@@ -1,18 +1,18 @@
-RESOURCES_LIBRARY()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
-
-IF (OS_LINUX)
+RESOURCES_LIBRARY()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
+
+IF (OS_LINUX)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON37 ${PYTHON37_LINUX})
-ELSEIF (OS_DARWIN)
+ELSEIF (OS_DARWIN)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON37 ${PYTHON37_DARWIN})
-ELSEIF (OS_WINDOWS)
+ELSEIF (OS_WINDOWS)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON37 ${PYTHON37_WINDOWS})
-ENDIF()
-
-END()
+ENDIF()
+
+END()
diff --git a/build/platform/python/python38/ya.make b/build/platform/python/python38/ya.make
index b6820ca6ca..e9d3ba7db7 100644
--- a/build/platform/python/python38/ya.make
+++ b/build/platform/python/python38/ya.make
@@ -1,23 +1,23 @@
-RESOURCES_LIBRARY()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
-
-IF (OS_LINUX)
+RESOURCES_LIBRARY()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+INCLUDE(${ARCADIA_ROOT}/build/platform/python/resources.inc)
+
+IF (OS_LINUX)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON38 ${PYTHON38_LINUX})
-ELSEIF (OS_DARWIN)
+ELSEIF (OS_DARWIN)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON38 ${PYTHON38_DARWIN})
IF (ARCH_ARM64)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON38 ${PYTHON38_DARWIN_ARM64})
ELSEIF(ARCH_X86_64)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON38 ${PYTHON38_DARWIN})
ENDIF()
-ELSEIF (OS_WINDOWS)
+ELSEIF (OS_WINDOWS)
DECLARE_EXTERNAL_RESOURCE(EXTERNAL_PYTHON38 ${PYTHON38_WINDOWS})
-ENDIF()
-
-END()
+ENDIF()
+
+END()
diff --git a/build/platform/python/resources.inc b/build/platform/python/resources.inc
index a730a5039b..6bace07e85 100644
--- a/build/platform/python/resources.inc
+++ b/build/platform/python/resources.inc
@@ -6,7 +6,7 @@ SET(PYTHON37_LINUX sbr:616675620)
SET(PYTHON38_LINUX sbr:1211259884)
SET(PYTHON39_LINUX sbr:2019057022)
SET(PYTHON310_LINUX sbr:2505611617)
-
+
SET(PYTHON27_LINUX_ARM64 sbr:2145006545)
SET(PYTHON27_DARWIN sbr:426406952)
@@ -17,7 +17,7 @@ SET(PYTHON37_DARWIN sbr:616740054)
SET(PYTHON38_DARWIN sbr:1211286783)
SET(PYTHON39_DARWIN sbr:2046345566)
SET(PYTHON310_DARWIN sbr:2505677448)
-
+
SET(PYTHON38_DARWIN_ARM64 sbr:2577173323)
SET(PYTHON39_DARWIN_ARM64 sbr:2533263197)
SET(PYTHON310_DARWIN_ARM64 sbr:2577250782)
diff --git a/build/platform/python/tests/test_common.py b/build/platform/python/tests/test_common.py
index 7a685330ea..f4689ff0a4 100644
--- a/build/platform/python/tests/test_common.py
+++ b/build/platform/python/tests/test_common.py
@@ -1,22 +1,22 @@
-import subprocess
-
-import pytest
-
-from build.platform.python.tests import testlib
-
+import subprocess
+
+import pytest
+
+from build.platform.python.tests import testlib
+
PYTHON_VERSIONS = ["2.7", "3.4", "3.5", "3.6"] # 3.7, 3.8 are not runnable
-
-
-@pytest.mark.parametrize("pyver", PYTHON_VERSIONS)
-def test_version_matched(pyver):
- testlib.check_python_version(pyver)
-
-
-@pytest.mark.parametrize("pyver", PYTHON_VERSIONS)
-def test_python_max_unicode_bytes(pyver):
- cmd = [testlib.get_python_bin(pyver), '-c', 'import sys; print(sys.maxunicode)']
- maxunicode = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode('utf-8')
- assert int(maxunicode) > 65535, "Found UCS2 build"
+
+
+@pytest.mark.parametrize("pyver", PYTHON_VERSIONS)
+def test_version_matched(pyver):
+ testlib.check_python_version(pyver)
+
+
+@pytest.mark.parametrize("pyver", PYTHON_VERSIONS)
+def test_python_max_unicode_bytes(pyver):
+ cmd = [testlib.get_python_bin(pyver), '-c', 'import sys; print(sys.maxunicode)']
+ maxunicode = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode('utf-8')
+ assert int(maxunicode) > 65535, "Found UCS2 build"
@pytest.mark.parametrize("pyver", PYTHON_VERSIONS)
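
The maxunicode probe above distinguishes wide (UCS-4) from narrow (UCS-2) CPython builds: narrow builds report sys.maxunicode == 65535, wide builds 1114111. A standalone sketch of the same check (the interpreter path is an assumption):

```python
import subprocess

def is_wide_unicode_build(python_bin):
    """True for a UCS-4 ("wide") build; narrow UCS-2 builds cap maxunicode at 65535."""
    out = subprocess.check_output(
        [python_bin, "-c", "import sys; print(sys.maxunicode)"],
        stderr=subprocess.STDOUT,
    ).decode("utf-8")
    return int(out) > 65535

# Illustrative usage:
# assert is_wide_unicode_build("/usr/bin/python2.7"), "Found UCS2 build"
```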
diff --git a/build/platform/python/tests/testlib.py b/build/platform/python/tests/testlib.py
index d12f2815d4..92fc571778 100644
--- a/build/platform/python/tests/testlib.py
+++ b/build/platform/python/tests/testlib.py
@@ -1,21 +1,21 @@
-import os
-import subprocess
-
-import yatest.common
-
-
-def get_python_bin(ver):
+import os
+import subprocess
+
+import yatest.common
+
+
+def get_python_bin(ver):
res_name = 'EXTERNAL_PYTHON{}_RESOURCE_GLOBAL'.format(ver.replace('.', ''))
- gr = yatest.common.global_resources()
- if res_name in gr:
- bindir = os.path.join(gr[res_name], 'python', 'bin')
+ gr = yatest.common.global_resources()
+ if res_name in gr:
+ bindir = os.path.join(gr[res_name], 'python', 'bin')
if ('python' + ver) in os.listdir(bindir):
return os.path.join(bindir, 'python' + ver)
- return os.path.join(bindir, 'python')
-
- raise AssertionError("Resource '{}' is not available: {}".format(res_name, gr))
-
-
-def check_python_version(version):
- ver = subprocess.check_output([get_python_bin(version), '-V'], stderr=subprocess.STDOUT).decode('utf-8')
- assert version in ver
+ return os.path.join(bindir, 'python')
+
+ raise AssertionError("Resource '{}' is not available: {}".format(res_name, gr))
+
+
+def check_python_version(version):
+ ver = subprocess.check_output([get_python_bin(version), '-V'], stderr=subprocess.STDOUT).decode('utf-8')
+ assert version in ver
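
get_python_bin above resolves an interpreter out of the unpacked EXTERNAL_PYTHON* resource published through yatest.common.global_resources(). A self-contained sketch of that lookup with a fabricated resource mapping (the real mapping is supplied by yatest at run time):

```python
import os

def resolve_python_bin(global_resources, ver):
    # Same resolution order as testlib.get_python_bin: prefer the
    # versioned binary (e.g. python3.6), fall back to plain 'python'.
    res_name = 'EXTERNAL_PYTHON{}_RESOURCE_GLOBAL'.format(ver.replace('.', ''))
    if res_name not in global_resources:
        raise AssertionError("Resource '{}' is not available".format(res_name))
    bindir = os.path.join(global_resources[res_name], 'python', 'bin')
    if ('python' + ver) in os.listdir(bindir):
        return os.path.join(bindir, 'python' + ver)
    return os.path.join(bindir, 'python')

# Hypothetical layout: /res/py36/python/bin/python3.6
# resolve_python_bin({'EXTERNAL_PYTHON36_RESOURCE_GLOBAL': '/res/py36'}, '3.6')
```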
diff --git a/build/platform/python/tests/ya.make b/build/platform/python/tests/ya.make
index 0d8965240e..3915de8969 100644
--- a/build/platform/python/tests/ya.make
+++ b/build/platform/python/tests/ya.make
@@ -1,36 +1,36 @@
-PY3TEST()
-
-OWNER(
- g:contrib
- g:yatool
-)
-
-IF (OS_DARWIN)
- SIZE(LARGE)
-
- TAG(
- ya:fat
- ya:force_sandbox ya:exotic_platform
- )
-ENDIF()
-
-PY_SRCS(
- testlib.py
-)
-
-TEST_SRCS(
- test_common.py
-)
-
-PEERDIR(
- build/platform/python/python27
- build/platform/python/python34
- build/platform/python/python35
- build/platform/python/python36
- build/platform/python/python37
- build/platform/python/python38
+PY3TEST()
+
+OWNER(
+ g:contrib
+ g:yatool
+)
+
+IF (OS_DARWIN)
+ SIZE(LARGE)
+
+ TAG(
+ ya:fat
+ ya:force_sandbox ya:exotic_platform
+ )
+ENDIF()
+
+PY_SRCS(
+ testlib.py
+)
+
+TEST_SRCS(
+ test_common.py
+)
+
+PEERDIR(
+ build/platform/python/python27
+ build/platform/python/python34
+ build/platform/python/python35
+ build/platform/python/python36
+ build/platform/python/python37
+ build/platform/python/python38
build/platform/python/python39
build/platform/python/python310
-)
-
-END()
+)
+
+END()
diff --git a/build/platform/python/ya.make b/build/platform/python/ya.make
index 247e65f4c9..67be319056 100644
--- a/build/platform/python/ya.make
+++ b/build/platform/python/ya.make
@@ -1,12 +1,12 @@
RESOURCES_LIBRARY()
-OWNER(
- g:contrib
- g:yatool
-)
-
-INCLUDE(resources.inc)
+OWNER(
+ g:contrib
+ g:yatool
+)
+INCLUDE(resources.inc)
+
IF (USE_SYSTEM_PYTHON)
IF (OS_LINUX)
IF (ARCH_ARM64 OR ARCH_AARCH64)
@@ -130,7 +130,7 @@ ELSEIF (NOT USE_ARCADIA_PYTHON)
ENDIF()
END()
-
-RECURSE_FOR_TESTS(
- tests
-)
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/platform/test_tool/a.yaml b/build/platform/test_tool/a.yaml
index 298148e450..e7540dfb74 100644
--- a/build/platform/test_tool/a.yaml
+++ b/build/platform/test_tool/a.yaml
@@ -1,16 +1,16 @@
service: devtools
-title: test_tool acceptance
-ci:
+title: test_tool acceptance
+ci:
release-title-source: flow
- secret: sec-01ekd5wc1dmdd544yp1xt3s9b8
- runtime:
- sandbox-owner: DEVTOOLS-LARGE
- autocheck:
- large-autostart:
- - target: devtools/ya/build/tests/cross_compilation/mac_builds_linux
- toolchains:
- - default-darwin-x86_64-release
- - target: devtools/ya/test/tests/port_manager/fat
- toolchains:
- - default-linux-x86_64-release-musl
- - default-darwin-x86_64-release
+ secret: sec-01ekd5wc1dmdd544yp1xt3s9b8
+ runtime:
+ sandbox-owner: DEVTOOLS-LARGE
+ autocheck:
+ large-autostart:
+ - target: devtools/ya/build/tests/cross_compilation/mac_builds_linux
+ toolchains:
+ - default-darwin-x86_64-release
+ - target: devtools/ya/test/tests/port_manager/fat
+ toolchains:
+ - default-linux-x86_64-release-musl
+ - default-darwin-x86_64-release
diff --git a/build/platform/test_tool/ya.make b/build/platform/test_tool/ya.make
index fd97d51ed8..ffc48022fa 100644
--- a/build/platform/test_tool/ya.make
+++ b/build/platform/test_tool/ya.make
@@ -1,8 +1,8 @@
RESOURCES_LIBRARY()
-OWNER(
- g:yatest
- heretic
-)
+OWNER(
+ g:yatest
+ heretic
+)
IF (TEST_TOOL_HOST_LOCAL)
MESSAGE(WARNING Host test tool $TEST_TOOL_HOST_LOCAL will be used)
diff --git a/build/plugins/_common.py b/build/plugins/_common.py
index 2f831a94db..7bf29b4d6f 100644
--- a/build/plugins/_common.py
+++ b/build/plugins/_common.py
@@ -188,13 +188,13 @@ def filter_out_by_keyword(test_data, keyword):
def generate_chunks(lst, chunk_size):
for i in xrange(0, len(lst), chunk_size):
yield lst[i:(i + chunk_size)]
-
-
-def strip_roots(path):
- for prefix in ["$B/", "$S/"]:
- if path.startswith(prefix):
- return path[len(prefix):]
- return path
+
+
+def strip_roots(path):
+ for prefix in ["$B/", "$S/"]:
+ if path.startswith(prefix):
+ return path[len(prefix):]
+ return path
def to_yesno(x):
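
A quick usage note for the restored strip_roots: $S/ and $B/ are ymake's source- and build-root prefixes, and paths carrying neither prefix pass through unchanged (examples illustrative):

```python
# Assuming strip_roots from build/plugins/_common.py is in scope:
assert strip_roots("$S/build/plugins/_common.py") == "build/plugins/_common.py"
assert strip_roots("$B/build/generated/file.cpp") == "build/generated/file.cpp"
assert strip_roots("no/root/prefix") == "no/root/prefix"  # left untouched
```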
diff --git a/build/plugins/_requirements.py b/build/plugins/_requirements.py
index c27635e852..11cb92ebe7 100644
--- a/build/plugins/_requirements.py
+++ b/build/plugins/_requirements.py
@@ -6,22 +6,22 @@ def check_cpu(suite_cpu_requirements, test_size, is_kvm=False):
max_cpu_requirements = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Cpu)
if isinstance(suite_cpu_requirements, str):
if all(consts.TestRequirementsConstants.is_all_cpu(req) for req in (max_cpu_requirements, suite_cpu_requirements)):
- return None
- return "Wrong 'cpu' requirements: {}, should be in [{}..{}] for {}-size tests".format(suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size)
+ return None
+ return "Wrong 'cpu' requirements: {}, should be in [{}..{}] for {}-size tests".format(suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size)
if not isinstance(suite_cpu_requirements, int):
- return "Wrong 'cpu' requirements: {}, should be integer".format(suite_cpu_requirements)
+ return "Wrong 'cpu' requirements: {}, should be integer".format(suite_cpu_requirements)
if suite_cpu_requirements < min_cpu_requirements or suite_cpu_requirements > consts.TestRequirementsConstants.get_cpu_value(max_cpu_requirements):
- return "Wrong 'cpu' requirement: {}, should be in [{}..{}] for {}-size tests".format(suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size)
+ return "Wrong 'cpu' requirement: {}, should be in [{}..{}] for {}-size tests".format(suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size)
- return None
+ return None
# TODO: Remove is_kvm param when there will be guarantees on RAM
def check_ram(suite_ram_requirements, test_size, is_kvm=False):
if not isinstance(suite_ram_requirements, int):
- return "Wrong 'ram' requirements: {}, should be integer".format(suite_ram_requirements)
+ return "Wrong 'ram' requirements: {}, should be integer".format(suite_ram_requirements)
min_ram_requirements = consts.TestRequirementsConstants.MinRam
max_ram_requirements = consts.MAX_RAM_REQUIREMENTS_FOR_KVM if is_kvm else consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Ram)
if suite_ram_requirements < min_ram_requirements or suite_ram_requirements > max_ram_requirements:
@@ -29,7 +29,7 @@ def check_ram(suite_ram_requirements, test_size, is_kvm=False):
if is_kvm:
err_msg += ' with kvm requirements'
return err_msg
- return None
+ return None
def check_ram_disk(suite_ram_disk, test_size, is_kvm=False):
@@ -37,13 +37,13 @@ def check_ram_disk(suite_ram_disk, test_size, is_kvm=False):
max_ram_disk = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.RamDisk)
if isinstance(suite_ram_disk, str):
if all(consts.TestRequirementsConstants.is_all_ram_disk(req) for req in (max_ram_disk, suite_ram_disk)):
- return None
- return "Wrong 'ram_disk' requirements: {}, should be in [{}..{}] for {}-size tests".format(suite_ram_disk, 0, max_ram_disk, test_size)
+ return None
+ return "Wrong 'ram_disk' requirements: {}, should be in [{}..{}] for {}-size tests".format(suite_ram_disk, 0, max_ram_disk, test_size)
if not isinstance(suite_ram_disk, int):
- return "Wrong 'ram_disk' requirements: {}, should be integer".format(suite_ram_disk)
+ return "Wrong 'ram_disk' requirements: {}, should be integer".format(suite_ram_disk)
if suite_ram_disk < min_ram_disk or suite_ram_disk > consts.TestRequirementsConstants.get_ram_disk_value(max_ram_disk):
- return "Wrong 'ram_disk' requirement: {}, should be in [{}..{}] for {}-size tests".format(suite_ram_disk, min_ram_disk, max_ram_disk, test_size)
+ return "Wrong 'ram_disk' requirement: {}, should be in [{}..{}] for {}-size tests".format(suite_ram_disk, min_ram_disk, max_ram_disk, test_size)
- return None
+ return None
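
The restored `return None` lines matter because these validators follow a None-on-success, error-string-on-failure convention; callers only test truthiness. A sketch under that assumption (values hypothetical, size strings follow consts.TestSize):

```python
# None-on-success convention: only a failed check yields a message.
def report(error_msg):
    if error_msg:                      # truthy only when validation failed
        print("configure error:", error_msg)

report(check_ram("lots", "small"))     # -> "... should be integer"
report(check_ram(4, "small"))          # -> silent, requirement accepted
```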
diff --git a/build/plugins/_test_const.py b/build/plugins/_test_const.py
index 0d03cc3d17..33e9b989c3 100644
--- a/build/plugins/_test_const.py
+++ b/build/plugins/_test_const.py
@@ -1,34 +1,34 @@
-# coding: utf-8
+# coding: utf-8
import re
import sys
-
-RESTART_TEST_INDICATOR = '##restart-test##'
-INFRASTRUCTURE_ERROR_INDICATOR = '##infrastructure-error##'
-
-RESTART_TEST_INDICATORS = [
- RESTART_TEST_INDICATOR,
- "network error",
-]
-
-# testing
-BIN_DIRECTORY = 'bin'
-CANONIZATION_RESULT_FILE_NAME = "canonization_res.json"
-CONSOLE_SNIPPET_LIMIT = 5000
-LIST_NODE_LOG_FILE = "test_list.log"
-LIST_NODE_RESULT_FILE = "test_list.json"
-LIST_RESULT_NODE_LOG_FILE = "list_result.log"
-MAX_FILE_SIZE = 1024 * 1024 * 2 # 2 MB
-MAX_TEST_RESTART_COUNT = 3
-REPORT_SNIPPET_LIMIT = 10000
-SANITIZER_ERROR_RC = 100
-TEST_SUBTEST_SEPARATOR = '::'
-TESTING_OUT_DIR_NAME = "testing_out_stuff"
+
+RESTART_TEST_INDICATOR = '##restart-test##'
+INFRASTRUCTURE_ERROR_INDICATOR = '##infrastructure-error##'
+
+RESTART_TEST_INDICATORS = [
+ RESTART_TEST_INDICATOR,
+ "network error",
+]
+
+# testing
+BIN_DIRECTORY = 'bin'
+CANONIZATION_RESULT_FILE_NAME = "canonization_res.json"
+CONSOLE_SNIPPET_LIMIT = 5000
+LIST_NODE_LOG_FILE = "test_list.log"
+LIST_NODE_RESULT_FILE = "test_list.json"
+LIST_RESULT_NODE_LOG_FILE = "list_result.log"
+MAX_FILE_SIZE = 1024 * 1024 * 2 # 2 MB
+MAX_TEST_RESTART_COUNT = 3
+REPORT_SNIPPET_LIMIT = 10000
+SANITIZER_ERROR_RC = 100
+TEST_SUBTEST_SEPARATOR = '::'
+TESTING_OUT_DIR_NAME = "testing_out_stuff"
TESTING_OUT_TAR_NAME = TESTING_OUT_DIR_NAME + ".tar"
-TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
-TRACE_FILE_NAME = "ytest.report.trace"
-TRUNCATING_IGNORE_FILE_LIST = {TRACE_FILE_NAME, "run_test.log"}
-
+TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
+TRACE_FILE_NAME = "ytest.report.trace"
+TRUNCATING_IGNORE_FILE_LIST = {TRACE_FILE_NAME, "run_test.log"}
+
# kvm
DEFAULT_RAM_REQUIREMENTS_FOR_KVM = 4
MAX_RAM_REQUIREMENTS_FOR_KVM = 16
@@ -37,29 +37,29 @@ MAX_RAM_REQUIREMENTS_FOR_KVM = 16
TEST_NODE_FINISHING_TIME = 5 * 60
DEFAULT_TEST_NODE_TIMEOUT = 15 * 60
-# coverage
+# coverage
COVERAGE_TESTS_TIMEOUT_FACTOR = 1.5
COVERAGE_RESOLVED_FILE_NAME_PATTERN = "coverage_resolved.{}.json"
CPP_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("cpp")
JAVA_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("java")
PYTHON_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("python")
CLANG_COVERAGE_TEST_TYPES = ("unittest", "coverage_extractor", "pytest", "py3test", "gtest", "boost_test", "exectest")
-COVERAGE_TABLE_CHUNKS = 20
-COVERAGE_YT_PROXY = "hahn.yt.yandex.net"
-COVERAGE_YT_ROOT_PATH = "//home/codecoverage"
-COVERAGE_YT_TABLE_PREFIX = "datatable"
-
-# fuzzing
-CORPUS_DATA_FILE_NAME = 'corpus.json'
-CORPUS_DATA_ROOT_DIR = 'fuzzing'
-CORPUS_DIR_NAME = 'corpus'
-FUZZING_COMPRESSION_COEF = 1.1
-FUZZING_DEFAULT_TIMEOUT = 3600
+COVERAGE_TABLE_CHUNKS = 20
+COVERAGE_YT_PROXY = "hahn.yt.yandex.net"
+COVERAGE_YT_ROOT_PATH = "//home/codecoverage"
+COVERAGE_YT_TABLE_PREFIX = "datatable"
+
+# fuzzing
+CORPUS_DATA_FILE_NAME = 'corpus.json'
+CORPUS_DATA_ROOT_DIR = 'fuzzing'
+CORPUS_DIR_NAME = 'corpus'
+FUZZING_COMPRESSION_COEF = 1.1
+FUZZING_DEFAULT_TIMEOUT = 3600
FUZZING_FINISHING_TIME = 600
-FUZZING_TIMEOUT_RE = re.compile(r'(^|\s)-max_total_time=(?P<max_time>\d+)')
-GENERATED_CORPUS_DIR_NAME = 'mined_corpus'
-MAX_CORPUS_RESOURCES_ALLOWED = 5
-
+FUZZING_TIMEOUT_RE = re.compile(r'(^|\s)-max_total_time=(?P<max_time>\d+)')
+GENERATED_CORPUS_DIR_NAME = 'mined_corpus'
+MAX_CORPUS_RESOURCES_ALLOWED = 5
+
TEST_TOOL_HOST = 'TEST_TOOL_HOST_RESOURCE_GLOBAL'
TEST_TOOL_TARGET = 'TEST_TOOL_TARGET_RESOURCE_GLOBAL'
TEST_TOOL_HOST_LOCAL = 'TEST_TOOL_HOST_LOCAL'
@@ -73,15 +73,15 @@ FLAKES_PY2_RESOURCE = 'FLAKES_PY2_RESOURCE_GLOBAL'
FLAKES_PY3_RESOURCE = 'FLAKES_PY3_RESOURCE_GLOBAL'
FLAKE8_PY2_RESOURCE = 'FLAKE8_PY2_RESOURCE_GLOBAL'
FLAKE8_PY3_RESOURCE = 'FLAKE8_PY3_RESOURCE_GLOBAL'
-
-
-class Enum(object):
-
- @classmethod
- def enumerate(cls):
- return [v for k, v in cls.__dict__.items() if not k.startswith("_")]
-
-
+
+
+class Enum(object):
+
+ @classmethod
+ def enumerate(cls):
+ return [v for k, v in cls.__dict__.items() if not k.startswith("_")]
+
+
class TestRequirements(Enum):
Container = 'container'
Cpu = 'cpu'
@@ -119,7 +119,7 @@ class TestRequirementsConstants(Enum):
return cls.AllRamDiskValue if cls.is_all_ram_disk(value) else value
-class TestSize(Enum):
+class TestSize(Enum):
Small = 'small'
Medium = 'medium'
Large = 'large'
@@ -172,7 +172,7 @@ class TestSize(Enum):
TestRequirements.RamDisk: 4,
},
Large: {
- TestRequirements.Cpu: 4,
+ TestRequirements.Cpu: 4,
TestRequirements.Ram: 32,
TestRequirements.RamDisk: 4,
},
@@ -207,14 +207,14 @@ class TestSize(Enum):
raise Exception("Unknown test size '{}'".format(size))
-class TestRunExitCode(Enum):
+class TestRunExitCode(Enum):
Skipped = 2
Failed = 3
TimeOut = 10
- InfrastructureError = 12
+ InfrastructureError = 12
-class YaTestTags(Enum):
+class YaTestTags(Enum):
Manual = "ya:manual"
Notags = "ya:notags"
Norestart = "ya:norestart"
@@ -228,100 +228,100 @@ class YaTestTags(Enum):
class Status(object):
- GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(1, 8)
+ GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(1, 8)
SKIPPED = -100
NOT_LAUNCHED = -200
CANON_DIFF = -300
DESELECTED = -400
INTERNAL = -sys.maxint
- FLAKY = -50
+ FLAKY = -50
BY_NAME = {'good': GOOD, 'fail': FAIL, 'xfail': XFAIL, 'xpass': XPASS, 'missing': MISSING, 'crashed': CRASHED,
'skipped': SKIPPED, 'flaky': FLAKY, 'not_launched': NOT_LAUNCHED, 'timeout': TIMEOUT, 'diff': CANON_DIFF,
'internal': INTERNAL, 'deselected': DESELECTED}
TO_STR = {GOOD: 'good', FAIL: 'fail', XFAIL: 'xfail', XPASS: 'xpass', MISSING: 'missing', CRASHED: 'crashed',
SKIPPED: 'skipped', FLAKY: 'flaky', NOT_LAUNCHED: 'not_launched', TIMEOUT: 'timeout', CANON_DIFF: 'diff',
INTERNAL: 'internal', DESELECTED: 'deselected'}
-
-
-class _Colors(object):
-
- _NAMES = [
- "blue",
- "cyan",
- "default",
- "green",
- "grey",
- "magenta",
- "red",
- "white",
- "yellow",
- ]
+
+
+class _Colors(object):
+
+ _NAMES = [
+ "blue",
+ "cyan",
+ "default",
+ "green",
+ "grey",
+ "magenta",
+ "red",
+ "white",
+ "yellow",
+ ]
_PREFIXES = ["", "light", "dark"]
-
- def __init__(self):
- self._table = {}
- for prefix in self._PREFIXES:
- for value in self._NAMES:
- name = value
- if prefix:
- name = "{}_{}".format(prefix, value)
- value = "{}-{}".format(prefix, value)
- self.__add_color(name.upper(), value)
-
- def __add_color(self, name, value):
- self._table[name] = value
- self.__setattr__(name, value)
-
-
-Colors = _Colors()
-
-
-class _Highlight(object):
-
- _MARKERS = {
- # special
- "RESET": "rst",
-
- "IMPORTANT": "imp",
- "UNIMPORTANT": "unimp",
- "BAD": "bad",
- "WARNING": "warn",
- "GOOD": "good",
- "PATH": "path",
- "ALTERNATIVE1": "alt1",
- "ALTERNATIVE2": "alt2",
- "ALTERNATIVE3": "alt3",
- }
-
- def __init__(self):
- # setting attributes because __getattr__ is much slower
- for attr, value in self._MARKERS.items():
- self.__setattr__(attr, value)
-
-
-Highlight = _Highlight()
-
-
-class _StatusColorMap(object):
-
- _MAP = {
- 'good': Highlight.GOOD,
- 'fail': Highlight.BAD,
- 'missing': Highlight.ALTERNATIVE1,
- 'crashed': Highlight.WARNING,
- 'skipped': Highlight.UNIMPORTANT,
- 'not_launched': Highlight.BAD,
- 'timeout': Highlight.BAD,
- 'flaky': Highlight.ALTERNATIVE3,
- 'xfail': Highlight.WARNING,
+
+ def __init__(self):
+ self._table = {}
+ for prefix in self._PREFIXES:
+ for value in self._NAMES:
+ name = value
+ if prefix:
+ name = "{}_{}".format(prefix, value)
+ value = "{}-{}".format(prefix, value)
+ self.__add_color(name.upper(), value)
+
+ def __add_color(self, name, value):
+ self._table[name] = value
+ self.__setattr__(name, value)
+
+
+Colors = _Colors()
+
+
+class _Highlight(object):
+
+ _MARKERS = {
+ # special
+ "RESET": "rst",
+
+ "IMPORTANT": "imp",
+ "UNIMPORTANT": "unimp",
+ "BAD": "bad",
+ "WARNING": "warn",
+ "GOOD": "good",
+ "PATH": "path",
+ "ALTERNATIVE1": "alt1",
+ "ALTERNATIVE2": "alt2",
+ "ALTERNATIVE3": "alt3",
+ }
+
+ def __init__(self):
+ # setting attributes because __getattr__ is much slower
+ for attr, value in self._MARKERS.items():
+ self.__setattr__(attr, value)
+
+
+Highlight = _Highlight()
+
+
+class _StatusColorMap(object):
+
+ _MAP = {
+ 'good': Highlight.GOOD,
+ 'fail': Highlight.BAD,
+ 'missing': Highlight.ALTERNATIVE1,
+ 'crashed': Highlight.WARNING,
+ 'skipped': Highlight.UNIMPORTANT,
+ 'not_launched': Highlight.BAD,
+ 'timeout': Highlight.BAD,
+ 'flaky': Highlight.ALTERNATIVE3,
+ 'xfail': Highlight.WARNING,
'xpass': Highlight.WARNING,
- 'diff': Highlight.BAD,
- 'internal': Highlight.BAD,
- 'deselected': Highlight.UNIMPORTANT,
- }
-
- def __getitem__(self, item):
- return self._MAP[item]
-
-
-StatusColorMap = _StatusColorMap()
+ 'diff': Highlight.BAD,
+ 'internal': Highlight.BAD,
+ 'deselected': Highlight.UNIMPORTANT,
+ }
+
+ def __getitem__(self, item):
+ return self._MAP[item]
+
+
+StatusColorMap = _StatusColorMap()
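
The tail of this file restores three small singletons: _Colors expands the _PREFIXES x _NAMES product into attributes (BLUE == "blue", LIGHT_RED == "light-red"), _Highlight holds the markers behind the [[imp]]/[[bad]]-style markup seen in error strings elsewhere in this patch, and _StatusColorMap routes a status string to a marker. Illustrative reads (names taken from the code above):

```python
print(Colors.LIGHT_GREEN)         # "light-green", built in _Colors.__init__
print(Highlight.BAD)              # "bad" -- the [[bad]] markup marker
print(StatusColorMap['timeout'])  # timeouts render with Highlight.BAD
```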
diff --git a/build/plugins/code_generator.py b/build/plugins/code_generator.py
index ca8bb18c15..e527c2b8bb 100644
--- a/build/plugins/code_generator.py
+++ b/build/plugins/code_generator.py
@@ -3,7 +3,7 @@ import os
import _import_wrapper as iw
-pattern = re.compile(r"#include\s*[<\"](?P<INDUCED>[^>\"]+)[>\"]|(?:@|{@)\s*(?:import|include|from)\s*[\"'](?P<INCLUDE>[^\"']+)[\"']")
+pattern = re.compile(r"#include\s*[<\"](?P<INDUCED>[^>\"]+)[>\"]|(?:@|{@)\s*(?:import|include|from)\s*[\"'](?P<INCLUDE>[^\"']+)[\"']")
class CodeGeneratorTemplateParser(object):
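
The restored pattern pulls two kinds of dependencies out of template sources: C-style #include targets (group INDUCED) and @import/{@ include}-style template references (group INCLUDE). A small demonstration (the sample text is made up):

```python
import re

pattern = re.compile(r"#include\s*[<\"](?P<INDUCED>[^>\"]+)[>\"]|(?:@|{@)\s*(?:import|include|from)\s*[\"'](?P<INCLUDE>[^\"']+)[\"']")

sample = '#include <util/generic/string.h>\n{@ import "lib/macros.tmpl" @}'
for m in pattern.finditer(sample):
    print(m.group('INDUCED') or m.group('INCLUDE'))
# util/generic/string.h
# lib/macros.tmpl
```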
diff --git a/build/plugins/macros_with_error.py b/build/plugins/macros_with_error.py
index e82fb56d2c..4cd0544d18 100644
--- a/build/plugins/macros_with_error.py
+++ b/build/plugins/macros_with_error.py
@@ -1,26 +1,26 @@
import sys
-import _common
-
-import ymake
-
+import _common
+import ymake
+
+
def onmacros_with_error(unit, *args):
print >> sys.stderr, 'This macros will fail'
raise Exception('Expected fail in MACROS_WITH_ERROR')
-
-
-def onrestrict_path(unit, *args):
- if args:
- if 'MSG' in args:
- pos = args.index('MSG')
- paths, msg = args[:pos], args[pos + 1:]
- msg = ' '.join(msg)
- else:
- paths, msg = args, 'forbidden'
- if not _common.strip_roots(unit.path()).startswith(paths):
- error_msg = "Path '[[imp]]{}[[rst]]' is restricted - [[bad]]{}[[rst]]. Valid path prefixes are: [[unimp]]{}[[rst]]".format(unit.path(), msg, ', '.join(paths))
- ymake.report_configure_error(error_msg)
+
+
+def onrestrict_path(unit, *args):
+ if args:
+ if 'MSG' in args:
+ pos = args.index('MSG')
+ paths, msg = args[:pos], args[pos + 1:]
+ msg = ' '.join(msg)
+ else:
+ paths, msg = args, 'forbidden'
+ if not _common.strip_roots(unit.path()).startswith(paths):
+ error_msg = "Path '[[imp]]{}[[rst]]' is restricted - [[bad]]{}[[rst]]. Valid path prefixes are: [[unimp]]{}[[rst]]".format(unit.path(), msg, ', '.join(paths))
+ ymake.report_configure_error(error_msg)
def onassert(unit, *args):
val = unit.get(args[0])
diff --git a/build/plugins/pybuild.py b/build/plugins/pybuild.py
index f32a2d39a0..1e18f14051 100644
--- a/build/plugins/pybuild.py
+++ b/build/plugins/pybuild.py
@@ -1,7 +1,7 @@
-import os
-import collections
+import os
+import collections
from hashlib import md5
-
+
import ymake
from _common import stripext, rootrel_arc_src, tobuilddir, listid, resolve_to_ymake_path, generate_chunks, pathid
@@ -61,55 +61,55 @@ def mangle(name):
return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))
-def parse_pyx_includes(filename, path, source_root, seen=None):
- normpath = lambda *x: os.path.normpath(os.path.join(*x))
-
- abs_path = normpath(source_root, filename)
- seen = seen or set()
- if abs_path in seen:
- return
- seen.add(abs_path)
-
- if not os.path.exists(abs_path):
- # File might be missing, because it might be generated
- return
-
- with open(abs_path, 'rb') as f:
- # Don't parse cimports and etc - irrelevant for cython, it's linker work
+def parse_pyx_includes(filename, path, source_root, seen=None):
+ normpath = lambda *x: os.path.normpath(os.path.join(*x))
+
+ abs_path = normpath(source_root, filename)
+ seen = seen or set()
+ if abs_path in seen:
+ return
+ seen.add(abs_path)
+
+ if not os.path.exists(abs_path):
+ # File might be missing, because it might be generated
+ return
+
+ with open(abs_path, 'rb') as f:
+ # Don't parse cimports and etc - irrelevant for cython, it's linker work
includes = ymake.parse_cython_includes(f.read())
-
- abs_dirname = os.path.dirname(abs_path)
- # All includes are relative to the file which include
- path_dirname = os.path.dirname(path)
- file_dirname = os.path.dirname(filename)
-
- for incfile in includes:
- abs_path = normpath(abs_dirname, incfile)
- if os.path.exists(abs_path):
- incname, incpath = normpath(file_dirname, incfile), normpath(path_dirname, incfile)
- yield (incname, incpath)
- # search for includes in the included files
- for e in parse_pyx_includes(incname, incpath, source_root, seen):
- yield e
- else:
- # There might be arcadia root or cython relative include.
- # Don't treat such file as missing, because there must be PEERDIR on py_library
- # which contains it.
- for path in [
- source_root,
- source_root + "/contrib/tools/cython/Cython/Includes",
- ]:
- if os.path.exists(normpath(path, incfile)):
- break
- else:
- ymake.report_configure_error("'{}' includes missing file: {} ({})".format(path, incfile, abs_path))
-
+
+ abs_dirname = os.path.dirname(abs_path)
+ # All includes are relative to the file which include
+ path_dirname = os.path.dirname(path)
+ file_dirname = os.path.dirname(filename)
+
+ for incfile in includes:
+ abs_path = normpath(abs_dirname, incfile)
+ if os.path.exists(abs_path):
+ incname, incpath = normpath(file_dirname, incfile), normpath(path_dirname, incfile)
+ yield (incname, incpath)
+ # search for includes in the included files
+ for e in parse_pyx_includes(incname, incpath, source_root, seen):
+ yield e
+ else:
+ # There might be arcadia root or cython relative include.
+ # Don't treat such file as missing, because there must be PEERDIR on py_library
+ # which contains it.
+ for path in [
+ source_root,
+ source_root + "/contrib/tools/cython/Cython/Includes",
+ ]:
+ if os.path.exists(normpath(path, incfile)):
+ break
+ else:
+ ymake.report_configure_error("'{}' includes missing file: {} ({})".format(path, incfile, abs_path))
+
def has_pyx(args):
return any(arg.endswith('.pyx') for arg in args)
-
-def get_srcdir(path, unit):
- return rootrel_arc_src(path, unit)[:-len(path)].rstrip('/')
-
+
+def get_srcdir(path, unit):
+ return rootrel_arc_src(path, unit)[:-len(path)].rstrip('/')
+
def add_python_lint_checks(unit, py_ver, files):
def get_resolved_files():
resolved_files = []
@@ -119,27 +119,27 @@ def add_python_lint_checks(unit, py_ver, files):
resolved_files.append(resolved)
return resolved_files
- if unit.get('LINT_LEVEL_VALUE') == "none":
-
- no_lint_allowed_paths = (
- "contrib/",
- "devtools/",
- "junk/",
- # temporary allowed, TODO: remove
- "taxi/uservices/",
- "travel/",
+ if unit.get('LINT_LEVEL_VALUE') == "none":
+
+ no_lint_allowed_paths = (
+ "contrib/",
+ "devtools/",
+ "junk/",
+ # temporary allowed, TODO: remove
+ "taxi/uservices/",
+ "travel/",
"market/report/lite/", # MARKETOUT-38662, deadline: 2021-08-12
"passport/backend/oauth/", # PASSP-35982
- )
-
- upath = unit.path()[3:]
-
- if not upath.startswith(no_lint_allowed_paths):
- ymake.report_configure_error("NO_LINT() is allowed only in " + ", ".join(no_lint_allowed_paths))
-
- if files and unit.get('LINT_LEVEL_VALUE') not in ("none", "none_internal"):
+ )
+
+ upath = unit.path()[3:]
+
+ if not upath.startswith(no_lint_allowed_paths):
+ ymake.report_configure_error("NO_LINT() is allowed only in " + ", ".join(no_lint_allowed_paths))
+
+ if files and unit.get('LINT_LEVEL_VALUE') not in ("none", "none_internal"):
resolved_files = get_resolved_files()
- flake8_cfg = 'build/config/tests/flake8/flake8.conf'
+ flake8_cfg = 'build/config/tests/flake8/flake8.conf'
unit.onadd_check(["flake8.py{}".format(py_ver), flake8_cfg] + resolved_files)
@@ -214,14 +214,14 @@ def onpy_srcs(unit, *args):
ns = ""
else:
ns = (unit.get('PY_NAMESPACE_VALUE') or upath.replace('/', '.')) + '.'
-
- cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes'
+
+ cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes'
cythonize_py = False
optimize_proto = unit.get('OPTIMIZE_PY_PROTOS_FLAG') == 'yes'
-
+
cython_directives = []
- if cython_coverage:
- cython_directives += ['-X', 'linetrace=True']
+ if cython_coverage:
+ cython_directives += ['-X', 'linetrace=True']
pyxs_c = []
pyxs_c_h = []
@@ -353,41 +353,41 @@ def onpy_srcs(unit, *args):
dump_output.close()
if pyxs:
- files2res = set()
- # Include map stores files which were included in the processing pyx file,
- # to be able to find source code of the included file inside generated file
- # for currently processing pyx file.
- include_map = collections.defaultdict(set)
-
- if cython_coverage:
- def process_pyx(filename, path, out_suffix, noext):
- # skip generated files
- if not is_arc_src(path, unit):
- return
- # source file
- files2res.add((filename, path))
- # generated
- if noext:
- files2res.add((os.path.splitext(filename)[0] + out_suffix, os.path.splitext(path)[0] + out_suffix))
- else:
- files2res.add((filename + out_suffix, path + out_suffix))
- # used includes
- for entry in parse_pyx_includes(filename, path, unit.resolve('$S')):
- files2res.add(entry)
- include_arc_rel = entry[0]
- include_map[filename].add(include_arc_rel)
- else:
- def process_pyx(filename, path, out_suffix, noext):
- pass
-
- for pyxs, cython, out_suffix, noext in [
+ files2res = set()
+ # Include map stores files which were included in the processing pyx file,
+ # to be able to find source code of the included file inside generated file
+ # for currently processing pyx file.
+ include_map = collections.defaultdict(set)
+
+ if cython_coverage:
+ def process_pyx(filename, path, out_suffix, noext):
+ # skip generated files
+ if not is_arc_src(path, unit):
+ return
+ # source file
+ files2res.add((filename, path))
+ # generated
+ if noext:
+ files2res.add((os.path.splitext(filename)[0] + out_suffix, os.path.splitext(path)[0] + out_suffix))
+ else:
+ files2res.add((filename + out_suffix, path + out_suffix))
+ # used includes
+ for entry in parse_pyx_includes(filename, path, unit.resolve('$S')):
+ files2res.add(entry)
+ include_arc_rel = entry[0]
+ include_map[filename].add(include_arc_rel)
+ else:
+ def process_pyx(filename, path, out_suffix, noext):
+ pass
+
+ for pyxs, cython, out_suffix, noext in [
(pyxs_c, unit.on_buildwith_cython_c_dep, ".c", False),
(pyxs_c_h, unit.on_buildwith_cython_c_h, ".c", True),
(pyxs_c_api_h, unit.on_buildwith_cython_c_api_h, ".c", True),
(pyxs_cpp, unit.on_buildwith_cython_cpp_dep, ".cpp", False),
]:
for path, mod in pyxs:
- filename = rootrel_arc_src(path, unit)
+ filename = rootrel_arc_src(path, unit)
cython_args = [path]
dep = path
@@ -400,26 +400,26 @@ def onpy_srcs(unit, *args):
cython_args += [
'--module-name', mod,
'--init-suffix', mangle(mod),
- '--source-root', '${ARCADIA_ROOT}',
- # set arcadia root relative __file__ for generated modules
- '-X', 'set_initial_path={}'.format(filename),
+ '--source-root', '${ARCADIA_ROOT}',
+ # set arcadia root relative __file__ for generated modules
+ '-X', 'set_initial_path={}'.format(filename),
] + cython_directives
cython(cython_args)
py_register(unit, mod, py3)
- process_pyx(filename, path, out_suffix, noext)
-
- if files2res:
- # Compile original and generated sources into target for proper cython coverage calculation
- unit.onresource_files([x for name, path in files2res for x in ('DEST', name, path)])
-
- if include_map:
- data = []
- prefix = 'resfs/cython/include'
- for line in sorted('{}/{}={}'.format(prefix, filename, ':'.join(sorted(files))) for filename, files in include_map.iteritems()):
- data += ['-', line]
- unit.onresource(data)
-
+ process_pyx(filename, path, out_suffix, noext)
+
+ if files2res:
+ # Compile original and generated sources into target for proper cython coverage calculation
+ unit.onresource_files([x for name, path in files2res for x in ('DEST', name, path)])
+
+ if include_map:
+ data = []
+ prefix = 'resfs/cython/include'
+ for line in sorted('{}/{}={}'.format(prefix, filename, ':'.join(sorted(files))) for filename, files in include_map.iteritems()):
+ data += ['-', line]
+ unit.onresource(data)
+
for swigs, on_swig_python in [
(swigs_c, unit.on_swig_python_c),
(swigs_cpp, unit.on_swig_python_cpp),
@@ -433,11 +433,11 @@ def onpy_srcs(unit, *args):
onpy_srcs(unit, swg_py + '=' + mod)
if pys:
- pys_seen = set()
- pys_dups = {m for _, m in pys if (m in pys_seen or pys_seen.add(m))}
- if pys_dups:
- ymake.report_configure_error('Duplicate(s) is found in the PY_SRCS macro: {}'.format(pys_dups))
-
+ pys_seen = set()
+ pys_dups = {m for _, m in pys if (m in pys_seen or pys_seen.add(m))}
+ if pys_dups:
+ ymake.report_configure_error('Duplicate(s) is found in the PY_SRCS macro: {}'.format(pys_dups))
+
res = []
if py3:
@@ -523,10 +523,10 @@ def onpy_srcs(unit, *args):
def _check_test_srcs(*args):
- used = set(args) & {"NAMESPACE", "TOP_LEVEL", "__main__.py"}
- if used:
- param = list(used)[0]
- ymake.report_configure_error('in TEST_SRCS: you cannot use {} here - it would broke testing machinery'.format(param))
+ used = set(args) & {"NAMESPACE", "TOP_LEVEL", "__main__.py"}
+ if used:
+ param = list(used)[0]
+ ymake.report_configure_error('in TEST_SRCS: you cannot use {} here - it would broke testing machinery'.format(param))
def ontest_srcs(unit, *args):
@@ -606,21 +606,21 @@ def onpy_main(unit, arg):
arg += ':main'
py_main(unit, arg)
-
-
-def onpy_constructor(unit, arg):
- """
- @usage: PY_CONSTRUCTOR(package.module[:func])
-
- Specifies the module or function which will be started before python's main()
- init() is expected in the target module if no function is specified
- Can be considered as __attribute__((constructor)) for python
- """
- if ':' not in arg:
- arg = arg + '=init'
- else:
- arg[arg.index(':')] = '='
- unit.onresource(['-', 'py/constructors/{}'.format(arg)])
+
+
+def onpy_constructor(unit, arg):
+ """
+ @usage: PY_CONSTRUCTOR(package.module[:func])
+
+ Specifies the module or function which will be started before python's main()
+ init() is expected in the target module if no function is specified
+ Can be considered as __attribute__((constructor)) for python
+ """
+ if ':' not in arg:
+ arg = arg + '=init'
+ else:
+ arg[arg.index(':')] = '='
+ unit.onresource(['-', 'py/constructors/{}'.format(arg)])
def onpy_enums_serialization(unit, *args):
ns = ''
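
Among the restored pybuild.py hunks, the PY_SRCS duplicate check is the least obvious: it leans on set.add() returning None, so a first occurrence is recorded without matching while a repeat matches via membership. A standalone sketch with invented module names:

```python
# set.add() returns None: first sight of a module records it and the
# condition stays falsy; a repeat is truthy through the membership test.
pys = [("a.pyx", "pkg.a"), ("b.py", "pkg.b"), ("a2.py", "pkg.a")]
seen = set()
dups = {mod for _, mod in pys if (mod in seen or seen.add(mod))}
print(dups)  # {'pkg.a'} -- would trigger the configure error above
```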
diff --git a/build/plugins/res.py b/build/plugins/res.py
index a937caba81..fccfb51eb5 100644
--- a/build/plugins/res.py
+++ b/build/plugins/res.py
@@ -9,7 +9,7 @@ def split(lst, limit):
filepath = None
lenght = 0
bucket = []
-
+
for item in lst:
if filepath:
lenght += root_lenght + len(filepath) + len(item)
@@ -17,17 +17,17 @@ def split(lst, limit):
yield bucket
bucket = []
lenght = 0
-
+
bucket.append(filepath)
bucket.append(item)
filepath = None
else:
filepath = item
-
+
if bucket:
yield bucket
-
-
+
+
def remove_prefix(text, prefix):
if text.startswith(prefix):
return text[len(prefix):]
@@ -38,8 +38,8 @@ def onfat_resource(unit, *args):
unit.onpeerdir(['library/cpp/resource'])
# Since the maximum length of lpCommandLine string for CreateProcess is 8kb (windows) characters,
- # we make several calls of rescompiler
- # https://msdn.microsoft.com/ru-ru/library/windows/desktop/ms682425.aspx
+ # we make several calls of rescompiler
+ # https://msdn.microsoft.com/ru-ru/library/windows/desktop/ms682425.aspx
for part_args in split(args, 8000):
output = listid(part_args) + '.cpp'
inputs = [x for x, y in iterpair(part_args) if x != '-']
@@ -47,7 +47,7 @@ def onfat_resource(unit, *args):
inputs = ['IN'] + inputs
unit.onrun_program(['tools/rescompiler', output] + part_args + inputs + ['OUT_NOAUTO', output])
- unit.onsrcs(['GLOBAL', output])
+ unit.onsrcs(['GLOBAL', output])
def onresource_files(unit, *args):
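
The comment restored above explains why split() exists: each rescompiler command line has to stay under the CreateProcess limit the comment cites, so (path, value) pairs are packed into bounded buckets. A simplified sketch of that chunking (limit and pairs are made up; the real code also adds a per-file root length):

```python
def chunk_args(pairs, limit, root_length=0):
    # Pack (path, value) pairs into buckets whose combined length stays
    # under `limit`, yielding one command line's worth of args at a time.
    bucket, length = [], 0
    for path, value in pairs:
        extra = root_length + len(path) + len(value)
        if length + extra > limit and bucket:
            yield bucket
            bucket, length = [], 0
        bucket.extend((path, value))
        length += extra
    if bucket:
        yield bucket

pairs = [("res/a.txt", "/a"), ("res/b.txt", "/b"), ("res/c.txt", "/c")]
print(list(chunk_args(pairs, limit=24)))
# [['res/a.txt', '/a', 'res/b.txt', '/b'], ['res/c.txt', '/c']]
```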
diff --git a/build/plugins/suppressions.py b/build/plugins/suppressions.py
index 6f4a1b4f03..a5e6bd2188 100644
--- a/build/plugins/suppressions.py
+++ b/build/plugins/suppressions.py
@@ -1,19 +1,19 @@
-def onsuppressions(unit, *args):
- """
- SUPPRESSIONS() - allows to specify files with suppression notation which will be used by
- address, leak or thread sanitizer runtime by default.
- Use asan.supp filename for address sanitizer, lsan.supp for leak sanitizer
- and tsan.supp for thread sanitizer suppressions respectively.
- See https://clang.llvm.org/docs/AddressSanitizer.html#suppressing-memory-leaks
- for details.
- """
- import os
-
- valid = ("asan.supp", "tsan.supp", "lsan.supp")
-
- if unit.get("SANITIZER_TYPE") in ("leak", "address", "thread"):
- for x in args:
- if os.path.basename(x) not in valid:
- unit.message(['error', "Invalid suppression filename: {} (any of the following is expected: {})".format(x, valid)])
- return
- unit.onsrcs(["GLOBAL"] + list(args))
+def onsuppressions(unit, *args):
+ """
+ SUPPRESSIONS() - allows to specify files with suppression notation which will be used by
+ address, leak or thread sanitizer runtime by default.
+ Use asan.supp filename for address sanitizer, lsan.supp for leak sanitizer
+ and tsan.supp for thread sanitizer suppressions respectively.
+ See https://clang.llvm.org/docs/AddressSanitizer.html#suppressing-memory-leaks
+ for details.
+ """
+ import os
+
+ valid = ("asan.supp", "tsan.supp", "lsan.supp")
+
+ if unit.get("SANITIZER_TYPE") in ("leak", "address", "thread"):
+ for x in args:
+ if os.path.basename(x) not in valid:
+ unit.message(['error', "Invalid suppression filename: {} (any of the following is expected: {})".format(x, valid)])
+ return
+ unit.onsrcs(["GLOBAL"] + list(args))
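
SUPPRESSIONS() above accepts only the three canonical basenames and only takes effect when a matching sanitizer is enabled. A standalone sketch of the filename screen (paths invented):

```python
import os

VALID = ("asan.supp", "tsan.supp", "lsan.supp")

def invalid_suppression_files(paths):
    """Paths whose basename is not a recognised sanitizer suppression file."""
    return [p for p in paths if os.path.basename(p) not in VALID]

print(invalid_suppression_files(["sanitizers/asan.supp", "sanitizers/ubsan.supp"]))
# ['sanitizers/ubsan.supp'] -- would produce the error message above
```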
diff --git a/build/plugins/tests/test_requirements.py b/build/plugins/tests/test_requirements.py
index 24d57ac901..7d1a9b98b1 100644
--- a/build/plugins/tests/test_requirements.py
+++ b/build/plugins/tests/test_requirements.py
@@ -32,7 +32,7 @@ class TestRequirements(object):
assert not requirements.check_ram(1, test_size)
assert not requirements.check_ram(4, test_size)
assert not requirements.check_ram(5, test_size)
- assert not requirements.check_ram(32, consts.TestSize.Large)
+ assert not requirements.check_ram(32, consts.TestSize.Large)
assert requirements.check_ram(48, consts.TestSize.Large)
assert not requirements.check_ram(1, test_size, is_kvm=True)
diff --git a/build/plugins/ytest.py b/build/plugins/ytest.py
index 8970837f0f..f58d00c99c 100644
--- a/build/plugins/ytest.py
+++ b/build/plugins/ytest.py
@@ -16,20 +16,20 @@ import collections
import ymake
-MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
-MDS_SHEME = 'mds'
-CANON_DATA_DIR_NAME = 'canondata'
-CANON_OUTPUT_STORAGE = 'canondata_storage'
-CANON_RESULT_FILE_NAME = 'result.json'
-CANON_MDS_RESOURCE_REGEX = re.compile(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)')
-CANON_SB_VAULT_REGEX = re.compile(r"\w+=(value|file):[-\w]+:\w+")
-CANON_SBR_RESOURCE_REGEX = re.compile(r'(sbr:/?/?(\d+))')
-
+MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
+MDS_SHEME = 'mds'
+CANON_DATA_DIR_NAME = 'canondata'
+CANON_OUTPUT_STORAGE = 'canondata_storage'
+CANON_RESULT_FILE_NAME = 'result.json'
+CANON_MDS_RESOURCE_REGEX = re.compile(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)')
+CANON_SB_VAULT_REGEX = re.compile(r"\w+=(value|file):[-\w]+:\w+")
+CANON_SBR_RESOURCE_REGEX = re.compile(r'(sbr:/?/?(\d+))')
+
VALID_NETWORK_REQUIREMENTS = ("full", "restricted")
VALID_DNS_REQUIREMENTS = ("default", "local", "dns64")
BLOCK_SEPARATOR = '============================================================='
-SPLIT_FACTOR_MAX_VALUE = 1000
-SPLIT_FACTOR_TEST_FILES_MAX_VALUE = 4250
+SPLIT_FACTOR_MAX_VALUE = 1000
+SPLIT_FACTOR_TEST_FILES_MAX_VALUE = 4250
PARTITION_MODS = ('SEQUENTIAL', 'MODULO')
DEFAULT_TIDY_CONFIG = "build/config/tests/clang_tidy/config.yaml"
DEFAULT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_default_map.json"
@@ -69,65 +69,65 @@ def is_yt_spec_contain_pool_info(filename): # XXX switch to yson in ymake + per
return pool_re.search(yt_spec) and cypress_root_re.search(yt_spec)
-def validate_sb_vault(name, value):
- if not CANON_SB_VAULT_REGEX.match(value):
- return "sb_vault value '{}' should follow pattern <ENV_NAME>=:<value|file>:<owner>:<vault key>".format(value)
-
-
-def validate_numerical_requirement(name, value):
- if mr.resolve_value(value) is None:
- return "Cannot convert [[imp]]{}[[rst]] to the proper [[imp]]{}[[rst]] requirement value".format(value, name)
-
-
-def validate_choice_requirement(name, val, valid):
- if val not in valid:
- return "Unknown [[imp]]{}[[rst]] requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(name, val, ", ".join(valid))
-
-
+def validate_sb_vault(name, value):
+ if not CANON_SB_VAULT_REGEX.match(value):
+ return "sb_vault value '{}' should follow pattern <ENV_NAME>=:<value|file>:<owner>:<vault key>".format(value)
+
+
+def validate_numerical_requirement(name, value):
+ if mr.resolve_value(value) is None:
+ return "Cannot convert [[imp]]{}[[rst]] to the proper [[imp]]{}[[rst]] requirement value".format(value, name)
+
+
+def validate_choice_requirement(name, val, valid):
+ if val not in valid:
+ return "Unknown [[imp]]{}[[rst]] requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(name, val, ", ".join(valid))
+
+
def validate_force_sandbox_requirement(name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, check_func):
if is_force_sandbox or not in_autocheck or is_fuzzing or is_ytexec_run:
- if value == 'all':
- return
- return validate_numerical_requirement(name, value)
- error_msg = validate_numerical_requirement(name, value)
- if error_msg:
- return error_msg
+ if value == 'all':
+ return
+ return validate_numerical_requirement(name, value)
+ error_msg = validate_numerical_requirement(name, value)
+ if error_msg:
+ return error_msg
return check_func(mr.resolve_value(value), test_size, is_kvm)
-
-
+
+
# TODO: Remove is_kvm param when there will be guarantees on RAM
def validate_requirement(req_name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run):
- req_checks = {
- 'container': validate_numerical_requirement,
+ req_checks = {
+ 'container': validate_numerical_requirement,
'cpu': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_cpu),
- 'disk_usage': validate_numerical_requirement,
- 'dns': lambda n, v: validate_choice_requirement(n, v, VALID_DNS_REQUIREMENTS),
- 'kvm': None,
- 'network': lambda n, v: validate_choice_requirement(n, v, VALID_NETWORK_REQUIREMENTS),
+ 'disk_usage': validate_numerical_requirement,
+ 'dns': lambda n, v: validate_choice_requirement(n, v, VALID_DNS_REQUIREMENTS),
+ 'kvm': None,
+ 'network': lambda n, v: validate_choice_requirement(n, v, VALID_NETWORK_REQUIREMENTS),
'ram': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_ram),
'ram_disk': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_ram_disk),
- 'sb': None,
- 'sb_vault': validate_sb_vault,
- }
-
- if req_name not in req_checks:
- return "Unknown requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(req_name, ", ".join(sorted(req_checks)))
-
- if req_name in ('container', 'disk') and not is_force_sandbox:
- return "Only [[imp]]LARGE[[rst]] tests without [[imp]]ya:force_distbuild[[rst]] tag can have [[imp]]{}[[rst]] requirement".format(req_name)
-
- check_func = req_checks[req_name]
- if check_func:
- return check_func(req_name, value)
-
-
-def validate_test(unit, kw):
+ 'sb': None,
+ 'sb_vault': validate_sb_vault,
+ }
+
+ if req_name not in req_checks:
+ return "Unknown requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(req_name, ", ".join(sorted(req_checks)))
+
+ if req_name in ('container', 'disk') and not is_force_sandbox:
+ return "Only [[imp]]LARGE[[rst]] tests without [[imp]]ya:force_distbuild[[rst]] tag can have [[imp]]{}[[rst]] requirement".format(req_name)
+
+ check_func = req_checks[req_name]
+ if check_func:
+ return check_func(req_name, value)
+
+
+def validate_test(unit, kw):
def get_list(key):
return deserialize_list(kw.get(key, ""))
valid_kw = copy.deepcopy(kw)
errors = []
- warnings = []
+ warnings = []
if valid_kw.get('SCRIPT-REL-PATH') == 'boost.test':
project_path = valid_kw.get('BUILD-FOLDER-PATH', "")
@@ -143,66 +143,66 @@ def validate_test(unit, kw):
size = valid_kw.get('SIZE', consts.TestSize.Small).lower()
# TODO: use set instead list
tags = get_list("TAG")
- requirements_orig = get_list("REQUIREMENTS")
+ requirements_orig = get_list("REQUIREMENTS")
in_autocheck = "ya:not_autocheck" not in tags and 'ya:manual' not in tags
- is_fat = 'ya:fat' in tags
- is_force_sandbox = 'ya:force_distbuild' not in tags and is_fat
+ is_fat = 'ya:fat' in tags
+ is_force_sandbox = 'ya:force_distbuild' not in tags and is_fat
is_ytexec_run = 'ya:yt' in tags
- is_fuzzing = valid_kw.get("FUZZING", False)
- is_kvm = 'kvm' in requirements_orig
+ is_fuzzing = valid_kw.get("FUZZING", False)
+ is_kvm = 'kvm' in requirements_orig
requirements = {}
- list_requirements = ('sb_vault')
- for req in requirements_orig:
+ list_requirements = ('sb_vault')
+ for req in requirements_orig:
if req in ('kvm', ):
requirements[req] = str(True)
continue
if ":" in req:
req_name, req_value = req.split(":", 1)
- if req_name in list_requirements:
- requirements[req_name] = ",".join(filter(None, [requirements.get(req_name), req_value]))
- else:
- if req_name in requirements:
- if req_value in ["0"]:
- warnings.append("Requirement [[imp]]{}[[rst]] is dropped [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format(req_name, requirements[req_name], req_value))
- del requirements[req_name]
- elif requirements[req_name] != req_value:
- warnings.append("Requirement [[imp]]{}[[rst]] is redefined [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format(req_name, requirements[req_name], req_value))
- requirements[req_name] = req_value
+ if req_name in list_requirements:
+ requirements[req_name] = ",".join(filter(None, [requirements.get(req_name), req_value]))
+ else:
+ if req_name in requirements:
+ if req_value in ["0"]:
+ warnings.append("Requirement [[imp]]{}[[rst]] is dropped [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format(req_name, requirements[req_name], req_value))
+ del requirements[req_name]
+ elif requirements[req_name] != req_value:
+ warnings.append("Requirement [[imp]]{}[[rst]] is redefined [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format(req_name, requirements[req_name], req_value))
+ requirements[req_name] = req_value
else:
- requirements[req_name] = req_value
+ requirements[req_name] = req_value
else:
errors.append("Invalid requirement syntax [[imp]]{}[[rst]]: expect <requirement>:<value>".format(req))
- if not errors:
- for req_name, req_value in requirements.items():
+ if not errors:
+ for req_name, req_value in requirements.items():
error_msg = validate_requirement(req_name, req_value, size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run)
- if error_msg:
- errors += [error_msg]
-
+ if error_msg:
+ errors += [error_msg]
+
invalid_requirements_for_distbuild = [requirement for requirement in requirements.keys() if requirement not in ('ram', 'ram_disk', 'cpu', 'network')]
- sb_tags = [tag for tag in tags if tag.startswith('sb:')]
+ sb_tags = [tag for tag in tags if tag.startswith('sb:')]
if is_fat:
- if size != consts.TestSize.Large:
-            errors.append("Only LARGE tests may have ya:fat tag")
-
+ if size != consts.TestSize.Large:
+            errors.append("Only LARGE tests may have ya:fat tag")
+
if in_autocheck and not is_force_sandbox:
- if invalid_requirements_for_distbuild:
-            errors.append("'{}' REQUIREMENTS options can be used only for FAT tests without the ya:force_distbuild tag. Remove TAG(ya:force_distbuild) or the offending option.".format(invalid_requirements_for_distbuild))
- if sb_tags:
- errors.append("You can set sandbox tags '{}' only for FAT tests without ya:force_distbuild. Remove TAG(ya:force_sandbox) or sandbox tags.".format(sb_tags))
- if 'ya:sandbox_coverage' in tags:
- errors.append("You can set 'ya:sandbox_coverage' tag only for FAT tests without ya:force_distbuild.")
- else:
+ if invalid_requirements_for_distbuild:
+            errors.append("'{}' REQUIREMENTS options can be used only for FAT tests without the ya:force_distbuild tag. Remove TAG(ya:force_distbuild) or the offending option.".format(invalid_requirements_for_distbuild))
+ if sb_tags:
+ errors.append("You can set sandbox tags '{}' only for FAT tests without ya:force_distbuild. Remove TAG(ya:force_sandbox) or sandbox tags.".format(sb_tags))
+ if 'ya:sandbox_coverage' in tags:
+ errors.append("You can set 'ya:sandbox_coverage' tag only for FAT tests without ya:force_distbuild.")
+ else:
if is_force_sandbox:
- errors.append('ya:force_sandbox can be used with LARGE tests only')
- if 'ya:nofuse' in tags:
- errors.append('ya:nofuse can be used with LARGE tests only')
- if 'ya:privileged' in tags:
- errors.append("ya:privileged can be used with LARGE tests only")
- if in_autocheck and size == consts.TestSize.Large:
- errors.append("LARGE test must have ya:fat tag")
+ errors.append('ya:force_sandbox can be used with LARGE tests only')
+ if 'ya:nofuse' in tags:
+ errors.append('ya:nofuse can be used with LARGE tests only')
+ if 'ya:privileged' in tags:
+ errors.append("ya:privileged can be used with LARGE tests only")
+ if in_autocheck and size == consts.TestSize.Large:
+ errors.append("LARGE test must have ya:fat tag")
if 'ya:privileged' in tags and 'container' not in requirements:
errors.append("Only tests with 'container' requirement can have 'ya:privileged' tag")
@@ -236,34 +236,34 @@ def validate_test(unit, kw):
valid_kw['REQUIREMENTS'] = serialize_list(requiremtens_list)
if valid_kw.get("FUZZ-OPTS"):
- for option in get_list("FUZZ-OPTS"):
- if not option.startswith("-"):
- errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. All fuzzer options should start with '-'".format(option))
- break
- eqpos = option.find("=")
- if eqpos == -1 or len(option) == eqpos + 1:
-                errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. Every fuzzer option must have a value specified after '='".format(option))
- break
- if option[eqpos - 1] == " " or option[eqpos + 1] == " ":
- errors.append("Spaces are not allowed: '[[imp]]{}[[rst]]'".format(option))
- break
- if option[:eqpos] in ("-runs", "-dict", "-jobs", "-workers", "-artifact_prefix", "-print_final_stats"):
- errors.append("You can't use '[[imp]]{}[[rst]]' - it will be automatically calculated or configured during run".format(option))
- break
-
- if valid_kw.get("YT-SPEC"):
+ for option in get_list("FUZZ-OPTS"):
+ if not option.startswith("-"):
+ errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. All fuzzer options should start with '-'".format(option))
+ break
+ eqpos = option.find("=")
+ if eqpos == -1 or len(option) == eqpos + 1:
+                errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. Every fuzzer option must have a value specified after '='".format(option))
+ break
+ if option[eqpos - 1] == " " or option[eqpos + 1] == " ":
+ errors.append("Spaces are not allowed: '[[imp]]{}[[rst]]'".format(option))
+ break
+ if option[:eqpos] in ("-runs", "-dict", "-jobs", "-workers", "-artifact_prefix", "-print_final_stats"):
+ errors.append("You can't use '[[imp]]{}[[rst]]' - it will be automatically calculated or configured during run".format(option))
+ break
+
+ if valid_kw.get("YT-SPEC"):
if not is_ytexec_run:
-            errors.append("You can use the YT_SPEC macro only for tests marked with the ya:yt tag")
- else:
- for filename in get_list("YT-SPEC"):
- filename = unit.resolve('$S/' + filename)
- if not os.path.exists(filename):
- errors.append("File '{}' specified in the YT_SPEC macro doesn't exist".format(filename))
- continue
+            errors.append("You can use the YT_SPEC macro only for tests marked with the ya:yt tag")
+ else:
+ for filename in get_list("YT-SPEC"):
+ filename = unit.resolve('$S/' + filename)
+ if not os.path.exists(filename):
+ errors.append("File '{}' specified in the YT_SPEC macro doesn't exist".format(filename))
+ continue
if is_yt_spec_contain_pool_info(filename) and "ya:external" not in tags:
tags.append("ya:external")
tags.append("ya:yt_research_pool")
-
+
if valid_kw.get("USE_ARCADIA_PYTHON") == "yes" and valid_kw.get("SCRIPT-REL-PATH") == "py.test":
errors.append("PYTEST_SCRIPT is deprecated")
@@ -274,23 +274,23 @@ def validate_test(unit, kw):
if valid_kw.get('SPLIT-FACTOR'):
if valid_kw.get('FORK-MODE') == 'none':
            errors.append('SPLIT_FACTOR must be used with the FORK_TESTS() or FORK_SUBTESTS() macro')
-
- value = 1
+
+ value = 1
try:
value = int(valid_kw.get('SPLIT-FACTOR'))
if value <= 0:
raise ValueError("must be > 0")
- if value > SPLIT_FACTOR_MAX_VALUE:
- raise ValueError("the maximum allowed value is {}".format(SPLIT_FACTOR_MAX_VALUE))
+ if value > SPLIT_FACTOR_MAX_VALUE:
+ raise ValueError("the maximum allowed value is {}".format(SPLIT_FACTOR_MAX_VALUE))
except ValueError as e:
errors.append('Incorrect SPLIT_FACTOR value: {}'.format(e))
- if valid_kw.get('FORK-TEST-FILES') and size != consts.TestSize.Large:
- nfiles = count_entries(valid_kw.get('TEST-FILES'))
- if nfiles * value > SPLIT_FACTOR_TEST_FILES_MAX_VALUE:
-            errors.append('Too many chunks generated: {} (limit: {}). Remove the FORK_TEST_FILES() macro or reduce SPLIT_FACTOR({}).'.format(
- nfiles * value, SPLIT_FACTOR_TEST_FILES_MAX_VALUE, value))
-
+ if valid_kw.get('FORK-TEST-FILES') and size != consts.TestSize.Large:
+ nfiles = count_entries(valid_kw.get('TEST-FILES'))
+ if nfiles * value > SPLIT_FACTOR_TEST_FILES_MAX_VALUE:
+            errors.append('Too many chunks generated: {} (limit: {}). Remove the FORK_TEST_FILES() macro or reduce SPLIT_FACTOR({}).'.format(
+ nfiles * value, SPLIT_FACTOR_TEST_FILES_MAX_VALUE, value))
+
unit_path = get_norm_unit_path(unit)
if not is_fat and "ya:noretries" in tags and not is_ytexec_run \
and not unit_path.startswith("devtools/") \
@@ -300,25 +300,25 @@ def validate_test(unit, kw):
and not unit_path.startswith("yp/tests"):
errors.append("Only LARGE tests can have 'ya:noretries' tag")
- if errors:
- return None, warnings, errors
-
- return valid_kw, warnings, errors
-
-
-def get_norm_unit_path(unit, extra=None):
- path = _common.strip_roots(unit.path())
- if extra:
- return '{}/{}'.format(path, extra)
- return path
-
-
-def dump_test(unit, kw):
- valid_kw, warnings, errors = validate_test(unit, kw)
- for w in warnings:
- unit.message(['warn', w])
- for e in errors:
- ymake.report_configure_error(e)
+ if errors:
+ return None, warnings, errors
+
+ return valid_kw, warnings, errors
+
+
+def get_norm_unit_path(unit, extra=None):
+ path = _common.strip_roots(unit.path())
+ if extra:
+ return '{}/{}'.format(path, extra)
+ return path
+
+
+def dump_test(unit, kw):
+ valid_kw, warnings, errors = validate_test(unit, kw)
+ for w in warnings:
+ unit.message(['warn', w])
+ for e in errors:
+ ymake.report_configure_error(e)
if valid_kw is None:
return None
string_handler = StringIO.StringIO()
@@ -339,14 +339,14 @@ def deserialize_list(val):
return filter(None, val.replace('"', "").split(";"))
-def count_entries(x):
- # see (de)serialize_list
- assert x is None or isinstance(x, str), type(x)
- if not x:
- return 0
- return x.count(";") + 1
-
-
+def count_entries(x):
+ # see (de)serialize_list
+ assert x is None or isinstance(x, str), type(x)
+ if not x:
+ return 0
+ return x.count(";") + 1
+
+
def get_values_list(unit, key):
res = map(str.strip, (unit.get(key) or '').replace('$' + key, '').strip().split())
return [r for r in res if r and r not in ['""', "''"]]
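
count_entries above leans on the ';'-separated encoding shared by serialize_list and deserialize_list. serialize_list itself is outside this hunk, so the sketch below reconstructs its apparent contract from the call sites; treat it as an assumption, not the canonical implementation:

    def serialize_list(items):
        # assumed inverse of deserialize_list: join non-empty entries with ';'
        return ";".join(filter(None, items))

    def deserialize_list(val):
        return list(filter(None, val.replace('"', "").split(";")))

    def count_entries(x):
        # counting separators avoids materializing the list
        assert x is None or isinstance(x, str), type(x)
        return x.count(";") + 1 if x else 0

    encoded = serialize_list(["cpu:all", "ram:all"])
    assert deserialize_list(encoded) == ["cpu:all", "ram:all"]
    assert count_entries(encoded) == 2
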
@@ -357,31 +357,31 @@ def get_norm_paths(unit, key):
return [x.rstrip('\\/') for x in get_values_list(unit, key)]
-def get_unit_list_variable(unit, name):
- items = unit.get(name)
- if items:
- items = items.split(' ')
- assert items[0] == "${}".format(name), (items, name)
- return items[1:]
- return []
-
-
-def implies(a, b):
- return bool((not a) or b)
-
-
-def match_coverage_extractor_requirements(unit):
- # we shouldn't add test if
-    # add the coverage extractor test only when all of the following hold
-    return all([
-        # tests are requested
-        unit.get("TESTS_REQUESTED") == "yes",
-        # the build implies clang coverage, which supports segment extraction from the binaries
-        unit.get("CLANG_COVERAGE") == "yes",
-        # for contrib units, contrib coverage was explicitly enabled
- ])
-
-
+def get_unit_list_variable(unit, name):
+ items = unit.get(name)
+ if items:
+ items = items.split(' ')
+ assert items[0] == "${}".format(name), (items, name)
+ return items[1:]
+ return []
+
+
+def implies(a, b):
+ return bool((not a) or b)
+
+
+def match_coverage_extractor_requirements(unit):
+    # add the coverage extractor test only when all of the following hold
+    return all([
+        # tests are requested
+        unit.get("TESTS_REQUESTED") == "yes",
+        # the build implies clang coverage, which supports segment extraction from the binaries
+        unit.get("CLANG_COVERAGE") == "yes",
+        # for contrib units, contrib coverage was explicitly enabled
+ implies(get_norm_unit_path(unit).startswith("contrib/"), unit.get("ENABLE_CONTRIB_COVERAGE") == "yes"),
+ ])
+
+
def get_tidy_config_map(unit):
global tidy_config_map
if tidy_config_map is None:
@@ -420,16 +420,16 @@ def onadd_ytest(unit, *args):
test_data = sorted(_common.filter_out_by_keyword(spec_args.get('DATA', []) + get_norm_paths(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED'))
- if flat_args[1] == "fuzz.test":
- unit.ondata("arcadia/fuzzing/{}/corpus.json".format(get_norm_unit_path(unit)))
+ if flat_args[1] == "fuzz.test":
+ unit.ondata("arcadia/fuzzing/{}/corpus.json".format(get_norm_unit_path(unit)))
elif flat_args[1] == "go.test":
data, _ = get_canonical_test_resources(unit)
test_data += data
- elif flat_args[1] == "coverage.extractor" and not match_coverage_extractor_requirements(unit):
- # XXX
-        # The current ymake implementation doesn't allow calling a macro inside the 'when' body,
-        # which is why we add ADD_YTEST(coverage.extractor) to every PROGRAM entry and check the requirements later
- return
+ elif flat_args[1] == "coverage.extractor" and not match_coverage_extractor_requirements(unit):
+ # XXX
+        # The current ymake implementation doesn't allow calling a macro inside the 'when' body,
+        # which is why we add ADD_YTEST(coverage.extractor) to every PROGRAM entry and check the requirements later
+ return
elif flat_args[1] == "clang_tidy" and unit.get("TIDY") != "yes":
# Graph is not prepared
return
@@ -439,7 +439,7 @@ def onadd_ytest(unit, *args):
test_tags = serialize_list(_get_test_tags(unit, spec_args))
test_timeout = ''.join(spec_args.get('TIMEOUT', [])) or unit.get('TEST_TIMEOUT') or ''
test_requirements = spec_args.get('REQUIREMENTS', []) + get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
-
+
if flat_args[1] != "clang_tidy" and unit.get("TIDY") == "yes":
# graph changed for clang_tidy tests
if flat_args[1] in ("unittest.py", "gunittest", "g_benchmark"):
@@ -471,17 +471,17 @@ def onadd_ytest(unit, *args):
fork_mode = fork_mode or spec_args.get('FORK_MODE', []) or unit.get('TEST_FORK_MODE').split()
fork_mode = ' '.join(fork_mode) if fork_mode else ''
- unit_path = get_norm_unit_path(unit)
-
+ unit_path = get_norm_unit_path(unit)
+
test_record = {
'TEST-NAME': flat_args[0],
'SCRIPT-REL-PATH': flat_args[1],
'TESTED-PROJECT-NAME': unit.name(),
'TESTED-PROJECT-FILENAME': unit.filename(),
- 'SOURCE-FOLDER-PATH': unit_path,
- # TODO get rid of BUILD-FOLDER-PATH
- 'BUILD-FOLDER-PATH': unit_path,
- 'BINARY-PATH': "{}/{}".format(unit_path, unit.filename()),
+ 'SOURCE-FOLDER-PATH': unit_path,
+ # TODO get rid of BUILD-FOLDER-PATH
+ 'BUILD-FOLDER-PATH': unit_path,
+ 'BINARY-PATH': "{}/{}".format(unit_path, unit.filename()),
'GLOBAL-LIBRARY-PATH': unit.global_filename(),
'CUSTOM-DEPENDENCIES': ' '.join(spec_args.get('DEPENDS', []) + get_values_list(unit, 'TEST_DEPENDS_VALUE')),
'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
@@ -495,9 +495,9 @@ def onadd_ytest(unit, *args):
'TAG': test_tags,
'REQUIREMENTS': serialize_list(test_requirements),
'TEST-CWD': unit.get('TEST_CWD_VALUE') or '',
- 'FUZZ-DICTS': serialize_list(spec_args.get('FUZZ_DICTS', []) + get_unit_list_variable(unit, 'FUZZ_DICTS_VALUE')),
- 'FUZZ-OPTS': serialize_list(spec_args.get('FUZZ_OPTS', []) + get_unit_list_variable(unit, 'FUZZ_OPTS_VALUE')),
- 'YT-SPEC': serialize_list(spec_args.get('YT_SPEC', []) + get_unit_list_variable(unit, 'TEST_YT_SPEC_VALUE')),
+ 'FUZZ-DICTS': serialize_list(spec_args.get('FUZZ_DICTS', []) + get_unit_list_variable(unit, 'FUZZ_DICTS_VALUE')),
+ 'FUZZ-OPTS': serialize_list(spec_args.get('FUZZ_OPTS', []) + get_unit_list_variable(unit, 'FUZZ_OPTS_VALUE')),
+ 'YT-SPEC': serialize_list(spec_args.get('YT_SPEC', []) + get_unit_list_variable(unit, 'TEST_YT_SPEC_VALUE')),
'BLOB': unit.get('TEST_BLOB_DATA') or '',
'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
'TEST_IOS_DEVICE_TYPE': unit.get('TEST_IOS_DEVICE_TYPE_VALUE') or '',
@@ -513,12 +513,12 @@ def onadd_ytest(unit, *args):
else:
test_record["TEST-NAME"] += "_bench"
- if flat_args[1] == 'fuzz.test' and unit.get('FUZZING') == 'yes':
- test_record['FUZZING'] = '1'
+ if flat_args[1] == 'fuzz.test' and unit.get('FUZZING') == 'yes':
+ test_record['FUZZING'] = '1'
# use all cores if fuzzing requested
test_record['REQUIREMENTS'] = serialize_list(filter(None, deserialize_list(test_record['REQUIREMENTS']) + ["cpu:all", "ram:all"]))
-
- data = dump_test(unit, test_record)
+
+ data = dump_test(unit, test_record)
if data:
unit.set_property(["DART_DATA", data])
save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
@@ -550,7 +550,7 @@ def onadd_check(unit, *args):
flat_args, spec_args = _common.sort_by_keywords({"DEPENDS": -1, "TIMEOUT": 1, "DATA": -1, "TAG": -1, "REQUIREMENTS": -1, "FORK_MODE": 1,
"SPLIT_FACTOR": 1, "FORK_SUBTESTS": 0, "FORK_TESTS": 0, "SIZE": 1}, args)
check_type = flat_args[0]
- test_dir = get_norm_unit_path(unit)
+ test_dir = get_norm_unit_path(unit)
test_timeout = ''
fork_mode = ''
@@ -558,7 +558,7 @@ def onadd_check(unit, *args):
extra_test_dart_data = {}
ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes'
- if check_type in ["flake8.py2", "flake8.py3"]:
+ if check_type in ["flake8.py2", "flake8.py3"]:
script_rel_path = check_type
fork_mode = unit.get('TEST_FORK_MODE') or ''
elif check_type == "JAVA_STYLE":
@@ -628,18 +628,18 @@ def onadd_check(unit, *args):
'FORK-TEST-FILES': '',
'SIZE': 'SMALL',
'TAG': '',
- 'REQUIREMENTS': '',
+ 'REQUIREMENTS': '',
'USE_ARCADIA_PYTHON': use_arcadia_python or '',
'OLD_PYTEST': 'no',
'PYTHON-PATHS': '',
- # TODO remove FILES, see DEVTOOLS-7052
- 'FILES': test_files,
- 'TEST-FILES': test_files,
+ # TODO remove FILES, see DEVTOOLS-7052
+ 'FILES': test_files,
+ 'TEST-FILES': test_files,
'NO_JBUILD': 'yes' if ymake_java_test else 'no',
}
test_record.update(extra_test_dart_data)
- data = dump_test(unit, test_record)
+ data = dump_test(unit, test_record)
if data:
unit.set_property(["DART_DATA", data])
save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
@@ -659,10 +659,10 @@ def onadd_check_py_imports(unit, *args):
return
unit.onpeerdir(['library/python/testing/import_test'])
check_type = "py.imports"
- test_dir = get_norm_unit_path(unit)
+ test_dir = get_norm_unit_path(unit)
use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
- test_files = serialize_list([get_norm_unit_path(unit, unit.filename())])
+ test_files = serialize_list([get_norm_unit_path(unit, unit.filename())])
test_record = {
'TEST-NAME': "pyimports",
'TEST-TIMEOUT': '',
@@ -681,15 +681,15 @@ def onadd_check_py_imports(unit, *args):
'USE_ARCADIA_PYTHON': use_arcadia_python or '',
'OLD_PYTEST': 'no',
'PYTHON-PATHS': '',
- # TODO remove FILES, see DEVTOOLS-7052
- 'FILES': test_files,
- 'TEST-FILES': test_files,
+ # TODO remove FILES, see DEVTOOLS-7052
+ 'FILES': test_files,
+ 'TEST-FILES': test_files,
}
if unit.get('NO_CHECK_IMPORTS_FOR_VALUE') != "None":
test_record["NO-CHECK"] = serialize_list(get_values_list(unit, 'NO_CHECK_IMPORTS_FOR_VALUE') or ["*"])
else:
test_record["NO-CHECK"] = ''
- data = dump_test(unit, test_record)
+ data = dump_test(unit, test_record)
if data:
unit.set_property(["DART_DATA", data])
save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
@@ -714,14 +714,14 @@ def onadd_pytest_script(unit, *args):
test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
tags = _get_test_tags(unit)
- requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
+ requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
- data, data_files = get_canonical_test_resources(unit)
+ data, data_files = get_canonical_test_resources(unit)
test_data += data
python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
binary_path = None
test_cwd = unit.get('TEST_CWD_VALUE') or ''
- _dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, data_files=data_files)
+ _dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, data_files=data_files)
def onadd_pytest_bin(unit, *args):
@@ -756,40 +756,40 @@ def add_test_to_dart(unit, test_type, binary_path=None, runner_bin=None):
test_size = unit.get('TEST_SIZE_NAME') or ''
test_cwd = unit.get('TEST_CWD_VALUE') or ''
- unit_path = unit.path()
+ unit_path = unit.path()
test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
tags = _get_test_tags(unit)
- requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
+ requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
- data, data_files = get_canonical_test_resources(unit)
+ data, data_files = get_canonical_test_resources(unit)
test_data += data
python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
- yt_spec = get_values_list(unit, 'TEST_YT_SPEC_VALUE')
+ yt_spec = get_values_list(unit, 'TEST_YT_SPEC_VALUE')
if not binary_path:
- binary_path = os.path.join(unit_path, unit.filename())
- _dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, runner_bin=runner_bin, yt_spec=yt_spec, data_files=data_files)
+ binary_path = os.path.join(unit_path, unit.filename())
+ _dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, runner_bin=runner_bin, yt_spec=yt_spec, data_files=data_files)
def extract_java_system_properties(unit, args):
if len(args) % 2:
- return [], 'Wrong use of SYSTEM_PROPERTIES in {}: odd number of arguments'.format(unit.path())
+ return [], 'Wrong use of SYSTEM_PROPERTIES in {}: odd number of arguments'.format(unit.path())
- props = []
+ props = []
for x, y in zip(args[::2], args[1::2]):
if x == 'FILE':
if y.startswith('${BINDIR}') or y.startswith('${ARCADIA_BUILD_ROOT}') or y.startswith('/'):
- return [], 'Wrong use of SYSTEM_PROPERTIES in {}: absolute/build file path {}'.format(unit.path(), y)
+ return [], 'Wrong use of SYSTEM_PROPERTIES in {}: absolute/build file path {}'.format(unit.path(), y)
y = _common.rootrel_arc_src(y, unit)
if not os.path.exists(unit.resolve('$S/' + y)):
- return [], 'Wrong use of SYSTEM_PROPERTIES in {}: can\'t resolve {}'.format(unit.path(), y)
+ return [], 'Wrong use of SYSTEM_PROPERTIES in {}: can\'t resolve {}'.format(unit.path(), y)
y = '${ARCADIA_ROOT}/' + y
props.append({'type': 'file', 'path': y})
else:
props.append({'type': 'inline', 'key': x, 'value': y})
- return props, None
+ return props, None
def onjava_test(unit, *args):
@@ -801,28 +801,28 @@ def onjava_test(unit, *args):
if unit.get('MODULE_TYPE') == 'JTEST_FOR':
if not unit.get('UNITTEST_DIR'):
- ymake.report_configure_error('skip JTEST_FOR in {}: no args provided'.format(unit.path()))
- return
+ ymake.report_configure_error('skip JTEST_FOR in {}: no args provided'.format(unit.path()))
+ return
java_cp_arg_type = unit.get('JAVA_CLASSPATH_CMD_TYPE_VALUE') or 'MANIFEST'
if java_cp_arg_type not in ('MANIFEST', 'COMMAND_FILE', 'LIST'):
        ymake.report_configure_error('{}: TEST_JAVA_CLASSPATH_CMD_TYPE({}) is invalid. Choose an argument from MANIFEST, COMMAND_FILE or LIST'.format(unit.path(), java_cp_arg_type))
return
- unit_path = unit.path()
- path = _common.strip_roots(unit_path)
+ unit_path = unit.path()
+ path = _common.strip_roots(unit_path)
test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
- test_data.append('arcadia/build/scripts/run_junit.py')
+ test_data.append('arcadia/build/scripts/run_junit.py')
test_data.append('arcadia/build/scripts/unpacking_jtest_runner.py')
- data, data_files = get_canonical_test_resources(unit)
- test_data += data
-
-    props, error_msg = extract_java_system_properties(unit, get_values_list(unit, 'SYSTEM_PROPERTIES_VALUE'))
-    if error_msg:
-        ymake.report_configure_error(error_msg)
- return
+ data, data_files = get_canonical_test_resources(unit)
+ test_data += data
+
+    props, error_msg = extract_java_system_properties(unit, get_values_list(unit, 'SYSTEM_PROPERTIES_VALUE'))
+    if error_msg:
+        ymake.report_configure_error(error_msg)
+ return
for prop in props:
if prop['type'] == 'file':
test_data.append(prop['path'].replace('${ARCADIA_ROOT}', 'arcadia'))
@@ -831,7 +831,7 @@ def onjava_test(unit, *args):
test_cwd = unit.get('TEST_CWD_VALUE') or '' # TODO: validate test_cwd value
- if unit.get('MODULE_TYPE') == 'JUNIT5':
+ if unit.get('MODULE_TYPE') == 'JUNIT5':
script_rel_path = 'junit5.test'
else:
script_rel_path = 'junit.test'
@@ -842,7 +842,7 @@ def onjava_test(unit, *args):
'TEST-NAME': '-'.join([os.path.basename(os.path.dirname(path)), os.path.basename(path)]),
'SCRIPT-REL-PATH': script_rel_path,
'TEST-TIMEOUT': unit.get('TEST_TIMEOUT') or '',
- 'TESTED-PROJECT-NAME': path,
+ 'TESTED-PROJECT-NAME': path,
'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
# 'TEST-PRESERVE-ENV': 'da',
'TEST-DATA': serialize_list(sorted(_common.filter_out_by_keyword(test_data, 'AUTOUPDATED'))),
@@ -878,7 +878,7 @@ def onjava_test(unit, *args):
else:
test_record['TEST_JAR'] = '{}/{}.jar'.format(unit.get('MODDIR'), unit.get('REALPRJNAME'))
- data = dump_test(unit, test_record)
+ data = dump_test(unit, test_record)
if data:
unit.set_property(['DART_DATA', data])
@@ -892,7 +892,7 @@ def onjava_test_deps(unit, *args):
assert len(args) == 1
mode = args[0]
- path = get_norm_unit_path(unit)
+ path = get_norm_unit_path(unit)
ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes'
test_record = {
@@ -923,7 +923,7 @@ def onjava_test_deps(unit, *args):
if ymake_java_test:
test_record['CLASSPATH'] = '$B/{}/{}.jar ${{DART_CLASSPATH}}'.format(unit.get('MODDIR'), unit.get('REALPRJNAME'))
- data = dump_test(unit, test_record)
+ data = dump_test(unit, test_record)
unit.set_property(['DART_DATA', data])
@@ -951,12 +951,12 @@ def _dump_test(
fork_mode,
test_size,
tags,
- requirements,
+ requirements,
binary_path='',
old_pytest=False,
test_cwd=None,
- runner_bin=None,
- yt_spec=None,
+ runner_bin=None,
+ yt_spec=None,
data_files=None
):
@@ -965,7 +965,7 @@ def _dump_test(
else:
script_rel_path = test_type
- unit_path = unit.path()
+ unit_path = unit.path()
fork_test_files = unit.get('FORK_TEST_FILES_MODE')
fork_mode = ' '.join(fork_mode) if fork_mode else ''
use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
@@ -1029,11 +1029,11 @@ def onrun(unit, *args):
def onsetup_exectest(unit, *args):
- command = unit.get(["EXECTEST_COMMAND_VALUE"])
- if command is None:
- ymake.report_configure_error("EXECTEST must have at least one RUN macro")
- return
- command = command.replace("$EXECTEST_COMMAND_VALUE", "")
+ command = unit.get(["EXECTEST_COMMAND_VALUE"])
+ if command is None:
+ ymake.report_configure_error("EXECTEST must have at least one RUN macro")
+ return
+ command = command.replace("$EXECTEST_COMMAND_VALUE", "")
if "PYTHON_BIN" in command:
unit.ondepends('contrib/tools/python')
unit.set(["TEST_BLOB_DATA", base64.b64encode(command)])
@@ -1043,71 +1043,71 @@ def onsetup_exectest(unit, *args):
def onsetup_run_python(unit):
if unit.get("USE_ARCADIA_PYTHON") == "yes":
unit.ondepends('contrib/tools/python')
-
-
-def get_canonical_test_resources(unit):
- unit_path = unit.path()
+
+
+def get_canonical_test_resources(unit):
+ unit_path = unit.path()
canon_data_dir = os.path.join(unit.resolve(unit_path), CANON_DATA_DIR_NAME, unit.get('CANONIZE_SUB_PATH') or '')
-
- try:
- _, dirs, files = next(os.walk(canon_data_dir))
- except StopIteration:
- # path doesn't exist
- return [], []
-
- if CANON_RESULT_FILE_NAME in files:
- return _get_canonical_data_resources_v2(os.path.join(canon_data_dir, CANON_RESULT_FILE_NAME), unit_path)
- return [], []
-
-
-def _load_canonical_file(filename, unit_path):
- try:
- with open(filename) as results_file:
- return json.load(results_file)
- except Exception as e:
- print>>sys.stderr, "malformed canonical data in {}: {} ({})".format(unit_path, e, filename)
- return {}
-
-
-def _get_resource_from_uri(uri):
- m = CANON_MDS_RESOURCE_REGEX.match(uri)
- if m:
- res_id = m.group(1)
- return "{}:{}".format(MDS_SHEME, res_id)
-
- m = CANON_SBR_RESOURCE_REGEX.match(uri)
- if m:
-        # There might be a conflict between resources, because all resources in sandbox share the name 'resource.tar.gz'.
-        # That's why we use the '=' notation to specify a distinct path for the resource.
- uri = m.group(1)
- res_id = m.group(2)
- return "{}={}".format(uri, '/'.join([CANON_OUTPUT_STORAGE, res_id]))
-
-
-def _get_external_resources_from_canon_data(data):
- # Method should work with both canonization versions:
- # result.json: {'uri':X 'checksum':Y}
- # result.json: {'testname': {'uri':X 'checksum':Y}}
- # result.json: {'testname': [{'uri':X 'checksum':Y}]}
-    # There is also a bug: if a user returns {'uri': 1} from a test, the machinery will fail.
-    # That's why we check for the presence of both the 'uri' and 'checksum' fields
-    # (it's still a bug - a user can return a plain {'uri': X, 'checksum': Y} dict; the canonization format needs to be unified)
- res = set()
-
- if isinstance(data, dict):
- if 'uri' in data and 'checksum' in data:
- resource = _get_resource_from_uri(data['uri'])
- if resource:
- res.add(resource)
- else:
- for k, v in data.iteritems():
- res.update(_get_external_resources_from_canon_data(v))
- elif isinstance(data, list):
- for e in data:
- res.update(_get_external_resources_from_canon_data(e))
-
- return res
-
-
-def _get_canonical_data_resources_v2(filename, unit_path):
+
+ try:
+ _, dirs, files = next(os.walk(canon_data_dir))
+ except StopIteration:
+ # path doesn't exist
+ return [], []
+
+ if CANON_RESULT_FILE_NAME in files:
+ return _get_canonical_data_resources_v2(os.path.join(canon_data_dir, CANON_RESULT_FILE_NAME), unit_path)
+ return [], []
+
+
+def _load_canonical_file(filename, unit_path):
+ try:
+ with open(filename) as results_file:
+ return json.load(results_file)
+ except Exception as e:
+ print>>sys.stderr, "malformed canonical data in {}: {} ({})".format(unit_path, e, filename)
+ return {}
+
+
+def _get_resource_from_uri(uri):
+ m = CANON_MDS_RESOURCE_REGEX.match(uri)
+ if m:
+ res_id = m.group(1)
+ return "{}:{}".format(MDS_SHEME, res_id)
+
+ m = CANON_SBR_RESOURCE_REGEX.match(uri)
+ if m:
+        # There might be a conflict between resources, because all resources in sandbox share the name 'resource.tar.gz'.
+        # That's why we use the '=' notation to specify a distinct path for the resource.
+ uri = m.group(1)
+ res_id = m.group(2)
+ return "{}={}".format(uri, '/'.join([CANON_OUTPUT_STORAGE, res_id]))
+
+
+def _get_external_resources_from_canon_data(data):
+ # Method should work with both canonization versions:
+ # result.json: {'uri':X 'checksum':Y}
+ # result.json: {'testname': {'uri':X 'checksum':Y}}
+ # result.json: {'testname': [{'uri':X 'checksum':Y}]}
+    # There is also a bug: if a user returns {'uri': 1} from a test, the machinery will fail.
+    # That's why we check for the presence of both the 'uri' and 'checksum' fields
+    # (it's still a bug - a user can return a plain {'uri': X, 'checksum': Y} dict; the canonization format needs to be unified)
+ res = set()
+
+ if isinstance(data, dict):
+ if 'uri' in data and 'checksum' in data:
+ resource = _get_resource_from_uri(data['uri'])
+ if resource:
+ res.add(resource)
+ else:
+ for k, v in data.iteritems():
+ res.update(_get_external_resources_from_canon_data(v))
+ elif isinstance(data, list):
+ for e in data:
+ res.update(_get_external_resources_from_canon_data(e))
+
+ return res
+
+
+def _get_canonical_data_resources_v2(filename, unit_path):
return (_get_external_resources_from_canon_data(_load_canonical_file(filename, unit_path)), [filename])
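
The canonical-data walk restored above handles all three result.json layouts with a single recursion. A condensed sketch, with the URI resolution injected as a callable so the snippet stays self-contained (the real code calls _get_resource_from_uri instead):

    def external_resources(data, resolve):
        # accepts {'uri': X, 'checksum': Y} at any nesting depth: as the whole
        # document, as a per-test dict value, or inside a per-test list
        res = set()
        if isinstance(data, dict):
            if 'uri' in data and 'checksum' in data:
                resource = resolve(data['uri'])
                if resource:
                    res.add(resource)
            else:
                for v in data.values():
                    res.update(external_resources(v, resolve))
        elif isinstance(data, list):
            for e in data:
                res.update(external_resources(e, resolve))
        return res

    flat = {'uri': 'sbr:123', 'checksum': 'abc'}
    nested = {'test_a': [{'uri': 'sbr:123', 'checksum': 'abc'}]}
    assert external_resources(flat, str) == external_resources(nested, str)
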
diff --git a/build/rules/contrib_deps.policy b/build/rules/contrib_deps.policy
index 9af4b85cc2..cd07358c77 100644
--- a/build/rules/contrib_deps.policy
+++ b/build/rules/contrib_deps.policy
@@ -36,7 +36,7 @@ ALLOW contrib/(deprecated/)?python/django -> library/python/gunicorn
ALLOW contrib/python/python-magic -> library/python/symbols/libmagic
ALLOW contrib/python/typecode -> library/python/symbols/libmagic
ALLOW contrib/tools/jdk/test -> devtools/ya/yalibrary/tools
-ALLOW contrib/libs/clang12/tools/extra/clang-tidy/tool -> library/cpp/clang_tidy
+ALLOW contrib/libs/clang12/tools/extra/clang-tidy/tool -> library/cpp/clang_tidy
ALLOW contrib/libs/inja -> library/cpp/scheme
ALLOW contrib/libs/geos/capi/ctypes -> library/python/ctypes
ALLOW contrib/libs/leveldb -> library/cpp/deprecated/mapped_file
diff --git a/build/rules/flake8/migrations.yaml b/build/rules/flake8/migrations.yaml
index 6e54bf2e62..be6071572f 100644
--- a/build/rules/flake8/migrations.yaml
+++ b/build/rules/flake8/migrations.yaml
@@ -3662,5 +3662,5 @@ migrations:
- ads/libs/py_lmcompute/ft
- ads/libs/py_autobudget/mt/test_attribution_preprocessor
- ads/bigkv/tensor_transport/tests
- - scarab/api/python3
- - mssngr/botplatform/src/bots/core/migrations
+ - scarab/api/python3
+ - mssngr/botplatform/src/bots/core/migrations
diff --git a/build/scripts/append_file.py b/build/scripts/append_file.py
index 6b5d53bc71..d7fd7b30fd 100644
--- a/build/scripts/append_file.py
+++ b/build/scripts/append_file.py
@@ -5,5 +5,5 @@ if __name__ == "__main__":
file_path = sys.argv[1]
with open(file_path, "a") as f:
- for text in sys.argv[2:]:
- print >>f, text
+ for text in sys.argv[2:]:
+ print >>f, text
diff --git a/build/scripts/clang_tidy.py b/build/scripts/clang_tidy.py
index eb1b690ee9..cc8f88e70c 100644
--- a/build/scripts/clang_tidy.py
+++ b/build/scripts/clang_tidy.py
@@ -1,11 +1,11 @@
import argparse
-import contextlib
+import contextlib
import json
-import os
-import re
-import shutil
+import os
+import re
+import shutil
import sys
-import tempfile
+import tempfile
import subprocess
@@ -20,12 +20,12 @@ def setup_script(args):
def parse_args():
parser = argparse.ArgumentParser()
- parser.add_argument("--testing-src", required=True)
- parser.add_argument("--clang-tidy-bin", required=True)
+ parser.add_argument("--testing-src", required=True)
+ parser.add_argument("--clang-tidy-bin", required=True)
parser.add_argument("--config-validation-script", required=True)
parser.add_argument("--ymake-python", required=True)
- parser.add_argument("--tidy-json", required=True)
- parser.add_argument("--source-root", required=True)
+ parser.add_argument("--tidy-json", required=True)
+ parser.add_argument("--source-root", required=True)
parser.add_argument("--build-root", required=True)
parser.add_argument("--default-config-file", required=True)
parser.add_argument("--project-config-file", required=True)
@@ -35,27 +35,27 @@ def parse_args():
return parser.parse_known_args()
-def generate_compilation_database(clang_cmd, source_root, filename, path):
- compile_database = [
- {
- "file": filename,
+def generate_compilation_database(clang_cmd, source_root, filename, path):
+ compile_database = [
+ {
+ "file": filename,
"command": subprocess.list2cmdline(clang_cmd),
- "directory": source_root,
- }
- ]
- compilation_database_json = os.path.join(path, "compile_commands.json")
- with open(compilation_database_json, "w") as afile:
+ "directory": source_root,
+ }
+ ]
+ compilation_database_json = os.path.join(path, "compile_commands.json")
+ with open(compilation_database_json, "w") as afile:
json.dump(compile_database, afile)
return compilation_database_json
-@contextlib.contextmanager
-def gen_tmpdir():
- path = tempfile.mkdtemp()
- yield path
- shutil.rmtree(path)
-
-
+@contextlib.contextmanager
+def gen_tmpdir():
+ path = tempfile.mkdtemp()
+ yield path
+ shutil.rmtree(path)
+
+
@contextlib.contextmanager
def gen_tmpfile():
_, path = tempfile.mkstemp()
@@ -63,21 +63,21 @@ def gen_tmpfile():
os.remove(path)
-def load_profile(path):
- if os.path.exists(path):
- files = os.listdir(path)
- if len(files) == 1:
- with open(os.path.join(path, files[0])) as afile:
- return json.load(afile)["profile"]
- elif len(files) > 1:
- return {
- "error": "found several profile files: {}".format(files),
- }
- return {
- "error": "profile file is missing",
- }
-
-
+def load_profile(path):
+ if os.path.exists(path):
+ files = os.listdir(path)
+ if len(files) == 1:
+ with open(os.path.join(path, files[0])) as afile:
+ return json.load(afile)["profile"]
+ elif len(files) > 1:
+ return {
+ "error": "found several profile files: {}".format(files),
+ }
+ return {
+ "error": "profile file is missing",
+ }
+
+
def load_fixes(path):
if os.path.exists(path):
with open(path, 'r') as afile:
@@ -125,46 +125,46 @@ def main():
filter_configs(args.project_config_file, filtered_config)
result_config_file = tidy_config_validation.merge_tidy_configs(base_config_path=args.default_config_file, additional_config_path=filtered_config, result_config_path=result_config)
compile_command_path = generate_compilation_database(clang_cmd, args.source_root, args.testing_src, db_tmpdir)
- cmd = [
- clang_tidy_bin,
- args.testing_src,
- "-p",
- compile_command_path,
- "--warnings-as-errors",
- "*",
- "--config-file",
+ cmd = [
+ clang_tidy_bin,
+ args.testing_src,
+ "-p",
+ compile_command_path,
+ "--warnings-as-errors",
+ "*",
+ "--config-file",
result_config_file,
- "--header-filter",
- header_filter,
- "--use-color",
- "--enable-check-profile",
+ "--header-filter",
+ header_filter,
+ "--use-color",
+ "--enable-check-profile",
"--store-check-profile={}".format(profile_tmpdir),
- ]
+ ]
if args.export_fixes == "yes":
cmd += ["--export-fixes", fixes_file]
if args.checks:
cmd += ["--checks", args.checks]
- res = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = res.communicate()
- exit_code = res.returncode
+ res = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = res.communicate()
+ exit_code = res.returncode
profile = load_profile(profile_tmpdir)
testing_src = os.path.relpath(args.testing_src, args.source_root)
tidy_fixes = load_fixes(fixes_file)
-
- with open(output_json, "wb") as afile:
- json.dump(
- {
+
+ with open(output_json, "wb") as afile:
+ json.dump(
+ {
"file": testing_src,
- "exit_code": exit_code,
- "profile": profile,
- "stderr": err,
- "stdout": out,
+ "exit_code": exit_code,
+ "profile": profile,
+ "stderr": err,
+ "stdout": out,
"fixes": tidy_fixes,
- },
- afile,
- )
-
+ },
+ afile,
+ )
+
if __name__ == "__main__":
main()
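
generate_compilation_database above gives clang-tidy a single-entry compile_commands.json instead of a whole-project database. A standalone sketch of the same mechanism (the paths and compiler command are illustrative):

    import json
    import os
    import subprocess
    import tempfile

    def write_compile_commands(clang_cmd, source_root, filename, path):
        # one entry is enough: clang-tidy only looks up the file under analysis
        db = [{
            "file": filename,
            "command": subprocess.list2cmdline(clang_cmd),
            "directory": source_root,
        }]
        out = os.path.join(path, "compile_commands.json")
        with open(out, "w") as afile:
            json.dump(db, afile)
        return out

    tmpdir = tempfile.mkdtemp()
    write_compile_commands(["clang++", "-c", "a.cpp"], "/src", "a.cpp", tmpdir)
    # consumed as: clang-tidy a.cpp -p <tmpdir>
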
diff --git a/build/scripts/compile_cuda.py b/build/scripts/compile_cuda.py
index c0bec50b2a..bf85ae053c 100644
--- a/build/scripts/compile_cuda.py
+++ b/build/scripts/compile_cuda.py
@@ -45,27 +45,27 @@ def main():
cflags.append('-fopenmp')
cflags.remove('-fopenmp=libomp')
- skip_list = [
- '-gline-tables-only',
- # clang coverage
- '-fprofile-instr-generate',
- '-fcoverage-mapping',
+ skip_list = [
+ '-gline-tables-only',
+ # clang coverage
+ '-fprofile-instr-generate',
+ '-fcoverage-mapping',
        '/Zc:inline',  # would remove unreferenced functions (kernel registrators), so skip it
'-Wno-c++17-extensions',
'-flto',
'-faligned-allocation',
- ]
-
+ ]
+
if skip_nocxxinc:
skip_list.append('-nostdinc++')
- for flag in skip_list:
+ for flag in skip_list:
if flag in cflags:
cflags.remove(flag)
skip_prefix_list = [
'-fsanitize=',
- '-fsanitize-coverage=',
+ '-fsanitize-coverage=',
'-fsanitize-blacklist=',
'--system-header-prefix',
]
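
The flag filtering above works in two passes: exact names from skip_list, then prefix matches from skip_prefix_list. A compressed sketch of the same idea (the lists are abbreviated; note the original removes only the first occurrence of each exact flag, while this version drops every occurrence):

    def filter_cflags(cflags, skip_list, skip_prefix_list):
        kept = [f for f in cflags if f not in skip_list]
        return [f for f in kept if not any(f.startswith(p) for p in skip_prefix_list)]

    flags = ['-O2', '-flto', '-fsanitize=address', '-fcoverage-mapping']
    assert filter_cflags(flags, ['-flto', '-fcoverage-mapping'], ['-fsanitize=']) == ['-O2']
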
diff --git a/build/scripts/configure_file.py b/build/scripts/configure_file.py
index 1873ed70eb..6d434c3e8c 100755
--- a/build/scripts/configure_file.py
+++ b/build/scripts/configure_file.py
@@ -53,7 +53,7 @@ if __name__ == "__main__":
usage()
varDict = {}
for x in sys.argv[3:]:
- key, value = str(x).split('=', 1)
+ key, value = str(x).split('=', 1)
varDict[key] = value
main(sys.argv[1], sys.argv[2], varDict)
diff --git a/build/scripts/copy_to_dir.py b/build/scripts/copy_to_dir.py
index 9baeb5ffac..53f3207bb7 100644
--- a/build/scripts/copy_to_dir.py
+++ b/build/scripts/copy_to_dir.py
@@ -34,7 +34,7 @@ def hardlink_or_copy(src, dst):
if e.errno == errno.EEXIST:
return
elif e.errno == errno.EXDEV:
-            sys.stderr.write("Can't make cross-device hardlink - falling back to copy: {} -> {}\n".format(src, dst))
+            sys.stderr.write("Can't make cross-device hardlink - falling back to copy: {} -> {}\n".format(src, dst))
shutil.copy(src, dst)
else:
raise
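
The errno handling above is the whole point of hardlink_or_copy: EEXIST means the destination is already in place, and EXDEV (a cross-device link, which os.link cannot create) degrades to a copy. A minimal sketch of the pattern:

    import errno
    import os
    import shutil
    import sys

    def hardlink_or_copy(src, dst):
        try:
            os.link(src, dst)
        except OSError as e:
            if e.errno == errno.EEXIST:
                return  # already linked or copied earlier
            elif e.errno == errno.EXDEV:
                sys.stderr.write("Can't make cross-device hardlink - falling back to copy: {} -> {}\n".format(src, dst))
                shutil.copy(src, dst)
            else:
                raise
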
diff --git a/build/scripts/coverage-info.py b/build/scripts/coverage-info.py
index 94491d9256..d3bf13c4e7 100644
--- a/build/scripts/coverage-info.py
+++ b/build/scripts/coverage-info.py
@@ -149,7 +149,7 @@ def gen_info_global(cmd, cov_info, probe_path, update_stat, lcov_args):
lcov_args.append(cov_info)
-def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files):
+def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files):
with tarfile.open(gcno_archive) as gcno_tf:
for gcno_item in gcno_tf:
if gcno_item.isfile() and gcno_item.name.endswith(GCNO_EXT):
@@ -157,13 +157,13 @@ def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_execut
gcno_name = gcno_item.name
source_fname = gcno_name[:-len(GCNO_EXT)]
- if prefix_filter and not source_fname.startswith(prefix_filter):
- sys.stderr.write("Skipping {} (doesn't match prefix '{}')\n".format(source_fname, prefix_filter))
- continue
- if exclude_files and exclude_files.search(source_fname):
- sys.stderr.write("Skipping {} (matched exclude pattern '{}')\n".format(source_fname, exclude_files.pattern))
- continue
-
+ if prefix_filter and not source_fname.startswith(prefix_filter):
+ sys.stderr.write("Skipping {} (doesn't match prefix '{}')\n".format(source_fname, prefix_filter))
+ continue
+ if exclude_files and exclude_files.search(source_fname):
+ sys.stderr.write("Skipping {} (matched exclude pattern '{}')\n".format(source_fname, exclude_files.pattern))
+ continue
+
fname2gcno[source_fname] = gcno_name
if os.path.getsize(gcno_name) > 0:
@@ -234,7 +234,7 @@ def main(source_root, output, gcno_archive, gcda_archive, gcov_tool, prefix_filt
def gen_info(cmd, cov_info):
gen_info_global(cmd, cov_info, probe_path, update_stat, lcov_args)
- init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files)
+ init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files)
process_all_coverage_files(gcda_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info)
if coverage_report_path:
diff --git a/build/scripts/create_jcoverage_report.py b/build/scripts/create_jcoverage_report.py
index 45083ff4f7..cb7918ff04 100644
--- a/build/scripts/create_jcoverage_report.py
+++ b/build/scripts/create_jcoverage_report.py
@@ -3,7 +3,7 @@ import tarfile
import zipfile
import os
import sys
-import time
+import time
import subprocess
@@ -14,23 +14,23 @@ def mkdir_p(path):
pass
-class Timer(object):
-
- def __init__(self):
- self.start = time.time()
-
- def step(self, msg):
- sys.stderr.write("{} ({}s)\n".format(msg, int(time.time() - self.start)))
- self.start = time.time()
-
-
+class Timer(object):
+
+ def __init__(self):
+ self.start = time.time()
+
+ def step(self, msg):
+ sys.stderr.write("{} ({}s)\n".format(msg, int(time.time() - self.start)))
+ self.start = time.time()
+
+
def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_format, tar_output, agent_disposition, runners_paths):
- timer = Timer()
+ timer = Timer()
reports_dir = 'jacoco_reports_dir'
mkdir_p(reports_dir)
with tarfile.open(source) as tf:
tf.extractall(reports_dir)
- timer.step("Coverage data extracted")
+ timer.step("Coverage data extracted")
reports = [os.path.join(reports_dir, fname) for fname in os.listdir(reports_dir)]
with open(jars_list) as f:
@@ -52,10 +52,10 @@ def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_
if jar.endswith('devtools-jacoco-agent.jar'):
agent_disposition = jar
- # Skip java contrib - it's irrelevant coverage
- if jar.startswith('contrib/java'):
- continue
-
+ # Skip java contrib - it's irrelevant coverage
+ if jar.startswith('contrib/java'):
+ continue
+
with zipfile.ZipFile(jar) as jf:
for entry in jf.infolist():
if entry.filename.endswith('.java'):
@@ -67,35 +67,35 @@ def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_
else:
continue
- entry.filename = entry.filename.encode('utf-8')
+ entry.filename = entry.filename.encode('utf-8')
jf.extract(entry, dest)
- timer.step("Jar files extracted")
+ timer.step("Jar files extracted")
if not agent_disposition:
print>>sys.stderr, 'Can\'t find jacoco agent. Will not generate html report for java coverage.'
- if tar_output:
- report_dir = 'java.report.temp'
- else:
- report_dir = output
+ if tar_output:
+ report_dir = 'java.report.temp'
+ else:
+ report_dir = output
mkdir_p(report_dir)
if agent_disposition:
- agent_cmd = [java, '-jar', agent_disposition, src_dir, cls_dir, prefix_filter or '.', exclude_filter or '__no_exclude__', report_dir, output_format]
+ agent_cmd = [java, '-jar', agent_disposition, src_dir, cls_dir, prefix_filter or '.', exclude_filter or '__no_exclude__', report_dir, output_format]
agent_cmd += reports
subprocess.check_call(agent_cmd)
- timer.step("Jacoco finished")
+ timer.step("Jacoco finished")
- if tar_output:
- with tarfile.open(output, 'w') as outf:
- outf.add(report_dir, arcname='.')
+ if tar_output:
+ with tarfile.open(output, 'w') as outf:
+ outf.add(report_dir, arcname='.')
if __name__ == '__main__':
- if 'LC_ALL' in os.environ:
- if os.environ['LC_ALL'] == 'C':
- os.environ['LC_ALL'] = 'en_GB.UTF-8'
-
+ if 'LC_ALL' in os.environ:
+ if os.environ['LC_ALL'] == 'C':
+ os.environ['LC_ALL'] = 'en_GB.UTF-8'
+
parser = argparse.ArgumentParser()
parser.add_argument('--source', action='store')
@@ -104,9 +104,9 @@ if __name__ == '__main__':
parser.add_argument('--prefix-filter', action='store')
parser.add_argument('--exclude-filter', action='store')
parser.add_argument('--jars-list', action='store')
- parser.add_argument('--output-format', action='store', default="html")
- parser.add_argument('--raw-output', dest='tar_output', action='store_false', default=True)
- parser.add_argument('--agent-disposition', action='store')
+ parser.add_argument('--output-format', action='store', default="html")
+ parser.add_argument('--raw-output', dest='tar_output', action='store_false', default=True)
+ parser.add_argument('--agent-disposition', action='store')
parser.add_argument('--runner-path', dest='runners_paths', action='append', default=[])
args = parser.parse_args()
main(**vars(args))
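
The Timer restored above gives per-phase wall-clock timings on stderr: each step() reports the time elapsed since the previous step and restarts the clock. A usage sketch:

    import sys
    import time

    class Timer(object):
        def __init__(self):
            self.start = time.time()

        def step(self, msg):
            sys.stderr.write("{} ({}s)\n".format(msg, int(time.time() - self.start)))
            self.start = time.time()

    timer = Timer()
    time.sleep(1)
    timer.step("Coverage data extracted")  # prints "... (1s)" and resets the clock
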
diff --git a/build/scripts/error.py b/build/scripts/error.py
index f7d8ecb2cc..6e4256e5c2 100644
--- a/build/scripts/error.py
+++ b/build/scripts/error.py
@@ -1,19 +1,19 @@
-# Sync content of this file with devtools/ya/core/error/__init__.py
-
+# Sync content of this file with devtools/ya/core/error/__init__.py
+
TEMPORARY_ERROR_MESSAGES = [
- 'Connection reset by peer',
- 'Connection timed out',
- 'Function not implemented',
- 'I/O operation on closed file',
- 'Internal Server Error',
- 'Network connection closed unexpectedly',
+ 'Connection reset by peer',
+ 'Connection timed out',
+ 'Function not implemented',
+ 'I/O operation on closed file',
+ 'Internal Server Error',
+ 'Network connection closed unexpectedly',
'Network is unreachable',
'No route to host',
- 'No space left on device',
- 'Not enough space',
- 'Temporary failure in name resolution',
+ 'No space left on device',
+ 'Not enough space',
+ 'Temporary failure in name resolution',
'The read operation timed out',
- 'timeout: timed out',
+ 'timeout: timed out',
]
@@ -23,55 +23,55 @@ class ExitCodes(object):
COMPILATION_FAILED = 11
INFRASTRUCTURE_ERROR = 12
NOT_RETRIABLE_ERROR = 13
- YT_STORE_FETCH_ERROR = 14
+ YT_STORE_FETCH_ERROR = 14
def merge_exit_codes(exit_codes):
- return max(e if e >= 0 else 1 for e in exit_codes) if exit_codes else 0
+ return max(e if e >= 0 else 1 for e in exit_codes) if exit_codes else 0
def is_temporary_error(exc):
- import logging
- logger = logging.getLogger(__name__)
-
+ import logging
+ logger = logging.getLogger(__name__)
+
if getattr(exc, 'temporary', False):
- logger.debug("Exception has temporary attribute: %s", exc)
+ logger.debug("Exception has temporary attribute: %s", exc)
return True
import errno
err = getattr(exc, 'errno', None)
if err == errno.ECONNREFUSED or err == errno.ENETUNREACH:
- logger.debug("Exception has errno attribute: %s (errno=%s)", exc, err)
+ logger.debug("Exception has errno attribute: %s (errno=%s)", exc, err)
return True
import socket
if isinstance(exc, socket.timeout) or isinstance(getattr(exc, 'reason', None), socket.timeout):
- logger.debug("Socket timeout exception: %s", exc)
+ logger.debug("Socket timeout exception: %s", exc)
return True
if isinstance(exc, socket.gaierror):
- logger.debug("Getaddrinfo exception: %s", exc)
- return True
-
- import urllib2
-
- if isinstance(exc, urllib2.HTTPError) and exc.code in (429, ):
- logger.debug("urllib2.HTTPError: %s", exc)
+ logger.debug("Getaddrinfo exception: %s", exc)
return True
+ import urllib2
+
+ if isinstance(exc, urllib2.HTTPError) and exc.code in (429, ):
+ logger.debug("urllib2.HTTPError: %s", exc)
+ return True
+
import httplib
if isinstance(exc, httplib.IncompleteRead):
- logger.debug("IncompleteRead exception: %s", exc)
+ logger.debug("IncompleteRead exception: %s", exc)
return True
exc_str = str(exc)
for message in TEMPORARY_ERROR_MESSAGES:
if message in exc_str:
- logger.debug("Found temporary error pattern (%s): %s", message, exc_str)
+ logger.debug("Found temporary error pattern (%s): %s", message, exc_str)
return True
return False
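
A caller-side sketch of how is_temporary_error is meant to be used: retry only when the failure is classified as transient, and re-raise immediately otherwise. The attempt count and delay are illustrative; the actual retry policy lives in build/scripts/retry.py, whose API is not shown in this diff:

    import time

    import error  # the module above

    def call_with_retries(func, attempts=3, delay=1.0):
        for attempt in range(attempts):
            try:
                return func()
            except Exception as e:
                # permanent failures and the final attempt propagate unchanged
                if attempt + 1 == attempts or not error.is_temporary_error(e):
                    raise
                time.sleep(delay)
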
diff --git a/build/scripts/fetch_from.py b/build/scripts/fetch_from.py
index db4fea50bf..bbca65219f 100755
--- a/build/scripts/fetch_from.py
+++ b/build/scripts/fetch_from.py
@@ -1,19 +1,19 @@
-import datetime as dt
-import errno
+import datetime as dt
+import errno
import hashlib
-import json
-import logging
-import os
+import json
+import logging
+import os
import platform
import random
-import shutil
-import socket
+import shutil
+import socket
import string
import sys
-import tarfile
-import urllib2
+import tarfile
+import urllib2
-import retry
+import retry
def make_user_agent():
@@ -29,7 +29,7 @@ def add_common_arguments(parser):
parser.add_argument('--executable', action='store_true', help='make outputs executable')
parser.add_argument('--log-path')
parser.add_argument('-v', '--verbose', action='store_true', default=os.environ.get('YA_VERBOSE_FETCHER'), help='increase stderr verbosity')
- parser.add_argument('outputs', nargs='*', default=[])
+ parser.add_argument('outputs', nargs='*', default=[])
def ensure_dir(path):
@@ -37,7 +37,7 @@ def ensure_dir(path):
os.makedirs(path)
-# Reference code: library/python/fs/__init__.py
+# Reference code: library/python/fs/__init__.py
def hardlink_or_copy(src, dst):
ensure_dir(os.path.dirname(dst))
@@ -49,23 +49,23 @@ def hardlink_or_copy(src, dst):
except OSError as e:
if e.errno == errno.EEXIST:
return
- elif e.errno in (errno.EXDEV, errno.EMLINK, errno.EINVAL, errno.EACCES):
-            sys.stderr.write("Can't make hardlink (errno={}) - falling back to copy: {} -> {}\n".format(e.errno, src, dst))
+ elif e.errno in (errno.EXDEV, errno.EMLINK, errno.EINVAL, errno.EACCES):
+            sys.stderr.write("Can't make hardlink (errno={}) - falling back to copy: {} -> {}\n".format(e.errno, src, dst))
shutil.copy(src, dst)
else:
raise
-def rename_or_copy_and_remove(src, dst):
+def rename_or_copy_and_remove(src, dst):
ensure_dir(os.path.dirname(dst))
-
- try:
- os.rename(src, dst)
- except OSError:
+
+ try:
+ os.rename(src, dst)
+ except OSError:
shutil.copy(src, dst)
- os.remove(src)
-
-
+ os.remove(src)
+
+
class BadChecksumFetchError(Exception):
pass
@@ -114,17 +114,17 @@ def is_temporary(e):
def is_broken(e):
return isinstance(e, urllib2.HTTPError) and e.code in (410, 404)
- if is_broken(e):
- return False
-
- if isinstance(e, (BadChecksumFetchError, IncompleteFetchError, urllib2.URLError, socket.error)):
- return True
-
- import error
-
- return error.is_temporary_error(e)
+ if is_broken(e):
+ return False
+ if isinstance(e, (BadChecksumFetchError, IncompleteFetchError, urllib2.URLError, socket.error)):
+ return True
+ import error
+
+ return error.is_temporary_error(e)
+
+
def uniq_string_generator(size=6, chars=string.ascii_lowercase + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
@@ -292,28 +292,28 @@ def fetch_url(url, unpack, resource_file_name, expected_md5=None, expected_sha1=
return tmp_file_name
-def chmod(filename, mode):
+def chmod(filename, mode):
if platform.system().lower() == 'windows':
# https://docs.microsoft.com/en-us/windows/win32/fileio/hard-links-and-junctions:
# hard to reset read-only attribute for removal if there are multiple hardlinks
return
- stat = os.stat(filename)
- if stat.st_mode & 0o777 != mode:
- try:
- os.chmod(filename, mode)
- except OSError:
+ stat = os.stat(filename)
+ if stat.st_mode & 0o777 != mode:
+ try:
+ os.chmod(filename, mode)
+ except OSError:
import pwd
- sys.stderr.write("{} st_mode: {} pwuid: {}\n".format(filename, stat.st_mode, pwd.getpwuid(os.stat(filename).st_uid)))
- raise
-
-
+ sys.stderr.write("{} st_mode: {} pwuid: {}\n".format(filename, stat.st_mode, pwd.getpwuid(os.stat(filename).st_uid)))
+ raise
+
+
def process(fetched_file, file_name, args, remove=True):
assert len(args.rename) <= len(args.outputs), (
'too few outputs to rename', args.rename, 'into', args.outputs)
- # Forbid changes to the loaded resource
- chmod(fetched_file, 0o444)
-
+ # Forbid changes to the loaded resource
+ chmod(fetched_file, 0o444)
+
if not os.path.isfile(fetched_file):
raise ResourceIsDirectoryError('Resource must be a file, not a directory: %s' % fetched_file)
@@ -332,16 +332,16 @@ def process(fetched_file, file_name, args, remove=True):
if args.untar_to:
ensure_dir(args.untar_to)
- # Extract only requested files
+ # Extract only requested files
try:
with tarfile.open(fetched_file, mode='r:*') as tar:
- inputs = set(map(os.path.normpath, args.rename + args.outputs[len(args.rename):]))
- members = [entry for entry in tar if os.path.normpath(os.path.join(args.untar_to, entry.name)) in inputs]
- tar.extractall(args.untar_to, members=members)
- # Forbid changes to the loaded resource data
- for root, _, files in os.walk(args.untar_to):
- for filename in files:
- chmod(os.path.join(root, filename), 0o444)
+ inputs = set(map(os.path.normpath, args.rename + args.outputs[len(args.rename):]))
+ members = [entry for entry in tar if os.path.normpath(os.path.join(args.untar_to, entry.name)) in inputs]
+ tar.extractall(args.untar_to, members=members)
+ # Forbid changes to the loaded resource data
+ for root, _, files in os.walk(args.untar_to):
+ for filename in files:
+ chmod(os.path.join(root, filename), 0o444)
except tarfile.ReadError as e:
logging.exception(e)
            raise ResourceUnpackingError('File {} cannot be untarred'.format(fetched_file))
@@ -354,12 +354,12 @@ def process(fetched_file, file_name, args, remove=True):
hardlink_or_copy(src, dst)
else:
logging.info('Renaming %s to %s', src, dst)
- if os.path.exists(dst):
- raise ResourceUnpackingError("Target file already exists ({} -> {})".format(src, dst))
+ if os.path.exists(dst):
+ raise ResourceUnpackingError("Target file already exists ({} -> {})".format(src, dst))
if remove:
rename_or_copy_and_remove(src, dst)
else:
- hardlink_or_copy(src, dst)
+ hardlink_or_copy(src, dst)
for path in args.outputs:
if not os.path.exists(path):
@@ -367,9 +367,9 @@ def process(fetched_file, file_name, args, remove=True):
if not os.path.isfile(path):
raise OutputIsDirectoryError('Output must be a file, not a directory: %s' % os.path.abspath(path))
if args.executable:
- chmod(path, os.stat(path).st_mode | 0o111)
+ chmod(path, os.stat(path).st_mode | 0o111)
if os.path.abspath(path) == os.path.abspath(fetched_file):
remove = False
-
+
if remove:
os.remove(fetched_file)
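
Two details in the hunk above are worth isolating: extraction is restricted to the members that were actually requested as outputs, and everything extracted is locked read-only (0o444) so later build steps can't mutate a cached resource. A condensed sketch, assuming the requested paths already include the extraction prefix as in the original:

    import os
    import tarfile

    def extract_requested(archive, untar_to, requested):
        wanted = set(os.path.normpath(p) for p in requested)
        with tarfile.open(archive, mode='r:*') as tar:
            members = [m for m in tar
                       if os.path.normpath(os.path.join(untar_to, m.name)) in wanted]
            tar.extractall(untar_to, members=members)
        # freeze the unpacked files so the fetched resource stays immutable
        for root, _, files in os.walk(untar_to):
            for filename in files:
                os.chmod(os.path.join(root, filename), 0o444)
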
diff --git a/build/scripts/fetch_from_archive.py b/build/scripts/fetch_from_archive.py
index 57aff91b5e..765a3004f6 100644
--- a/build/scripts/fetch_from_archive.py
+++ b/build/scripts/fetch_from_archive.py
@@ -31,6 +31,6 @@ if __name__ == '__main__':
logging.exception(e)
print >>sys.stderr, open(args.abs_log_path).read()
sys.stderr.flush()
-
- import error
- sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
+
+ import error
+ sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_from_external.py b/build/scripts/fetch_from_external.py
index d4ed6f4221..cf3c967a49 100644
--- a/build/scripts/fetch_from_external.py
+++ b/build/scripts/fetch_from_external.py
@@ -55,6 +55,6 @@ if __name__ == '__main__':
logging.exception(e)
print >>sys.stderr, open(args.abs_log_path).read()
sys.stderr.flush()
-
- import error
- sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
+
+ import error
+ sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_from_mds.py b/build/scripts/fetch_from_mds.py
index 5e4e656394..7ee05b7c2e 100644
--- a/build/scripts/fetch_from_mds.py
+++ b/build/scripts/fetch_from_mds.py
@@ -45,6 +45,6 @@ if __name__ == '__main__':
logging.exception(e)
print >>sys.stderr, open(args.abs_log_path).read()
sys.stderr.flush()
-
- import error
- sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
+
+ import error
+ sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_from_sandbox.py b/build/scripts/fetch_from_sandbox.py
index a99542e174..511d7bf9dd 100755
--- a/build/scripts/fetch_from_sandbox.py
+++ b/build/scripts/fetch_from_sandbox.py
@@ -106,13 +106,13 @@ def _urlopen(url, data=None, headers=None):
time.sleep(retry_after)
-def _query(url):
- return json.loads(_urlopen(url))
-
-
+def _query(url):
+ return json.loads(_urlopen(url))
+
+
_SANDBOX_BASE_URL = 'https://sandbox.yandex-team.ru/api/v1.0'
-
-
+
+
def get_resource_info(resource_id, touch=False, no_links=False):
url = ''.join((_SANDBOX_BASE_URL, '/resource/', str(resource_id)))
headers = {}
@@ -136,10 +136,10 @@ def fetch(resource_id, custom_fetcher):
try:
resource_info = get_resource_info(resource_id, touch=True, no_links=True)
except Exception as e:
- sys.stderr.write(
- "Failed to fetch resource {}: {}\n".format(resource_id, str(e))
+ sys.stderr.write(
+ "Failed to fetch resource {}: {}\n".format(resource_id, str(e))
)
- raise
+ raise
if resource_info.get('state', 'DELETED') != 'READY':
raise ResourceInfoError("Resource {} is not READY".format(resource_id))
@@ -264,6 +264,6 @@ if __name__ == '__main__':
logging.exception(e)
print >>sys.stderr, open(args.abs_log_path).read()
sys.stderr.flush()
-
- import error
- sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
+
+ import error
+ sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
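
In the Sandbox fetcher, the restored _query() helper is the building block for get_resource_info(). A usage sketch (the resource id is invented; the URL shape comes from the hunk above):

    # GET <_SANDBOX_BASE_URL>/resource/<id> returns resource metadata as JSON;
    # fetching is refused unless the resource has reached the READY state.
    info = _query('{}/resource/{}'.format(_SANDBOX_BASE_URL, 123456789))
    if info.get('state', 'DELETED') != 'READY':
        raise ResourceInfoError("Resource {} is not READY".format(123456789))
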
diff --git a/build/scripts/go_tool.py b/build/scripts/go_tool.py
index c1e98b20c0..5e5ba8c024 100644
--- a/build/scripts/go_tool.py
+++ b/build/scripts/go_tool.py
@@ -781,7 +781,7 @@ def do_link_test(args):
if __name__ == '__main__':
args = pcf.get_args(sys.argv[1:])
-
+
parser = argparse.ArgumentParser(prefix_chars='+')
parser.add_argument('++mode', choices=['dll', 'exe', 'lib', 'test'], required=True)
parser.add_argument('++srcs', nargs='*', required=True)
diff --git a/build/scripts/java_pack_to_file.py b/build/scripts/java_pack_to_file.py
index 8d2aeb93fd..f6911c7796 100644
--- a/build/scripts/java_pack_to_file.py
+++ b/build/scripts/java_pack_to_file.py
@@ -1,25 +1,25 @@
-import os
-import re
-import optparse
-
-PACKAGE_REGEX = re.compile(r'^\s*package\s+(.*?);', flags=re.MULTILINE | re.DOTALL)
-
-
-def parse_args():
- parser = optparse.OptionParser()
- parser.add_option('-o', '--output')
- parser.add_option('-a', '--source-root', dest='source_root')
- return parser.parse_args()
-
-
-def get_package_name(filename):
- with open(filename) as afile:
- match = PACKAGE_REGEX.search(afile.read())
- if match:
- return match.group(1).replace('\n\t ', '').replace('.', '/')
- return ''
-
-
+import os
+import re
+import optparse
+
+PACKAGE_REGEX = re.compile(r'^\s*package\s+(.*?);', flags=re.MULTILINE | re.DOTALL)
+
+
+def parse_args():
+ parser = optparse.OptionParser()
+ parser.add_option('-o', '--output')
+ parser.add_option('-a', '--source-root', dest='source_root')
+ return parser.parse_args()
+
+
+def get_package_name(filename):
+ with open(filename) as afile:
+ match = PACKAGE_REGEX.search(afile.read())
+ if match:
+ return match.group(1).replace('\n\t ', '').replace('.', '/')
+ return ''
+
+
def write_coverage_sources(output, srcroot, files):
with open(output, 'w') as afile:
for filename in files:
@@ -27,10 +27,10 @@ def write_coverage_sources(output, srcroot, files):
afile.write(os.path.join(pname, os.path.basename(filename)) + ':' + filename + '\n')
-def main():
- opts, files = parse_args()
+def main():
+ opts, files = parse_args()
write_coverage_sources(opts.output, opts.source_root, files)
-
-
-if __name__ == '__main__':
- exit(main())
+
+
+if __name__ == '__main__':
+ exit(main())
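
PACKAGE_REGEX, restored above, pulls the package declaration out of a Java source so each coverage entry can be keyed by its package path. A quick illustration (the sample source is invented; the script additionally strips a literal '\n\t ' sequence before replacing dots):

    import re

    PACKAGE_REGEX = re.compile(r'^\s*package\s+(.*?);', flags=re.MULTILINE | re.DOTALL)

    sample = 'package ru.yandex.example;\n\npublic class Foo {}\n'
    print(PACKAGE_REGEX.search(sample).group(1).replace('.', '/'))
    # -> ru/yandex/example
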
diff --git a/build/scripts/link_dyn_lib.py b/build/scripts/link_dyn_lib.py
index 23487f5c1e..58faf37bbe 100644
--- a/build/scripts/link_dyn_lib.py
+++ b/build/scripts/link_dyn_lib.py
@@ -148,8 +148,8 @@ def fix_cmd(arch, musl, c):
return list(f(list(parse_export_file(fname))))
if p.endswith('.supp'):
- return []
-
+ return []
+
if p.endswith('.pkg.fake'):
return []
diff --git a/build/scripts/link_exe.py b/build/scripts/link_exe.py
index f469e3b442..1c3cc4e516 100644
--- a/build/scripts/link_exe.py
+++ b/build/scripts/link_exe.py
@@ -5,16 +5,16 @@ import optparse
from process_whole_archive_option import ProcessWholeArchiveOption
-def get_leaks_suppressions(cmd):
- supp, newcmd = [], []
- for arg in cmd:
+def get_leaks_suppressions(cmd):
+ supp, newcmd = [], []
+ for arg in cmd:
if arg.endswith(".supp"):
- supp.append(arg)
- else:
- newcmd.append(arg)
- return supp, newcmd
-
-
+ supp.append(arg)
+ else:
+ newcmd.append(arg)
+ return supp, newcmd
+
+
musl_libs = '-lc', '-lcrypt', '-ldl', '-lm', '-lpthread', '-lrt', '-lutil'
@@ -23,26 +23,26 @@ def fix_cmd(musl, c):
def gen_default_suppressions(inputs, output, source_root):
- import collections
- import os
-
- supp_map = collections.defaultdict(set)
+ import collections
+ import os
+
+ supp_map = collections.defaultdict(set)
for filename in inputs:
- sanitizer = os.path.basename(filename).split('.', 1)[0]
+ sanitizer = os.path.basename(filename).split('.', 1)[0]
with open(os.path.join(source_root, filename)) as src:
- for line in src:
- line = line.strip()
- if not line or line.startswith('#'):
- continue
- supp_map[sanitizer].add(line)
-
+ for line in src:
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+ supp_map[sanitizer].add(line)
+
with open(output, "wb") as dst:
- for supp_type, supps in supp_map.items():
- dst.write('extern "C" const char *__%s_default_suppressions() {\n' % supp_type)
- dst.write(' return "{}";\n'.format('\\n'.join(sorted(supps))))
- dst.write('}\n')
-
-
+ for supp_type, supps in supp_map.items():
+ dst.write('extern "C" const char *__%s_default_suppressions() {\n' % supp_type)
+ dst.write(' return "{}";\n'.format('\\n'.join(sorted(supps))))
+ dst.write('}\n')
+
+
def parse_args():
parser = optparse.OptionParser()
parser.disable_interspersed_args()
@@ -69,7 +69,7 @@ if __name__ == '__main__':
supp, cmd = get_leaks_suppressions(cmd)
if supp:
- src_file = "default_suppressions.cpp"
+ src_file = "default_suppressions.cpp"
gen_default_suppressions(supp, src_file, opts.source_root)
cmd += [src_file]
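
gen_default_suppressions(), restored above, groups suppression entries by sanitizer (the file-name prefix before the first dot) and emits C stubs that the sanitizer runtimes call for their default suppressions. An illustrative run (the file name and contents are invented; this is the Python 2 script, so writing str to a 'wb' handle is fine there):

    with open('lsan.supp', 'w') as f:
        f.write('# suppressed on purpose\nleak:NotABug\n')

    gen_default_suppressions(['lsan.supp'], 'default_suppressions.cpp', '.')
    # default_suppressions.cpp now contains:
    #   extern "C" const char *__lsan_default_suppressions() {
    #       return "leak:NotABug";
    #   }
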
diff --git a/build/scripts/link_fat_obj.py b/build/scripts/link_fat_obj.py
index c189668b9e..9458c0ebfb 100644
--- a/build/scripts/link_fat_obj.py
+++ b/build/scripts/link_fat_obj.py
@@ -35,10 +35,10 @@ def get_args():
return parser.parse_args(groups['default']), groups
-def strip_suppression_files(srcs):
+def strip_suppression_files(srcs):
return [s for s in srcs if not s.endswith('.supp')]
-
-
+
+
def main():
args, groups = get_args()
@@ -51,7 +51,7 @@ def main():
# Dependencies
global_srcs = groups['global_srcs']
- global_srcs = strip_suppression_files(global_srcs)
+ global_srcs = strip_suppression_files(global_srcs)
global_srcs = ProcessWholeArchiveOption(args.arch).construct_cmd(global_srcs)
peers = groups['peers']
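
strip_suppression_files(), restored in link_fat_obj.py, simply keeps sanitizer suppression lists out of the fat-object inputs; they are consumed at the final link step instead. For example (the file names are invented):

    print(strip_suppression_files(['a.o', 'lsan.supp', 'b.o']))
    # -> ['a.o', 'b.o']
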
diff --git a/build/scripts/retry.py b/build/scripts/retry.py
index d14170bfec..ac417f7c5f 100644
--- a/build/scripts/retry.py
+++ b/build/scripts/retry.py
@@ -1,29 +1,29 @@
-import time
-import functools
-
-
-# Partly copy-pasted from contrib/python/retry
-def retry_func(f, exceptions=Exception, tries=-1, delay=1, max_delay=None, backoff=1):
- _tries, _delay = tries, delay
- while _tries:
- try:
- return f()
- except exceptions as e:
- _tries -= 1
- if not _tries:
- raise
-
- time.sleep(_delay)
- _delay *= backoff
-
- if max_delay is not None:
- _delay = min(_delay, max_delay)
-
-
-def retry(**retry_kwargs):
- def decorator(func):
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- return retry_func(lambda: func(*args, **kwargs), **retry_kwargs)
- return wrapper
- return decorator
+import time
+import functools
+
+
+# Partly copy-pasted from contrib/python/retry
+def retry_func(f, exceptions=Exception, tries=-1, delay=1, max_delay=None, backoff=1):
+ _tries, _delay = tries, delay
+ while _tries:
+ try:
+ return f()
+ except exceptions as e:
+ _tries -= 1
+ if not _tries:
+ raise
+
+ time.sleep(_delay)
+ _delay *= backoff
+
+ if max_delay is not None:
+ _delay = min(_delay, max_delay)
+
+
+def retry(**retry_kwargs):
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ return retry_func(lambda: func(*args, **kwargs), **retry_kwargs)
+ return wrapper
+ return decorator
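
retry.py exposes both a functional entry point (retry_func) and a decorator; with the default tries=-1 it retries indefinitely. A usage sketch under invented names:

    # Retry a flaky call up to 5 times with exponential backoff, sleeping
    # at most 10 seconds between attempts.
    @retry(tries=5, delay=1, backoff=2, max_delay=10)
    def fetch_resource():
        return might_fail()  # hypothetical unreliable operation

    # Equivalent without the decorator:
    # retry_func(might_fail, tries=5, delay=1, backoff=2, max_delay=10)
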
diff --git a/build/scripts/run_junit.py b/build/scripts/run_junit.py
index 5f56403bed..6944144fa7 100644
--- a/build/scripts/run_junit.py
+++ b/build/scripts/run_junit.py
@@ -1,125 +1,125 @@
-import collections
-import json
-import time
-import os
-import sys
-
-SHUTDOWN_SIGNAL = 'SIGUSR1'
-
-PROVIDES = {
- "devtools/junit-runner/devtools-junit-runner.jar": "junit-runner",
- "devtools/junit5-runner/devtools-junit5-runner.jar": "junit-runner",
-}
-
-
-class SignalInterruptionError(Exception):
- pass
-
-
-def on_shutdown(s, f):
- raise SignalInterruptionError()
-
-
-def get_tracefile_path(args):
- return args[args.index('--output') + 1]
-
-
-def dump_chunk_error(tracefile, name, imps):
- with open(tracefile, 'a') as afile:
- msg = {
- "timestamp": time.time(),
- "name": "chunk-event",
- "value": {
- "errors": [
- [
- "fail",
- "[[bad]]Test contains conflicting dependencies for [[imp]]{}[[bad]]: {}[[rst]]".format(
- name, ', '.join(imps)
- ),
- ],
- ],
- },
- }
- json.dump(msg, afile)
- afile.write("\n")
-
-
-def verify_classpath(args):
- cpfile = args[args.index('-classpath') + 1]
- assert cpfile.startswith('@'), cpfile
-
- cpfile = cpfile[1:]
- assert os.path.exists(cpfile)
-
- with open(cpfile) as afile:
- data = afile.read().splitlines()
-
- collisions = collections.defaultdict(set)
- for cp in data:
- if cp in PROVIDES:
- collisions[PROVIDES[cp]].add(cp)
-
- for name, imps in collisions.items():
- if len(imps) > 1:
- tracefile = get_tracefile_path(args)
- dump_chunk_error(tracefile, name, imps)
- return False
- return True
-
-
-def main():
- args = sys.argv[1:]
-
- # Emulates PROVIDES(X) for junit-runner and junit5-runner.
- # For more info see DEVTOOLSSUPPORT-7454
- if not verify_classpath(args):
- return 1
-
- def execve():
- os.execve(args[0], args, os.environ)
-
- jar_binary = args[args.index('--jar-binary') + 1]
- java_bin_dir = os.path.dirname(jar_binary)
- jstack_binary = os.path.join(java_bin_dir, 'jstack')
-
- if not os.path.exists(jstack_binary):
- sys.stderr.write("jstack is missing: {}\n".format(jstack_binary))
- execve()
-
- import signal
-
- signum = getattr(signal, SHUTDOWN_SIGNAL, None)
-
- if signum is None:
- execve()
-
- import subprocess
-
- proc = subprocess.Popen(args)
- signal.signal(signum, on_shutdown)
- timeout = False
-
- try:
- proc.wait()
- except SignalInterruptionError:
- sys.stderr.write("\nGot {} signal: going to shutdown junit\n".format(signum))
- # Dump stack traces
- subprocess.call([jstack_binary, str(proc.pid)], stdout=sys.stderr)
- # Kill junit - for more info see DEVTOOLS-7636
- os.kill(proc.pid, signal.SIGKILL)
- proc.wait()
- timeout = True
-
- if proc.returncode:
- sys.stderr.write('java exit code: {}\n'.format(proc.returncode))
- if timeout:
- # In case of timeout return specific exit code
- # https://a.yandex-team.ru/arc/trunk/arcadia/devtools/ya/test/const/__init__.py?rev=r8578188#L301
- proc.returncode = 10
- sys.stderr.write('java exit code changed to {}\n'.format(proc.returncode))
-
- return proc.returncode
-
-
-if __name__ == '__main__':
- exit(main())
+import collections
+import json
+import time
+import os
+import sys
+
+SHUTDOWN_SIGNAL = 'SIGUSR1'
+
+PROVIDES = {
+ "devtools/junit-runner/devtools-junit-runner.jar": "junit-runner",
+ "devtools/junit5-runner/devtools-junit5-runner.jar": "junit-runner",
+}
+
+
+class SignalInterruptionError(Exception):
+ pass
+
+
+def on_shutdown(s, f):
+ raise SignalInterruptionError()
+
+
+def get_tracefile_path(args):
+ return args[args.index('--output') + 1]
+
+
+def dump_chunk_error(tracefile, name, imps):
+ with open(tracefile, 'a') as afile:
+ msg = {
+ "timestamp": time.time(),
+ "name": "chunk-event",
+ "value": {
+ "errors": [
+ [
+ "fail",
+ "[[bad]]Test contains conflicting dependencies for [[imp]]{}[[bad]]: {}[[rst]]".format(
+ name, ', '.join(imps)
+ ),
+ ],
+ ],
+ },
+ }
+ json.dump(msg, afile)
+ afile.write("\n")
+
+
+def verify_classpath(args):
+ cpfile = args[args.index('-classpath') + 1]
+ assert cpfile.startswith('@'), cpfile
+
+ cpfile = cpfile[1:]
+ assert os.path.exists(cpfile)
+
+ with open(cpfile) as afile:
+ data = afile.read().splitlines()
+
+ collisions = collections.defaultdict(set)
+ for cp in data:
+ if cp in PROVIDES:
+ collisions[PROVIDES[cp]].add(cp)
+
+ for name, imps in collisions.items():
+ if len(imps) > 1:
+ tracefile = get_tracefile_path(args)
+ dump_chunk_error(tracefile, name, imps)
+ return False
+ return True
+
+
+def main():
+ args = sys.argv[1:]
+
+ # Emulates PROVIDES(X) for junit-runner and junit5-runner.
+ # For more info see DEVTOOLSSUPPORT-7454
+ if not verify_classpath(args):
+ return 1
+
+ def execve():
+ os.execve(args[0], args, os.environ)
+
+ jar_binary = args[args.index('--jar-binary') + 1]
+ java_bin_dir = os.path.dirname(jar_binary)
+ jstack_binary = os.path.join(java_bin_dir, 'jstack')
+
+ if not os.path.exists(jstack_binary):
+ sys.stderr.write("jstack is missing: {}\n".format(jstack_binary))
+ execve()
+
+ import signal
+
+ signum = getattr(signal, SHUTDOWN_SIGNAL, None)
+
+ if signum is None:
+ execve()
+
+ import subprocess
+
+ proc = subprocess.Popen(args)
+ signal.signal(signum, on_shutdown)
+ timeout = False
+
+ try:
+ proc.wait()
+ except SignalInterruptionError:
+ sys.stderr.write("\nGot {} signal: going to shutdown junit\n".format(signum))
+ # Dump stack traces
+ subprocess.call([jstack_binary, str(proc.pid)], stdout=sys.stderr)
+ # Kill junit - for more info see DEVTOOLS-7636
+ os.kill(proc.pid, signal.SIGKILL)
+ proc.wait()
+ timeout = True
+
+ if proc.returncode:
+ sys.stderr.write('java exit code: {}\n'.format(proc.returncode))
+ if timeout:
+ # In case of timeout return specific exit code
+ # https://a.yandex-team.ru/arc/trunk/arcadia/devtools/ya/test/const/__init__.py?rev=r8578188#L301
+ proc.returncode = 10
+ sys.stderr.write('java exit code changed to {}\n'.format(proc.returncode))
+
+ return proc.returncode
+
+
+if __name__ == '__main__':
+ exit(main())
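
verify_classpath() above emulates PROVIDES(junit-runner): if the classpath file lists both the JUnit 4 and the JUnit 5 runner jar, the chunk is failed with a trace-file diagnostic instead of starting the JVM. The collision detection in isolation (the classpath contents are invented):

    import collections

    data = [
        'devtools/junit-runner/devtools-junit-runner.jar',
        'devtools/junit5-runner/devtools-junit5-runner.jar',
        'contrib/java/junit/junit.jar',
    ]
    collisions = collections.defaultdict(set)
    for cp in data:
        if cp in PROVIDES:
            collisions[PROVIDES[cp]].add(cp)
    print({name: imps for name, imps in collisions.items() if len(imps) > 1})
    # both runner jars map to the provided name 'junit-runner'
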
diff --git a/build/scripts/unpacking_jtest_runner.py b/build/scripts/unpacking_jtest_runner.py
index 9730dcd711..9bb314a98a 100644
--- a/build/scripts/unpacking_jtest_runner.py
+++ b/build/scripts/unpacking_jtest_runner.py
@@ -1,10 +1,10 @@
-import io
-import json
-import optparse
-import os
+import io
+import json
+import optparse
+import os
import sys
import subprocess
-import time
+import time
import zipfile
import platform
@@ -15,7 +15,7 @@ import platform
def parse_args():
parser = optparse.OptionParser()
parser.disable_interspersed_args()
- parser.add_option('--trace-file')
+ parser.add_option('--trace-file')
parser.add_option('--jar-binary')
parser.add_option('--tests-jar-path')
parser.add_option('--classpath-option-type', choices=('manifest', 'command_file', 'list'), default='manifest')
@@ -49,27 +49,27 @@ def fix_cmd(cmd):
return cmd
-def dump_event(etype, data, filename):
- event = {
- 'timestamp': time.time(),
- 'value': data,
- 'name': etype,
- }
-
- with io.open(filename, 'a', encoding='utf8') as afile:
- afile.write(unicode(json.dumps(event) + '\n'))
-
-
-def dump_chunk_event(data, filename):
- return dump_event('chunk-event', data, filename)
-
-
-def extract_jars(dest, archive):
- os.makedirs(dest)
- with zipfile.ZipFile(archive) as zf:
- zf.extractall(dest)
-
-
+def dump_event(etype, data, filename):
+ event = {
+ 'timestamp': time.time(),
+ 'value': data,
+ 'name': etype,
+ }
+
+ with io.open(filename, 'a', encoding='utf8') as afile:
+ afile.write(unicode(json.dumps(event) + '\n'))
+
+
+def dump_chunk_event(data, filename):
+ return dump_event('chunk-event', data, filename)
+
+
+def extract_jars(dest, archive):
+ os.makedirs(dest)
+ with zipfile.ZipFile(archive) as zf:
+ zf.extractall(dest)
+
+
def make_bfg_from_cp(class_path, out):
class_path = ' '.join(
map(lambda path: ('file:/' + path.lstrip('/')) if os.path.isabs(path) else path, class_path)
@@ -89,7 +89,7 @@ def make_command_file_from_cp(class_path, out):
def main():
- s = time.time()
+ s = time.time()
opts, args = parse_args()
# unpack tests jar
@@ -100,13 +100,13 @@ def main():
build_root = ''
dest = os.path.abspath('test-classes')
- extract_jars(dest, opts.tests_jar_path)
-
- metrics = {
- 'suite_jtest_extract_jars_(seconds)': time.time() - s,
- }
-
- s = time.time()
+ extract_jars(dest, opts.tests_jar_path)
+
+ metrics = {
+ 'suite_jtest_extract_jars_(seconds)': time.time() - s,
+ }
+
+ s = time.time()
# fix java classpath
cp_idx = args.index('-classpath')
if args[cp_idx + 1].startswith('@'):
@@ -131,12 +131,12 @@ def main():
else:
args[cp_idx + 1] = args[cp_idx + 1].replace(opts.tests_jar_path, dest)
args = fix_cmd(args[:cp_idx]) + args[cp_idx:]
-
- metrics['suite_jtest_fix_classpath_(seconds)'] = time.time() - s
-
- if opts.trace_file:
- dump_chunk_event({'metrics': metrics}, opts.trace_file)
-
+
+ metrics['suite_jtest_fix_classpath_(seconds)'] = time.time() - s
+
+ if opts.trace_file:
+ dump_chunk_event({'metrics': metrics}, opts.trace_file)
+
# run java cmd
if platform.system() == 'Windows':
sys.exit(subprocess.Popen(args).wait())
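
The timing code restored above reports its measurements as chunk-event records appended to the trace file that the test machinery consumes. One call, with invented values:

    dump_chunk_event({'metrics': {'suite_jtest_extract_jars_(seconds)': 1.42}},
                     'junit.trace')
    # appends a single JSON line of the form:
    # {"timestamp": 1644500000.0, "name": "chunk-event",
    #  "value": {"metrics": {"suite_jtest_extract_jars_(seconds)": 1.42}}}
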
diff --git a/build/scripts/with_coverage.py b/build/scripts/with_coverage.py
index d62435c3b8..52937490bc 100644
--- a/build/scripts/with_coverage.py
+++ b/build/scripts/with_coverage.py
@@ -1,5 +1,5 @@
-# TODO prettyboy remove after ya-bin release
-
+# TODO prettyboy remove after ya-bin release
+
import os
import sys
import subprocess
diff --git a/build/scripts/with_crash_on_timeout.py b/build/scripts/with_crash_on_timeout.py
index bde864ed29..775347f9c1 100644
--- a/build/scripts/with_crash_on_timeout.py
+++ b/build/scripts/with_crash_on_timeout.py
@@ -1,5 +1,5 @@
-# TODO prettyboy remove after ya-bin release
-
+# TODO prettyboy remove after ya-bin release
+
import os
import sys
import subprocess
diff --git a/build/scripts/ya.make b/build/scripts/ya.make
index 710165e40d..105f8dfc7b 100644
--- a/build/scripts/ya.make
+++ b/build/scripts/ya.make
@@ -8,12 +8,12 @@ TEST_SRCS(
build_java_codenav_index.py
build_java_with_error_prone.py
build_java_with_error_prone2.py
- build_mn.py
- build_pln_header.py
+ build_mn.py
+ build_pln_header.py
cat.py
- cgo1_wrapper.py
+ cgo1_wrapper.py
check_config_h.py
- collect_java_srcs.py
+ collect_java_srcs.py
compile_cuda.py
compile_java.py
compile_jsrc.py
@@ -29,7 +29,7 @@ TEST_SRCS(
extract_jacoco_report.py
f2c.py
fail_module_cmd.py
- fetch_from.py
+ fetch_from.py
fetch_from_external.py
fetch_from_mds.py
fetch_from_npm.py
@@ -40,10 +40,10 @@ TEST_SRCS(
fix_msvc_output.py
fs_tools.py
gen_aar_gradle_script.py
- gen_java_codenav_entry.py
+ gen_java_codenav_entry.py
gen_java_codenav_protobuf.py
gen_mx_table.py
- gen_py3_reg.py
+ gen_py3_reg.py
gen_py_reg.py
gen_test_apk_gradle_script.py
gen_ub.py
@@ -51,7 +51,7 @@ TEST_SRCS(
go_proto_wrapper.py
go_tool.py
ios_wrapper.py
- java_pack_to_file.py
+ java_pack_to_file.py
link_asrc.py
link_dyn_lib.py
link_exe.py
@@ -71,14 +71,14 @@ TEST_SRCS(
py_compile.py
run_ios_simulator.py
run_javac.py
- run_junit.py
+ run_junit.py
run_llvm_dsymutil.py
run_msvc_wine.py
run_tool.py
sky.py
stdout2stderr.py
symlink.py
- tar_directory.py
+ tar_directory.py
tar_sources.py
tared_protoc.py
touch.py
@@ -87,7 +87,7 @@ TEST_SRCS(
with_coverage.py
with_crash_on_timeout.py
with_pathsep_resolve.py
- wrap_groovyc.py
+ wrap_groovyc.py
wrapper.py
writer.py
xargs.py
diff --git a/build/ya.conf.json b/build/ya.conf.json
index 5f7cc875d6..6757c68936 100644
--- a/build/ya.conf.json
+++ b/build/ya.conf.json
@@ -323,10 +323,10 @@
"dlv": {
"description": "Debugger for the Go programming language"
},
- "node-search": {
- "description": "Autocheck data inspector tool",
- "visible": false
- },
+ "node-search": {
+ "description": "Autocheck data inspector tool",
+ "visible": false
+ },
"releaser": {
"description": "Release tool"
},
@@ -426,10 +426,10 @@
"rdtset": {
"description": "Task CPU affinity and Intel(R) Resource Director Technology control tool"
},
- "optrace": {
- "description": "optrace records output files written by each process",
- "visible": false
- },
+ "optrace": {
+ "description": "optrace records output files written by each process",
+ "visible": false
+ },
"yoimports": {
"description": "Go imports formatting tool"
},
@@ -3809,44 +3809,44 @@
}
]
},
- "optrace": {
- "tools": {
- "optrace": {
- "bottle": "optrace",
- "executable": "optrace"
- }
- },
- "platforms": [
- {
- "host": {
- "os": "LINUX"
- },
- "default": true
- }
- ]
- },
- "node-search": {
- "tools": {
- "node-search": {
- "bottle": "node-search",
- "executable": "node-search"
- }
- },
- "platforms": [
- {
- "host": {
- "os": "LINUX"
- },
- "default": true
- },
- {
- "host": {
- "os": "DARWIN"
- },
- "default": true
- }
- ]
- },
+ "optrace": {
+ "tools": {
+ "optrace": {
+ "bottle": "optrace",
+ "executable": "optrace"
+ }
+ },
+ "platforms": [
+ {
+ "host": {
+ "os": "LINUX"
+ },
+ "default": true
+ }
+ ]
+ },
+ "node-search": {
+ "tools": {
+ "node-search": {
+ "bottle": "node-search",
+ "executable": "node-search"
+ }
+ },
+ "platforms": [
+ {
+ "host": {
+ "os": "LINUX"
+ },
+ "default": true
+ },
+ {
+ "host": {
+ "os": "DARWIN"
+ },
+ "default": true
+ }
+ ]
+ },
"gpt": {
"tools": {
"gpt_perf": {
@@ -6923,32 +6923,32 @@
]
}
},
- "optrace": {
- "formula": {
- "sandbox_id": [
- 894130496
- ],
- "match": "optrace"
- },
- "executable": {
- "optrace": [
- "optrace"
- ]
- }
- },
- "node-search": {
- "formula": {
- "sandbox_id": [
- 1157378401
- ],
- "match": "node_search"
- },
- "executable": {
- "node-search": [
- "node_search"
- ]
- }
- },
+ "optrace": {
+ "formula": {
+ "sandbox_id": [
+ 894130496
+ ],
+ "match": "optrace"
+ },
+ "executable": {
+ "optrace": [
+ "optrace"
+ ]
+ }
+ },
+ "node-search": {
+ "formula": {
+ "sandbox_id": [
+ 1157378401
+ ],
+ "match": "node_search"
+ },
+ "executable": {
+ "node-search": [
+ "node_search"
+ ]
+ }
+ },
"rsync": {
"formula": {
"sandbox_id": [
diff --git a/build/ya.make b/build/ya.make
index 407b8f13e0..760f63ae22 100644
--- a/build/ya.make
+++ b/build/ya.make
@@ -7,10 +7,10 @@ NEED_CHECK()
PY2_LIBRARY()
-PY_SRCS(
- ymake_conf.py
-)
-
+PY_SRCS(
+ ymake_conf.py
+)
+
PEERDIR(
library/cpp/deprecated/enum_codegen
library/cpp/deprecated/split
@@ -21,14 +21,14 @@ END()
RECURSE(
conf_fatal_error
- config
+ config
docs/empty
external_resources
- platform/java
+ platform/java
platform/local_so
- platform/perl
- platform/python
- platform/python/ldflags
+ platform/perl
+ platform/python
+ platform/python/ldflags
plugins
prebuilt
scripts
diff --git a/build/ymake.core.conf b/build/ymake.core.conf
index 081833998b..3101432ffe 100644
--- a/build/ymake.core.conf
+++ b/build/ymake.core.conf
@@ -302,14 +302,14 @@ ENUM_PARSER_TOOL=${tool:"tools/enum_parser/enum_parser"}
# tag:python-specific tag:cython-specific
CYTHON_SCRIPT=${input:"${ARCADIA_ROOT}/contrib/tools/cython/cython.py"}
RUN_CYTHON_SCRIPT=$YMAKE_PYTHON $CYTHON_SCRIPT
-CYTHON_OUTPUT_INCLUDES=\
+CYTHON_OUTPUT_INCLUDES=\
${output_include;hide:"contrib/libs/python/Include/compile.h"} \
${output_include;hide:"contrib/libs/python/Include/frameobject.h"} \
-${output_include;hide:"contrib/libs/python/Include/longintrepr.h"} \
-${output_include;hide:"contrib/libs/python/Include/pyconfig.h"} \
-${output_include;hide:"contrib/libs/python/Include/Python.h"} \
-${output_include;hide:"contrib/libs/python/Include/pythread.h"} \
-${output_include;hide:"contrib/libs/python/Include/structmember.h"} \
+${output_include;hide:"contrib/libs/python/Include/longintrepr.h"} \
+${output_include;hide:"contrib/libs/python/Include/pyconfig.h"} \
+${output_include;hide:"contrib/libs/python/Include/Python.h"} \
+${output_include;hide:"contrib/libs/python/Include/pythread.h"} \
+${output_include;hide:"contrib/libs/python/Include/structmember.h"} \
${output_include;hide:"contrib/libs/python/Include/traceback.h"} \
${output_include;hide:"contrib/tools/cython/generated_c_headers.h"} \
${output_include;hide:"omp.h"}
@@ -1190,12 +1190,12 @@ module _BASE_UNIT: _BARE_UNIT {
CFLAGS+=-fsanitize-coverage=$SANITIZE_COVERAGE
LDFLAGS+=-fsanitize-coverage=$SANITIZE_COVERAGE
}
-
- when ($CLANG_COVERAGE && $CLANG_COVERAGE != "no") {
- CFLAGS+=-fprofile-instr-generate -fcoverage-mapping -DCLANG_COVERAGE
- LDFLAGS+=-fprofile-instr-generate -fcoverage-mapping
- }
-
+
+ when ($CLANG_COVERAGE && $CLANG_COVERAGE != "no") {
+ CFLAGS+=-fprofile-instr-generate -fcoverage-mapping -DCLANG_COVERAGE
+ LDFLAGS+=-fprofile-instr-generate -fcoverage-mapping
+ }
+
when ($NLG_COVERAGE && $NLG_COVERAGE != "no") {
CFLAGS+=-DNLG_COVERAGE
}
@@ -1308,10 +1308,10 @@ module _BASE_UNIT: _BARE_UNIT {
PEERDIR += build/external_resources/codenavigation
}
- when ($CYTHON_COVERAGE && $CYTHON_COVERAGE == "yes") {
- CFLAGS+=-DCYTHON_TRACE=1 -DCYTHON_TRACE_NOGIL=1
- }
-
+ when ($CYTHON_COVERAGE && $CYTHON_COVERAGE == "yes") {
+ CFLAGS+=-DCYTHON_TRACE=1 -DCYTHON_TRACE_NOGIL=1
+ }
+
DEFAULT(USE_SSE4 yes)
when ($NOSSE != "yes") {
@@ -1652,10 +1652,10 @@ module _BASE_PROGRAM: _LINK_UNIT {
when ($SANITIZER_DEFINED == "yes") {
PEERDIR += contrib/libs/cxxsupp/libsan
}
-
- when ($CLANG_COVERAGE && $CLANG_COVERAGE != "no") {
- PEERDIR+=library/cpp/testing/dump_clang_coverage
- }
+
+ when ($CLANG_COVERAGE && $CLANG_COVERAGE != "no") {
+ PEERDIR+=library/cpp/testing/dump_clang_coverage
+ }
when ($IDE_MSVS == "yes") {
PEERDIR+=build/scripts/c_templates
@@ -1664,8 +1664,8 @@ module _BASE_PROGRAM: _LINK_UNIT {
when ($_CUSTOM_LINK_STEP_SCRIPT) {
LINK_SCRIPT_EXE_FLAGS+=--python=$YMAKE_PYTHON --custom-step=${input:_CUSTOM_LINK_STEP_SCRIPT}
}
-}
-
+}
+
CPP_PROGRAM_SEM=add_executable $MODDIR $REALPRJNAME ${hide:TARGET} ${hide:AUTO_INPUT} && vcs_info && target_link_flags PUBLIC $OBJADDE_LIB $OBJADDE
### @usage: PROGRAM([progname])
###
@@ -1707,8 +1707,8 @@ module PY2_PROGRAM: _PY2_PROGRAM {
PEERDIR+=build/rules/py2_deprecation
}
ASSERT(_OK You are using deprecated Python2-only code (PY2_PROGRAM). Please consider rewriting to Python 3.)
-}
-
+}
+
# tag:python-specific
### @usage: NO_EXTENDED_SOURCE_SEARCH()
###
@@ -1837,7 +1837,7 @@ macro CUSTOM_LINK_STEP_SCRIPT(Name) {
module _BASE_UNITTEST: _BASE_PROGRAM {
.FINAL_TARGET=no
.NODE_TYPE=Program
- .ALLOWED=YT_SPEC
+ .ALLOWED=YT_SPEC
when ($UT_SKIP_EXCEPTIONS == "yes") {
C_DEFINES+=-DUT_SKIP_EXCEPTIONS
}
@@ -1970,7 +1970,7 @@ module BOOSTTEST_WITH_MAIN: BOOSTTEST {
PEERDIR(library/cpp/testing/boost_test_main)
}
-FUZZ_DICTS_VALUE=
+FUZZ_DICTS_VALUE=
### @usage: FUZZ_DICTS(path1 [path2...])
###
### Allows you to specify dictionaries, relative to the root of Arcadia, which will be used in Fuzzing.
@@ -1980,9 +1980,9 @@ FUZZ_DICTS_VALUE=
### Documentation: https://wiki.yandex-team.ru/yatool/fuzzing/
macro FUZZ_DICTS(Data...) {
SET_APPEND(FUZZ_DICTS_VALUE $Data)
-}
-
-FUZZ_OPTS_VALUE=
+}
+
+FUZZ_OPTS_VALUE=
### @usage: FUZZ_OPTS(opt1 [Opt2...])
###
### Overrides or adds options to the corpus mining and fuzzer run.
@@ -1999,22 +1999,22 @@ FUZZ_OPTS_VALUE=
### Documentation: https://wiki.yandex-team.ru/yatool/fuzzing/
macro FUZZ_OPTS(Data...) {
SET_APPEND(FUZZ_OPTS_VALUE $Data)
-}
-
+}
+
# tag:yt-specific tag:test
-TEST_YT_SPEC_VALUE=
-### @usage: YT_SPEC(path1 [path2...])
-###
-### Allows you to specify json-files with YT task and operation specs,
-### which will be used to run test node in the YT.
-### Test must be marked with ya:yt tag.
-### Files must be relative to the root of Arcadia.
-###
-### Documentation: https://wiki.yandex-team.ru/yatool/test/
-macro YT_SPEC(Data...) {
- SET_APPEND(TEST_YT_SPEC_VALUE $Data)
-}
-
+TEST_YT_SPEC_VALUE=
+### @usage: YT_SPEC(path1 [path2...])
+###
+### Allows you to specify json-files with YT task and operation specs,
+### which will be used to run test node in the YT.
+### Test must be marked with ya:yt tag.
+### Files must be relative to the root of Arcadia.
+###
+### Documentation: https://wiki.yandex-team.ru/yatool/test/
+macro YT_SPEC(Data...) {
+ SET_APPEND(TEST_YT_SPEC_VALUE $Data)
+}
+
# tag:test
TEST_SRCS_VALUE=
### @usage: TEST_SRCS(Files...)
@@ -2067,8 +2067,8 @@ TEST_REQUIREMENTS_VALUE=
### Documentation about the Arcadia test system: https://wiki.yandex-team.ru/yatool/test/
macro REQUIREMENTS(Tags...) {
SET_APPEND(TEST_REQUIREMENTS_VALUE $Tags)
-}
-
+}
+
# tag:test
TEST_ENV_VALUE=
### @usage: ENV(key[=value])
@@ -2289,7 +2289,7 @@ module GTEST_UGLY: _BASE_PROGRAM {
module EXECTEST: _BARE_UNIT {
.NODE_TYPE=Program
.FINAL_TARGET=no
- .ALLOWED=YT_SPEC
+ .ALLOWED=YT_SPEC
.RESTRICTED=FORK_TEST_FILES
SET(MODULE_SUFFIX .pkg.fake)
SETUP_EXECTEST()
@@ -2635,14 +2635,14 @@ module DLL_UNIT: _LINK_UNIT {
LINK_DYN_LIB_FLAGS+=--fix-elf ${tool:"tools/fix_elf"}
}
}
-
+
when ($DARWIN == "yes") {
LDFLAGS += -undefined dynamic_lookup
}
- when ($CLANG_COVERAGE && $CLANG_COVERAGE != "no") {
- PEERDIR+=library/cpp/testing/dump_clang_coverage
- }
+ when ($CLANG_COVERAGE && $CLANG_COVERAGE != "no") {
+ PEERDIR+=library/cpp/testing/dump_clang_coverage
+ }
when ($IDE_MSVS == "yes") {
PEERDIR+=build/scripts/c_templates
@@ -3941,7 +3941,7 @@ module _JAR_RUNABLE: _COMPILABLE_JAR_BASE {
otherwise {
_SCRIPTGEN_FLAGS=-D IS_UBERJAR=no
}
- CHECK_PROVIDES()
+ CHECK_PROVIDES()
}
# tag:java-specific
@@ -3972,7 +3972,7 @@ module _JAR_TEST: _COMPILABLE_JAR_BASE {
SET(MODULE_SUFFIX .test.cp.jar)
ENABLE(YMAKE_JAVA_TEST)
JAVA_TEST()
- CHECK_PROVIDES()
+ CHECK_PROVIDES()
}
# tag:java-specific
@@ -4126,9 +4126,9 @@ module _BASE_PY_PROGRAM: _BASE_PROGRAM {
PEERDIR += contrib/tools/python/src/Modules/_sqlite
}
}
- when ($PYTHON_COVERAGE == "yes") {
- PEERDIR+=library/python/coverage
- }
+ when ($PYTHON_COVERAGE == "yes") {
+ PEERDIR+=library/python/coverage
+ }
when ($ARCH_PPC64LE == "yes") {
_MY_ALLOCATOR=SYSTEM
@@ -4199,9 +4199,9 @@ module _BASE_PY3_PROGRAM: _BASE_PROGRAM {
when ($SANITIZER_TYPE && $SANITIZER_TYPE != "no") {
NO_STRIP=yes
}
- when ($PYTHON_COVERAGE == "yes") {
- PEERDIR+=library/python/coverage
- }
+ when ($PYTHON_COVERAGE == "yes") {
+ PEERDIR+=library/python/coverage
+ }
when ($CODENAVIGATION && $NOCODENAVIGATION != "yes") {
PEERDIR += contrib/python/six
}
@@ -5546,7 +5546,7 @@ macro SPLIT_FACTOR(Factor) {
}
# tag:test
-FORK_TEST_FILES_MODE=
+FORK_TEST_FILES_MODE=
### @usage: FORK_TEST_FILES()
###
### Only for PY2TEST and PY3TEST: splits a file executable with the tests on chunks in the files listed in TEST_SRCS
@@ -5567,8 +5567,8 @@ TEST_SIZE_NAME=SMALL
### Documentation about the system test: https://wiki.yandex-team.ru/yatool/test/
macro SIZE(Type) {
SET(TEST_SIZE_NAME $Type)
-}
-
+}
+
### @usage: JOIN_SRCS(Out Src...)
###
### Join set of sources into single file named Out and send it for further processing.
@@ -6746,32 +6746,32 @@ macro NO_SANITIZE_COVERAGE() {
### @usage: NO_CLANG_COVERAGE()
###
### Disable heavyweight clang coverage for the module
-macro NO_CLANG_COVERAGE() {
- DISABLE(CLANG_COVERAGE)
-}
-
+macro NO_CLANG_COVERAGE() {
+ DISABLE(CLANG_COVERAGE)
+}
+
macro NO_CLANG_TIDY() {
DISABLE(TIDY)
}
# tag:python-specific tag:coverage
-### @usage: NO_PYTHON_COVERAGE()
-###
-### Disable python coverage for module
-macro NO_PYTHON_COVERAGE() {
- DISABLE(PYTHON_COVERAGE)
-}
-
+### @usage: NO_PYTHON_COVERAGE()
+###
+### Disable python coverage for module
+macro NO_PYTHON_COVERAGE() {
+ DISABLE(PYTHON_COVERAGE)
+}
+
# tag:python-specific tag:coverage tag:cython
-### @usage: NO_CYTHON_COVERAGE()
-###
-### Disable cython and cythonized python coverage (CYTHONIZE_PY)
-### Implies NO_CLANG_COVERAGE() - right now, we can't disable instrumentation for .py.cpp files, but enable for .cpp
-macro NO_CYTHON_COVERAGE() {
- DISABLE(CYTHON_COVERAGE)
- NO_CLANG_COVERAGE()
-}
-
+### @usage: NO_CYTHON_COVERAGE()
+###
+### Disable cython and cythonized python coverage (CYTHONIZE_PY)
+### Implies NO_CLANG_COVERAGE() - right now, we can't disable instrumentation for .py.cpp files, but enable for .cpp
+macro NO_CYTHON_COVERAGE() {
+ DISABLE(CYTHON_COVERAGE)
+ NO_CLANG_COVERAGE()
+}
+
# tag:lua-specific
LUAJIT_PATH=${ARCADIA_ROOT}/contrib/libs/luajit
macro _LUAJIT_OBJDUMP(Src, OUT="") {
@@ -7525,7 +7525,7 @@ multimodule PROTO_LIBRARY {
OPTIMIZE_PY_PROTOS()
OBJ_SUF=.py2
# Can not use NO_LINT(), because is not allowed outside of contrib directory
- SET(LINT_LEVEL_VALUE none_internal)
+ SET(LINT_LEVEL_VALUE none_internal)
when ($_COMMON_GOOGLE_APIS != "None") {
PEERDIR += contrib/libs/googleapis-common-protos
@@ -7555,7 +7555,7 @@ multimodule PROTO_LIBRARY {
}
OBJ_SUF=.py3
# Can not use NO_LINT(), because is not allowed outside of contrib directory
- SET(LINT_LEVEL_VALUE none_internal)
+ SET(LINT_LEVEL_VALUE none_internal)
when ($_COMMON_GOOGLE_APIS != "None") {
PEERDIR += contrib/libs/googleapis-common-protos
@@ -9015,26 +9015,26 @@ when ($OPENGL_REQUIRED) {
}
# tag:python-specific
-multimodule PY23_TEST {
+multimodule PY23_TEST {
module PY2 : PYTEST_BIN {
MODULE_PREFIX=py2_
- OBJ_SUF=.py2
+ OBJ_SUF=.py2
CANONIZE_SUB_PATH=py2test
RUN_CYTHON_SCRIPT=$YMAKE_PYTHON $CYTHON_SCRIPT
- }
+ }
module PY3TEST_PROGRAM: PY3TEST_BIN {
.FINAL_TARGET=yes
- OBJ_SUF=.py3
+ OBJ_SUF=.py3
CANONIZE_SUB_PATH=py3test
RUN_CYTHON_SCRIPT=$YMAKE_PYTHON $CYTHON_SCRIPT
- }
+ }
module PY3TEST_LIBRARY: _PY3_LIBRARY {
PEERDIR+=library/python/pytest
_REQUIRE_EXPLICIT_LICENSE()
RUN_CYTHON_SCRIPT=$YMAKE_PYTHON $CYTHON_SCRIPT
}
-}
+}
# tag:windows-specific
WINDOWS_MANIFEST=
diff --git a/build/ymake_conf.py b/build/ymake_conf.py
index 30219eb85e..327c639568 100755
--- a/build/ymake_conf.py
+++ b/build/ymake_conf.py
@@ -1665,44 +1665,44 @@ class GnuCompiler(Compiler):
append('EXTRA_OUTPUT')
style = ['${requirements;hide:CC_REQUIREMENTS} ${hide;kv:"p CC"} ${hide;kv:"pc green"}']
- cxx_args = [
+ cxx_args = [
'$CLANG_TIDY_ARGS',
- '$YNDEXER_ARGS',
- '$CXX_COMPILER',
- '$C_FLAGS_PLATFORM',
- '$GCC_COMPILE_FLAGS',
- '$CXXFLAGS',
- '$CL_MACRO_INFO',
- '$CL_MACRO_INFO_DISABLE_CACHE__NO_UID__',
+ '$YNDEXER_ARGS',
+ '$CXX_COMPILER',
+ '$C_FLAGS_PLATFORM',
+ '$GCC_COMPILE_FLAGS',
+ '$CXXFLAGS',
+ '$CL_MACRO_INFO',
+ '$CL_MACRO_INFO_DISABLE_CACHE__NO_UID__',
'$COMPILER_TIME_TRACE_FLAGS',
- '$EXTRA_OUTPUT',
- '$SRCFLAGS',
+ '$EXTRA_OUTPUT',
+ '$SRCFLAGS',
'$_LANG_CFLAGS_VALUE',
'${input:SRC}',
- '$TOOLCHAIN_ENV',
+ '$TOOLCHAIN_ENV',
'$YNDEXER_OUTPUT',
'&& $COMPILER_TIME_TRACE_POSTPROCESS',
- ] + style
+ ] + style
- c_args = [
+ c_args = [
'$CLANG_TIDY_ARGS',
- '$YNDEXER_ARGS',
- '$C_COMPILER',
- '$C_FLAGS_PLATFORM',
- '$GCC_COMPILE_FLAGS',
- '$CFLAGS',
- '$CL_MACRO_INFO',
- '$CL_MACRO_INFO_DISABLE_CACHE__NO_UID__',
- '$CONLYFLAGS',
+ '$YNDEXER_ARGS',
+ '$C_COMPILER',
+ '$C_FLAGS_PLATFORM',
+ '$GCC_COMPILE_FLAGS',
+ '$CFLAGS',
+ '$CL_MACRO_INFO',
+ '$CL_MACRO_INFO_DISABLE_CACHE__NO_UID__',
+ '$CONLYFLAGS',
'$COMPILER_TIME_TRACE_FLAGS',
- '$EXTRA_OUTPUT',
- '$SRCFLAGS',
+ '$EXTRA_OUTPUT',
+ '$SRCFLAGS',
'${input:SRC}',
- '$TOOLCHAIN_ENV',
+ '$TOOLCHAIN_ENV',
'$YNDEXER_OUTPUT',
'&& $COMPILER_TIME_TRACE_POSTPROCESS',
- ] + style
-
+ ] + style
+
ignore_c_args_no_deps = [
'${input:SRC}',
'$SRCFLAGS',
@@ -2269,11 +2269,11 @@ class MSVCToolchainOptions(ToolchainOptions):
def prefix(_type, _path):
if not self.under_wine:
return _path
- return '{wine} {type} $WINE_ENV ${{ARCADIA_ROOT}} ${{ARCADIA_BUILD_ROOT}} {path}'.format(
- wine='${YMAKE_PYTHON} ${input:\"build/scripts/run_msvc_wine.py\"} $(WINE_TOOL-sbr:1093314933)/bin/wine64 -v140',
- type=_type,
- path=_path
- )
+ return '{wine} {type} $WINE_ENV ${{ARCADIA_ROOT}} ${{ARCADIA_BUILD_ROOT}} {path}'.format(
+ wine='${YMAKE_PYTHON} ${input:\"build/scripts/run_msvc_wine.py\"} $(WINE_TOOL-sbr:1093314933)/bin/wine64 -v140',
+ type=_type,
+ path=_path
+ )
self.masm_compiler = prefix('masm', os.path.join(bindir, tools_name, asm_name))
self.link = prefix('link', os.path.join(bindir, tools_name, 'link.exe'))
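
Under wine, the restored prefix() wraps every MSVC tool invocation in run_msvc_wine.py. A standalone re-derivation of the under_wine branch (runnable outside the class; the tool path is invented):

    def prefix(_type, _path):
        wine = ('${YMAKE_PYTHON} ${input:"build/scripts/run_msvc_wine.py"} '
                '$(WINE_TOOL-sbr:1093314933)/bin/wine64 -v140')
        return '{wine} {type} $WINE_ENV ${{ARCADIA_ROOT}} ${{ARCADIA_BUILD_ROOT}} {path}'.format(
            wine=wine, type=_type, path=_path)

    print(prefix('masm', 'bin/ml64.exe'))
    # -> ${YMAKE_PYTHON} ${input:"build/scripts/run_msvc_wine.py"}
    #    $(WINE_TOOL-sbr:1093314933)/bin/wine64 -v140 masm $WINE_ENV
    #    ${ARCADIA_ROOT} ${ARCADIA_BUILD_ROOT} bin/ml64.exe
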