author    | iaz1607 <iaz1607@yandex-team.ru>             | 2022-02-10 16:45:37 +0300
committer | Daniil Cherednik <dcherednik@yandex-team.ru> | 2022-02-10 16:45:37 +0300
commit    | e5437feb4ac2d2dc044e1090b9312dde5ef197e0 (patch)
tree      | f5a238c69dd20a1fa2092127a31b8aff25020f7d /build
parent    | f4945d0a44b8770f0801de3056aa41639b0b7bd2 (diff)
download  | ydb-e5437feb4ac2d2dc044e1090b9312dde5ef197e0.tar.gz
Restoring authorship annotation for <iaz1607@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'build')

-rw-r--r-- | build/conf/project_specific/yql_udf.conf  |   4
-rw-r--r-- | build/config/tests/clang_tidy/config.yaml |  28
-rw-r--r-- | build/plugins/ytest.py                    | 384
-rw-r--r-- | build/scripts/clang_tidy.py               | 204
-rw-r--r-- | build/scripts/clang_tidy_arch.py          |  66
-rw-r--r-- | build/ya.conf.json                        |  60
-rw-r--r-- | build/ya.make                             |   2
-rw-r--r-- | build/ymake.core.conf                     | 214
-rwxr-xr-x | build/ymake_conf.py                       |  70

9 files changed, 516 insertions, 516 deletions
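The lines being re-attributed below implement the clang-tidy integration in the ya build system: build/plugins/ytest.py registers a `clang_tidy` pseudo-test for PROGRAM/LIBRARY/DLL modules and picks a per-project tidy config by prefix-matching the module path against JSON maps (tidy_project_map.json, tidy_default_map.json), falling back to build/config/tests/clang_tidy/config.yaml. A minimal, standalone sketch of that lookup follows; the real code receives a ymake `unit` object and resolves the map via `unit.resolve("$S/...")`, and the sample map entry in the comment is hypothetical:

```python
import json
import os

DEFAULT_TIDY_CONFIG = "build/config/tests/clang_tidy/config.yaml"
PROJECT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_project_map.json"


def get_project_tidy_config(unit_path, source_root):
    """Return the clang-tidy config whose project prefix matches the module path."""
    with open(os.path.join(source_root, PROJECT_TIDY_CONFIG_MAP_PATH), "r") as afile:
        # e.g. {"some/project/": "some/project/tidy_config.yaml"}  (hypothetical entry)
        tidy_map = json.load(afile)
    for project_prefix, config_path in tidy_map.items():
        if unit_path.startswith(project_prefix):
            return config_path
    # The real plugin consults a second map of per-prefix defaults before this fallback.
    return DEFAULT_TIDY_CONFIG
```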
diff --git a/build/conf/project_specific/yql_udf.conf b/build/conf/project_specific/yql_udf.conf index badaf36687..5c89a751ac 100644 --- a/build/conf/project_specific/yql_udf.conf +++ b/build/conf/project_specific/yql_udf.conf @@ -1,4 +1,4 @@ -when ($SANITIZER_TYPE || $USE_ARCADIA_PYTHON == "no" || $UDF_NO_PROBE == "yes" || $MUSL == "yes" || $TIDY == "yes") { +when ($SANITIZER_TYPE || $USE_ARCADIA_PYTHON == "no" || $UDF_NO_PROBE == "yes" || $MUSL == "yes" || $TIDY == "yes") { YQL_UDF_LINK_CMD=$LINK_DYN_LIB YQL_UDF_LINK_PRG_CMD=$LINK_EXEC_DYN_LIB } @@ -117,7 +117,7 @@ module _YQL_UDF_PROGRAM_BASE: SO_PROGRAM { ### @see: [YQL_UDF_MODULE()](#module_YQL_UDF_MODULE) multimodule YQL_UDF { module YQL_UDF_SHARED: YQL_UDF_MODULE { - NO_CLANG_TIDY() + NO_CLANG_TIDY() } module YQL_UDF_STATIC: _DLL_COMPATIBLE_LIBRARY { .ALIASES=SRCS=GLOBAL_SRCS diff --git a/build/config/tests/clang_tidy/config.yaml b/build/config/tests/clang_tidy/config.yaml index d55707592c..78cfba0d63 100644 --- a/build/config/tests/clang_tidy/config.yaml +++ b/build/config/tests/clang_tidy/config.yaml @@ -4,26 +4,26 @@ Checks: > bugprone-use-after-move, readability-identifier-naming, CheckOptions: - - key: readability-identifier-naming.ClassCase - value: CamelCase + - key: readability-identifier-naming.ClassCase + value: CamelCase - key: readability-identifier-naming.PublicMemberCase value: CamelCase - key: readability-identifier-naming.ProtectedMemberCase value: CamelCase - key: readability-identifier-naming.ProtectedMemberSuffix value: _ - - key: readability-identifier-naming.PrivateMemberCase - value: CamelCase + - key: readability-identifier-naming.PrivateMemberCase + value: CamelCase - key: readability-identifier-naming.PrivateMemberSuffix value: _ - - key: readability-identifier-naming.FunctionCase - value: CamelCase - # do not tide public begin / end methods to workaround clang-tidy bug in range-based for loops - - key: readability-identifier-naming.PublicMethodIgnoredRegexp + - key: readability-identifier-naming.FunctionCase + value: CamelCase + # do not tide public begin / end methods to workaround clang-tidy bug in range-based for loops + - key: readability-identifier-naming.PublicMethodIgnoredRegexp value: "(begin|end|empty|size|ysize|front|back)" - - key: readability-identifier-naming.PublicMethodCase - value: CamelCase - - key: readability-identifier-naming.ProtectedMethodCase - value: CamelCase - - key: readability-identifier-naming.PrivateMethodCase - value: CamelCase + - key: readability-identifier-naming.PublicMethodCase + value: CamelCase + - key: readability-identifier-naming.ProtectedMethodCase + value: CamelCase + - key: readability-identifier-naming.PrivateMethodCase + value: CamelCase diff --git a/build/plugins/ytest.py b/build/plugins/ytest.py index 8970837f0f..dca4e0d5e7 100644 --- a/build/plugins/ytest.py +++ b/build/plugins/ytest.py @@ -30,14 +30,14 @@ VALID_DNS_REQUIREMENTS = ("default", "local", "dns64") BLOCK_SEPARATOR = '=============================================================' SPLIT_FACTOR_MAX_VALUE = 1000 SPLIT_FACTOR_TEST_FILES_MAX_VALUE = 4250 -PARTITION_MODS = ('SEQUENTIAL', 'MODULO') -DEFAULT_TIDY_CONFIG = "build/config/tests/clang_tidy/config.yaml" -DEFAULT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_default_map.json" -PROJECT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_project_map.json" - - -tidy_config_map = None - +PARTITION_MODS = ('SEQUENTIAL', 'MODULO') +DEFAULT_TIDY_CONFIG = "build/config/tests/clang_tidy/config.yaml" 
+DEFAULT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_default_map.json" +PROJECT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_project_map.json" + + +tidy_config_map = None + def ontest_data(unit, *args): ymake.report_configure_error("TEST_DATA is removed in favour of DATA") @@ -61,14 +61,14 @@ def prepare_env(data): return serialize_list(shlex.split(data)) -def is_yt_spec_contain_pool_info(filename): # XXX switch to yson in ymake + perf test for configure - pool_re = re.compile(r"""['"]*pool['"]*\s*?=""") - cypress_root_re = re.compile(r"""['"]*cypress_root['"]*\s*=""") - with open(filename, 'r') as afile: - yt_spec = afile.read() - return pool_re.search(yt_spec) and cypress_root_re.search(yt_spec) - - +def is_yt_spec_contain_pool_info(filename): # XXX switch to yson in ymake + perf test for configure + pool_re = re.compile(r"""['"]*pool['"]*\s*?=""") + cypress_root_re = re.compile(r"""['"]*cypress_root['"]*\s*=""") + with open(filename, 'r') as afile: + yt_spec = afile.read() + return pool_re.search(yt_spec) and cypress_root_re.search(yt_spec) + + def validate_sb_vault(name, value): if not CANON_SB_VAULT_REGEX.match(value): return "sb_vault value '{}' should follow pattern <ENV_NAME>=:<value|file>:<owner>:<vault key>".format(value) @@ -84,8 +84,8 @@ def validate_choice_requirement(name, val, valid): return "Unknown [[imp]]{}[[rst]] requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(name, val, ", ".join(valid)) -def validate_force_sandbox_requirement(name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, check_func): - if is_force_sandbox or not in_autocheck or is_fuzzing or is_ytexec_run: +def validate_force_sandbox_requirement(name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, check_func): + if is_force_sandbox or not in_autocheck or is_fuzzing or is_ytexec_run: if value == 'all': return return validate_numerical_requirement(name, value) @@ -96,16 +96,16 @@ def validate_force_sandbox_requirement(name, value, test_size, is_force_sandbox, # TODO: Remove is_kvm param when there will be guarantees on RAM -def validate_requirement(req_name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run): +def validate_requirement(req_name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run): req_checks = { 'container': validate_numerical_requirement, - 'cpu': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_cpu), + 'cpu': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_cpu), 'disk_usage': validate_numerical_requirement, 'dns': lambda n, v: validate_choice_requirement(n, v, VALID_DNS_REQUIREMENTS), 'kvm': None, 'network': lambda n, v: validate_choice_requirement(n, v, VALID_NETWORK_REQUIREMENTS), - 'ram': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_ram), - 'ram_disk': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_ram_disk), + 'ram': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_ram), + 'ram_disk': lambda n, v: 
validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_ram_disk), 'sb': None, 'sb_vault': validate_sb_vault, } @@ -147,7 +147,7 @@ def validate_test(unit, kw): in_autocheck = "ya:not_autocheck" not in tags and 'ya:manual' not in tags is_fat = 'ya:fat' in tags is_force_sandbox = 'ya:force_distbuild' not in tags and is_fat - is_ytexec_run = 'ya:yt' in tags + is_ytexec_run = 'ya:yt' in tags is_fuzzing = valid_kw.get("FUZZING", False) is_kvm = 'kvm' in requirements_orig requirements = {} @@ -176,7 +176,7 @@ def validate_test(unit, kw): if not errors: for req_name, req_value in requirements.items(): - error_msg = validate_requirement(req_name, req_value, size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run) + error_msg = validate_requirement(req_name, req_value, size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run) if error_msg: errors += [error_msg] @@ -252,7 +252,7 @@ def validate_test(unit, kw): break if valid_kw.get("YT-SPEC"): - if not is_ytexec_run: + if not is_ytexec_run: errors.append("You can use YT_SPEC macro only tests marked with ya:yt tag") else: for filename in get_list("YT-SPEC"): @@ -260,17 +260,17 @@ def validate_test(unit, kw): if not os.path.exists(filename): errors.append("File '{}' specified in the YT_SPEC macro doesn't exist".format(filename)) continue - if is_yt_spec_contain_pool_info(filename) and "ya:external" not in tags: - tags.append("ya:external") - tags.append("ya:yt_research_pool") + if is_yt_spec_contain_pool_info(filename) and "ya:external" not in tags: + tags.append("ya:external") + tags.append("ya:yt_research_pool") if valid_kw.get("USE_ARCADIA_PYTHON") == "yes" and valid_kw.get("SCRIPT-REL-PATH") == "py.test": errors.append("PYTEST_SCRIPT is deprecated") - partition = valid_kw.get('TEST_PARTITION', 'SEQUENTIAL') - if partition not in PARTITION_MODS: - raise ValueError('partition mode should be one of {}, detected: {}'.format(PARTITION_MODS, partition)) - + partition = valid_kw.get('TEST_PARTITION', 'SEQUENTIAL') + if partition not in PARTITION_MODS: + raise ValueError('partition mode should be one of {}, detected: {}'.format(PARTITION_MODS, partition)) + if valid_kw.get('SPLIT-FACTOR'): if valid_kw.get('FORK-MODE') == 'none': errors.append('SPLIT_FACTOR must be use with FORK_TESTS() or FORK_SUBTESTS() macro') @@ -292,7 +292,7 @@ def validate_test(unit, kw): nfiles * value, SPLIT_FACTOR_TEST_FILES_MAX_VALUE, value)) unit_path = get_norm_unit_path(unit) - if not is_fat and "ya:noretries" in tags and not is_ytexec_run \ + if not is_fat and "ya:noretries" in tags and not is_ytexec_run \ and not unit_path.startswith("devtools/") \ and not unit_path.startswith("infra/kernel/") \ and not unit_path.startswith("yt/python/yt") \ @@ -382,37 +382,37 @@ def match_coverage_extractor_requirements(unit): ]) -def get_tidy_config_map(unit): - global tidy_config_map - if tidy_config_map is None: - config_map_path = unit.resolve(os.path.join("$S", PROJECT_TIDY_CONFIG_MAP_PATH)) - with open(config_map_path, 'r') as afile: - tidy_config_map = json.load(afile) - return tidy_config_map - - -def get_default_tidy_config(unit): - unit_path = get_norm_unit_path(unit) - default_config_map_path = unit.resolve(os.path.join("$S", DEFAULT_TIDY_CONFIG_MAP_PATH)) - with open(default_config_map_path, 'r') as afile: - tidy_default_config_map = json.load(afile) - for project_prefix, config_path in tidy_default_config_map.items(): - if unit_path.startswith(project_prefix): - return 
config_path - return DEFAULT_TIDY_CONFIG - - -def get_project_tidy_config(unit): - tidy_map = get_tidy_config_map(unit) - unit_path = get_norm_unit_path(unit) - - for project_prefix, config_path in tidy_map.items(): - if unit_path.startswith(project_prefix): - return config_path - else: - return get_default_tidy_config(unit) - - +def get_tidy_config_map(unit): + global tidy_config_map + if tidy_config_map is None: + config_map_path = unit.resolve(os.path.join("$S", PROJECT_TIDY_CONFIG_MAP_PATH)) + with open(config_map_path, 'r') as afile: + tidy_config_map = json.load(afile) + return tidy_config_map + + +def get_default_tidy_config(unit): + unit_path = get_norm_unit_path(unit) + default_config_map_path = unit.resolve(os.path.join("$S", DEFAULT_TIDY_CONFIG_MAP_PATH)) + with open(default_config_map_path, 'r') as afile: + tidy_default_config_map = json.load(afile) + for project_prefix, config_path in tidy_default_config_map.items(): + if unit_path.startswith(project_prefix): + return config_path + return DEFAULT_TIDY_CONFIG + + +def get_project_tidy_config(unit): + tidy_map = get_tidy_config_map(unit) + unit_path = get_norm_unit_path(unit) + + for project_prefix, config_path in tidy_map.items(): + if unit_path.startswith(project_prefix): + return config_path + else: + return get_default_tidy_config(unit) + + def onadd_ytest(unit, *args): keywords = {"DEPENDS": -1, "DATA": -1, "TIMEOUT": 1, "FORK_MODE": 1, "SPLIT_FACTOR": 1, "FORK_SUBTESTS": 0, "FORK_TESTS": 0} @@ -430,39 +430,39 @@ def onadd_ytest(unit, *args): # Current ymake implementation doesn't allow to call macro inside the 'when' body # that's why we add ADD_YTEST(coverage.extractor) to every PROGRAM entry and check requirements later return - elif flat_args[1] == "clang_tidy" and unit.get("TIDY") != "yes": - # Graph is not prepared - return + elif flat_args[1] == "clang_tidy" and unit.get("TIDY") != "yes": + # Graph is not prepared + return elif flat_args[1] == "no.test": return - test_size = ''.join(spec_args.get('SIZE', [])) or unit.get('TEST_SIZE_NAME') or '' - test_tags = serialize_list(_get_test_tags(unit, spec_args)) - test_timeout = ''.join(spec_args.get('TIMEOUT', [])) or unit.get('TEST_TIMEOUT') or '' - test_requirements = spec_args.get('REQUIREMENTS', []) + get_values_list(unit, 'TEST_REQUIREMENTS_VALUE') - - if flat_args[1] != "clang_tidy" and unit.get("TIDY") == "yes": - # graph changed for clang_tidy tests - if flat_args[1] in ("unittest.py", "gunittest", "g_benchmark"): - flat_args[1] = "clang_tidy" - test_size = 'SMALL' - test_tags = '' - test_timeout = "60" - test_requirements = [] - unit.set(["TEST_YT_SPEC_VALUE", ""]) - else: - return - - if flat_args[1] == "clang_tidy" and unit.get("TIDY") == "yes": - if unit.get("TIDY_CONFIG"): - default_config_path = unit.get("TIDY_CONFIG") - project_config_path = unit.get("TIDY_CONFIG") - else: - default_config_path = get_default_tidy_config(unit) - project_config_path = get_project_tidy_config(unit) - - unit.set(["DEFAULT_TIDY_CONFIG", default_config_path]) - unit.set(["PROJECT_TIDY_CONFIG", project_config_path]) - + test_size = ''.join(spec_args.get('SIZE', [])) or unit.get('TEST_SIZE_NAME') or '' + test_tags = serialize_list(_get_test_tags(unit, spec_args)) + test_timeout = ''.join(spec_args.get('TIMEOUT', [])) or unit.get('TEST_TIMEOUT') or '' + test_requirements = spec_args.get('REQUIREMENTS', []) + get_values_list(unit, 'TEST_REQUIREMENTS_VALUE') + + if flat_args[1] != "clang_tidy" and unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + if flat_args[1] in 
("unittest.py", "gunittest", "g_benchmark"): + flat_args[1] = "clang_tidy" + test_size = 'SMALL' + test_tags = '' + test_timeout = "60" + test_requirements = [] + unit.set(["TEST_YT_SPEC_VALUE", ""]) + else: + return + + if flat_args[1] == "clang_tidy" and unit.get("TIDY") == "yes": + if unit.get("TIDY_CONFIG"): + default_config_path = unit.get("TIDY_CONFIG") + project_config_path = unit.get("TIDY_CONFIG") + else: + default_config_path = get_default_tidy_config(unit) + project_config_path = get_project_tidy_config(unit) + + unit.set(["DEFAULT_TIDY_CONFIG", default_config_path]) + unit.set(["PROJECT_TIDY_CONFIG", project_config_path]) + fork_mode = [] if 'FORK_SUBTESTS' in spec_args: fork_mode.append('subtests') @@ -482,18 +482,18 @@ def onadd_ytest(unit, *args): # TODO get rid of BUILD-FOLDER-PATH 'BUILD-FOLDER-PATH': unit_path, 'BINARY-PATH': "{}/{}".format(unit_path, unit.filename()), - 'GLOBAL-LIBRARY-PATH': unit.global_filename(), + 'GLOBAL-LIBRARY-PATH': unit.global_filename(), 'CUSTOM-DEPENDENCIES': ' '.join(spec_args.get('DEPENDS', []) + get_values_list(unit, 'TEST_DEPENDS_VALUE')), 'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")), 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")), # 'TEST-PRESERVE-ENV': 'da', 'TEST-DATA': serialize_list(test_data), - 'TEST-TIMEOUT': test_timeout, + 'TEST-TIMEOUT': test_timeout, 'FORK-MODE': fork_mode, 'SPLIT-FACTOR': ''.join(spec_args.get('SPLIT_FACTOR', [])) or unit.get('TEST_SPLIT_FACTOR') or '', - 'SIZE': test_size, - 'TAG': test_tags, - 'REQUIREMENTS': serialize_list(test_requirements), + 'SIZE': test_size, + 'TAG': test_tags, + 'REQUIREMENTS': serialize_list(test_requirements), 'TEST-CWD': unit.get('TEST_CWD_VALUE') or '', 'FUZZ-DICTS': serialize_list(spec_args.get('FUZZ_DICTS', []) + get_unit_list_variable(unit, 'FUZZ_DICTS_VALUE')), 'FUZZ-OPTS': serialize_list(spec_args.get('FUZZ_OPTS', []) + get_unit_list_variable(unit, 'FUZZ_OPTS_VALUE')), @@ -503,16 +503,16 @@ def onadd_ytest(unit, *args): 'TEST_IOS_DEVICE_TYPE': unit.get('TEST_IOS_DEVICE_TYPE_VALUE') or '', 'TEST_IOS_RUNTIME_TYPE': unit.get('TEST_IOS_RUNTIME_TYPE_VALUE') or '', 'ANDROID_APK_TEST_ACTIVITY': unit.get('ANDROID_APK_TEST_ACTIVITY_VALUE') or '', - 'TEST_PARTITION': unit.get("TEST_PARTITION") or 'SEQUENTIAL', - 'GO_BENCH_TIMEOUT': unit.get('GO_BENCH_TIMEOUT') or '', + 'TEST_PARTITION': unit.get("TEST_PARTITION") or 'SEQUENTIAL', + 'GO_BENCH_TIMEOUT': unit.get('GO_BENCH_TIMEOUT') or '', } - if flat_args[1] == "go.bench": - if "ya:run_go_benchmark" not in test_record["TAG"]: - return - else: - test_record["TEST-NAME"] += "_bench" - + if flat_args[1] == "go.bench": + if "ya:run_go_benchmark" not in test_record["TAG"]: + return + else: + test_record["TEST-NAME"] += "_bench" + if flat_args[1] == 'fuzz.test' and unit.get('FUZZING') == 'yes': test_record['FUZZING'] = '1' # use all cores if fuzzing requested @@ -544,9 +544,9 @@ def java_srcdirs_to_data(unit, var): def onadd_check(unit, *args): - if unit.get("TIDY") == "yes": - # graph changed for clang_tidy tests - return + if unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + return flat_args, spec_args = _common.sort_by_keywords({"DEPENDS": -1, "TIMEOUT": 1, "DATA": -1, "TAG": -1, "REQUIREMENTS": -1, "FORK_MODE": 1, "SPLIT_FACTOR": 1, "FORK_SUBTESTS": 0, "FORK_TESTS": 0, "SIZE": 1}, args) check_type = flat_args[0] @@ -593,26 +593,26 @@ def onadd_check(unit, *args): script_rel_path = check_type use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') - uid_ext = '' + uid_ext = '' if check_type in 
("check.data", "check.resource"): - if unit.get("VALIDATE_DATA") == "no": - return + if unit.get("VALIDATE_DATA") == "no": + return if check_type == "check.data": - uid_ext = unit.get("SBR_UID_EXT").split(" ", 1)[-1] # strip variable name - data_re = re.compile(r"sbr:/?/?(\d+)=?.*") - data = flat_args[1:] - resources = [] - for f in data: - matched = re.match(data_re, f) - if matched: - resources.append(matched.group(1)) - if resources: - test_files = serialize_list(resources) - else: - return - else: - test_files = serialize_list(flat_args[1:]) - + uid_ext = unit.get("SBR_UID_EXT").split(" ", 1)[-1] # strip variable name + data_re = re.compile(r"sbr:/?/?(\d+)=?.*") + data = flat_args[1:] + resources = [] + for f in data: + matched = re.match(data_re, f) + if matched: + resources.append(matched.group(1)) + if resources: + test_files = serialize_list(resources) + else: + return + else: + test_files = serialize_list(flat_args[1:]) + test_record = { 'TEST-NAME': check_type.lower(), 'TEST-TIMEOUT': test_timeout, @@ -621,9 +621,9 @@ def onadd_check(unit, *args): 'SOURCE-FOLDER-PATH': test_dir, 'CUSTOM-DEPENDENCIES': " ".join(spec_args.get('DEPENDS', [])), 'TEST-DATA': extra_test_data, - "SBR-UID-EXT": uid_ext, + "SBR-UID-EXT": uid_ext, 'SPLIT-FACTOR': '', - 'TEST_PARTITION': 'SEQUENTIAL', + 'TEST_PARTITION': 'SEQUENTIAL', 'FORK-MODE': fork_mode, 'FORK-TEST-FILES': '', 'SIZE': 'SMALL', @@ -652,9 +652,9 @@ def on_register_no_check_imports(unit): def onadd_check_py_imports(unit, *args): - if unit.get("TIDY") == "yes": - # graph changed for clang_tidy tests - return + if unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + return if unit.get('NO_CHECK_IMPORTS_FOR_VALUE').strip() == "": return unit.onpeerdir(['library/python/testing/import_test']) @@ -673,7 +673,7 @@ def onadd_check_py_imports(unit, *args): 'TEST-DATA': '', 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")), 'SPLIT-FACTOR': '', - 'TEST_PARTITION': 'SEQUENTIAL', + 'TEST_PARTITION': 'SEQUENTIAL', 'FORK-MODE': '', 'FORK-TEST-FILES': '', 'SIZE': 'SMALL', @@ -696,13 +696,13 @@ def onadd_check_py_imports(unit, *args): def onadd_pytest_script(unit, *args): - if unit.get("TIDY") == "yes": - # graph changed for clang_tidy tests - return + if unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + return unit.set(["PYTEST_BIN", "no"]) custom_deps = get_values_list(unit, 'TEST_DEPENDS_VALUE') timeout = filter(None, [unit.get(["TEST_TIMEOUT"])]) - + if timeout: timeout = timeout[0] else: @@ -725,9 +725,9 @@ def onadd_pytest_script(unit, *args): def onadd_pytest_bin(unit, *args): - if unit.get("TIDY") == "yes": - # graph changed for clang_tidy tests - return + if unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + return flat, kws = _common.sort_by_keywords({'RUNNER_BIN': 1}, args) if flat: ymake.report_configure_error( @@ -742,9 +742,9 @@ def onadd_pytest_bin(unit, *args): def add_test_to_dart(unit, test_type, binary_path=None, runner_bin=None): - if unit.get("TIDY") == "yes": - # graph changed for clang_tidy tests - return + if unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + return custom_deps = get_values_list(unit, 'TEST_DEPENDS_VALUE') timeout = filter(None, [unit.get(["TEST_TIMEOUT"])]) if timeout: @@ -793,10 +793,10 @@ def extract_java_system_properties(unit, args): def onjava_test(unit, *args): - if unit.get("TIDY") == "yes": - # graph changed for clang_tidy tests - return - + if unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + return + assert 
unit.get('MODULE_TYPE') is not None if unit.get('MODULE_TYPE') == 'JTEST_FOR': @@ -884,10 +884,10 @@ def onjava_test(unit, *args): def onjava_test_deps(unit, *args): - if unit.get("TIDY") == "yes": - # graph changed for clang_tidy tests - return - + if unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + return + assert unit.get('MODULE_TYPE') is not None assert len(args) == 1 mode = args[0] @@ -902,7 +902,7 @@ def onjava_test_deps(unit, *args): 'TEST-TIMEOUT': '', 'TESTED-PROJECT-NAME': path, 'TEST-DATA': '', - 'TEST_PARTITION': 'SEQUENTIAL', + 'TEST_PARTITION': 'SEQUENTIAL', 'FORK-MODE': '', 'SPLIT-FACTOR': '', 'CUSTOM-DEPENDENCIES': ' '.join(get_values_list(unit, 'TEST_DEPENDS_VALUE')), @@ -971,45 +971,45 @@ def _dump_test( use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') if test_cwd: test_cwd = test_cwd.replace("$TEST_CWD_VALUE", "").replace('"MACRO_CALLS_DELIM"', "").strip() - test_name = os.path.basename(binary_path) - test_record = { - 'TEST-NAME': os.path.splitext(test_name)[0], - 'TEST-TIMEOUT': timeout, - 'SCRIPT-REL-PATH': script_rel_path, - 'TESTED-PROJECT-NAME': test_name, - 'SOURCE-FOLDER-PATH': test_dir, - 'CUSTOM-DEPENDENCIES': " ".join(custom_deps), - 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")), - # 'TEST-PRESERVE-ENV': 'da', - 'TEST-DATA': serialize_list(sorted(_common.filter_out_by_keyword(test_data, 'AUTOUPDATED'))), - 'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")), - 'SPLIT-FACTOR': split_factor, - 'TEST_PARTITION': unit.get('TEST_PARTITION') or 'SEQUENTIAL', - 'FORK-MODE': fork_mode, - 'FORK-TEST-FILES': fork_test_files, - 'TEST-FILES': serialize_list(test_files), - 'SIZE': test_size, - 'TAG': serialize_list(tags), - 'REQUIREMENTS': serialize_list(requirements), - 'USE_ARCADIA_PYTHON': use_arcadia_python or '', - 'OLD_PYTEST': 'yes' if old_pytest else 'no', - 'PYTHON-PATHS': serialize_list(python_paths), - 'TEST-CWD': test_cwd or '', - 'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '', - 'BUILD-FOLDER-PATH': _common.strip_roots(unit_path), - 'BLOB': unit.get('TEST_BLOB_DATA') or '', - 'CANONIZE_SUB_PATH': unit.get('CANONIZE_SUB_PATH') or '', - } + test_name = os.path.basename(binary_path) + test_record = { + 'TEST-NAME': os.path.splitext(test_name)[0], + 'TEST-TIMEOUT': timeout, + 'SCRIPT-REL-PATH': script_rel_path, + 'TESTED-PROJECT-NAME': test_name, + 'SOURCE-FOLDER-PATH': test_dir, + 'CUSTOM-DEPENDENCIES': " ".join(custom_deps), + 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")), + # 'TEST-PRESERVE-ENV': 'da', + 'TEST-DATA': serialize_list(sorted(_common.filter_out_by_keyword(test_data, 'AUTOUPDATED'))), + 'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")), + 'SPLIT-FACTOR': split_factor, + 'TEST_PARTITION': unit.get('TEST_PARTITION') or 'SEQUENTIAL', + 'FORK-MODE': fork_mode, + 'FORK-TEST-FILES': fork_test_files, + 'TEST-FILES': serialize_list(test_files), + 'SIZE': test_size, + 'TAG': serialize_list(tags), + 'REQUIREMENTS': serialize_list(requirements), + 'USE_ARCADIA_PYTHON': use_arcadia_python or '', + 'OLD_PYTEST': 'yes' if old_pytest else 'no', + 'PYTHON-PATHS': serialize_list(python_paths), + 'TEST-CWD': test_cwd or '', + 'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '', + 'BUILD-FOLDER-PATH': _common.strip_roots(unit_path), + 'BLOB': unit.get('TEST_BLOB_DATA') or '', + 'CANONIZE_SUB_PATH': unit.get('CANONIZE_SUB_PATH') or '', + } if binary_path: - test_record['BINARY-PATH'] = _common.strip_roots(binary_path) - if runner_bin: - test_record['TEST-RUNNER-BIN'] = runner_bin - if yt_spec: - 
test_record['YT-SPEC'] = serialize_list(yt_spec) - data = dump_test(unit, test_record) - if data: - unit.set_property(["DART_DATA", data]) - save_in_file(unit.get('TEST_DART_OUT_FILE'), data) + test_record['BINARY-PATH'] = _common.strip_roots(binary_path) + if runner_bin: + test_record['TEST-RUNNER-BIN'] = runner_bin + if yt_spec: + test_record['YT-SPEC'] = serialize_list(yt_spec) + data = dump_test(unit, test_record) + if data: + unit.set_property(["DART_DATA", data]) + save_in_file(unit.get('TEST_DART_OUT_FILE'), data) def onsetup_pytest_bin(unit, *args): diff --git a/build/scripts/clang_tidy.py b/build/scripts/clang_tidy.py index eb1b690ee9..80c97743a8 100644 --- a/build/scripts/clang_tidy.py +++ b/build/scripts/clang_tidy.py @@ -1,54 +1,54 @@ -import argparse +import argparse import contextlib -import json +import json import os import re import shutil -import sys +import sys import tempfile - -import subprocess - -import yaml - - -def setup_script(args): - global tidy_config_validation - sys.path.append(os.path.dirname(args.config_validation_script)) - import tidy_config_validation - - -def parse_args(): - parser = argparse.ArgumentParser() + +import subprocess + +import yaml + + +def setup_script(args): + global tidy_config_validation + sys.path.append(os.path.dirname(args.config_validation_script)) + import tidy_config_validation + + +def parse_args(): + parser = argparse.ArgumentParser() parser.add_argument("--testing-src", required=True) parser.add_argument("--clang-tidy-bin", required=True) - parser.add_argument("--config-validation-script", required=True) - parser.add_argument("--ymake-python", required=True) + parser.add_argument("--config-validation-script", required=True) + parser.add_argument("--ymake-python", required=True) parser.add_argument("--tidy-json", required=True) parser.add_argument("--source-root", required=True) - parser.add_argument("--build-root", required=True) - parser.add_argument("--default-config-file", required=True) - parser.add_argument("--project-config-file", required=True) - parser.add_argument("--export-fixes", required=True) - parser.add_argument("--checks", required=False, default="") - parser.add_argument("--header-filter", required=False, default=None) - return parser.parse_known_args() - - + parser.add_argument("--build-root", required=True) + parser.add_argument("--default-config-file", required=True) + parser.add_argument("--project-config-file", required=True) + parser.add_argument("--export-fixes", required=True) + parser.add_argument("--checks", required=False, default="") + parser.add_argument("--header-filter", required=False, default=None) + return parser.parse_known_args() + + def generate_compilation_database(clang_cmd, source_root, filename, path): compile_database = [ { "file": filename, - "command": subprocess.list2cmdline(clang_cmd), + "command": subprocess.list2cmdline(clang_cmd), "directory": source_root, } ] compilation_database_json = os.path.join(path, "compile_commands.json") with open(compilation_database_json, "w") as afile: - json.dump(compile_database, afile) - return compilation_database_json - - + json.dump(compile_database, afile) + return compilation_database_json + + @contextlib.contextmanager def gen_tmpdir(): path = tempfile.mkdtemp() @@ -56,13 +56,13 @@ def gen_tmpdir(): shutil.rmtree(path) -@contextlib.contextmanager -def gen_tmpfile(): - _, path = tempfile.mkstemp() - yield path - os.remove(path) - - +@contextlib.contextmanager +def gen_tmpfile(): + _, path = tempfile.mkstemp() + yield path + os.remove(path) + 
+ def load_profile(path): if os.path.exists(path): files = os.listdir(path) @@ -78,53 +78,53 @@ def load_profile(path): } -def load_fixes(path): - if os.path.exists(path): - with open(path, 'r') as afile: - return afile.read() - else: - return "" - - -def is_generated(testing_src, build_root): - return testing_src.startswith(build_root) - - -def generate_outputs(output_json): - output_obj = os.path.splitext(output_json)[0] + ".o" - open(output_obj, "w").close() - open(output_json, "w").close() - - -def filter_configs(result_config, filtered_config): - with open(result_config, 'r') as afile: - input_config = yaml.safe_load(afile) - result_config = tidy_config_validation.filter_config(input_config) - with open(filtered_config, 'w') as afile: - yaml.safe_dump(result_config, afile) - - -def main(): - args, clang_cmd = parse_args() - setup_script(args) - clang_tidy_bin = args.clang_tidy_bin - output_json = args.tidy_json - generate_outputs(output_json) - if is_generated(args.testing_src, args.build_root): - return - if args.header_filter is None: - header_filter = r"^" + re.escape(os.path.dirname(args.testing_src)) + r".*" # .pb.h files will be excluded because they are not in source_root - else: - header_filter = r"^(" + args.header_filter + r").*" - - with gen_tmpdir() as profile_tmpdir, gen_tmpdir() as db_tmpdir, gen_tmpfile() as fixes_file, gen_tmpdir() as config_dir: - result_config_file = args.default_config_file - if args.project_config_file != args.default_config_file: - result_config = os.path.join(config_dir, "result_tidy_config.yaml") - filtered_config = os.path.join(config_dir, "filtered_tidy_config.yaml") - filter_configs(args.project_config_file, filtered_config) - result_config_file = tidy_config_validation.merge_tidy_configs(base_config_path=args.default_config_file, additional_config_path=filtered_config, result_config_path=result_config) - compile_command_path = generate_compilation_database(clang_cmd, args.source_root, args.testing_src, db_tmpdir) +def load_fixes(path): + if os.path.exists(path): + with open(path, 'r') as afile: + return afile.read() + else: + return "" + + +def is_generated(testing_src, build_root): + return testing_src.startswith(build_root) + + +def generate_outputs(output_json): + output_obj = os.path.splitext(output_json)[0] + ".o" + open(output_obj, "w").close() + open(output_json, "w").close() + + +def filter_configs(result_config, filtered_config): + with open(result_config, 'r') as afile: + input_config = yaml.safe_load(afile) + result_config = tidy_config_validation.filter_config(input_config) + with open(filtered_config, 'w') as afile: + yaml.safe_dump(result_config, afile) + + +def main(): + args, clang_cmd = parse_args() + setup_script(args) + clang_tidy_bin = args.clang_tidy_bin + output_json = args.tidy_json + generate_outputs(output_json) + if is_generated(args.testing_src, args.build_root): + return + if args.header_filter is None: + header_filter = r"^" + re.escape(os.path.dirname(args.testing_src)) + r".*" # .pb.h files will be excluded because they are not in source_root + else: + header_filter = r"^(" + args.header_filter + r").*" + + with gen_tmpdir() as profile_tmpdir, gen_tmpdir() as db_tmpdir, gen_tmpfile() as fixes_file, gen_tmpdir() as config_dir: + result_config_file = args.default_config_file + if args.project_config_file != args.default_config_file: + result_config = os.path.join(config_dir, "result_tidy_config.yaml") + filtered_config = os.path.join(config_dir, "filtered_tidy_config.yaml") + 
filter_configs(args.project_config_file, filtered_config) + result_config_file = tidy_config_validation.merge_tidy_configs(base_config_path=args.default_config_file, additional_config_path=filtered_config, result_config_path=result_config) + compile_command_path = generate_compilation_database(clang_cmd, args.source_root, args.testing_src, db_tmpdir) cmd = [ clang_tidy_bin, args.testing_src, @@ -133,38 +133,38 @@ def main(): "--warnings-as-errors", "*", "--config-file", - result_config_file, + result_config_file, "--header-filter", header_filter, "--use-color", "--enable-check-profile", - "--store-check-profile={}".format(profile_tmpdir), + "--store-check-profile={}".format(profile_tmpdir), ] - if args.export_fixes == "yes": - cmd += ["--export-fixes", fixes_file] - - if args.checks: - cmd += ["--checks", args.checks] + if args.export_fixes == "yes": + cmd += ["--export-fixes", fixes_file] + + if args.checks: + cmd += ["--checks", args.checks] res = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = res.communicate() exit_code = res.returncode - profile = load_profile(profile_tmpdir) - testing_src = os.path.relpath(args.testing_src, args.source_root) - tidy_fixes = load_fixes(fixes_file) + profile = load_profile(profile_tmpdir) + testing_src = os.path.relpath(args.testing_src, args.source_root) + tidy_fixes = load_fixes(fixes_file) with open(output_json, "wb") as afile: json.dump( { - "file": testing_src, + "file": testing_src, "exit_code": exit_code, "profile": profile, "stderr": err, "stdout": out, - "fixes": tidy_fixes, + "fixes": tidy_fixes, }, afile, ) - -if __name__ == "__main__": - main() + +if __name__ == "__main__": + main() diff --git a/build/scripts/clang_tidy_arch.py b/build/scripts/clang_tidy_arch.py index 7caf623a3d..b142a8d48a 100644 --- a/build/scripts/clang_tidy_arch.py +++ b/build/scripts/clang_tidy_arch.py @@ -1,33 +1,33 @@ -import os -import argparse -import json - - -def parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument("--output-file") - parser.add_argument("--build-root") - parser.add_argument("--source-root") - return parser.parse_known_args() - - -def main(): - args, unknown_args = parse_args() - inputs = unknown_args - result_json = {} - for inp in inputs: - if os.path.exists(inp) and inp.endswith("tidyjson"): - with open(inp, 'r') as afile: - file_content = afile.read().strip() - if not file_content: - continue - errors = json.loads(file_content) - testing_src = errors["file"] - result_json[testing_src] = errors - - with open(args.output_file, 'w') as afile: - json.dump(result_json, afile, indent=4) # TODO remove indent - - -if __name__ == "__main__": - main() +import os +import argparse +import json + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument("--output-file") + parser.add_argument("--build-root") + parser.add_argument("--source-root") + return parser.parse_known_args() + + +def main(): + args, unknown_args = parse_args() + inputs = unknown_args + result_json = {} + for inp in inputs: + if os.path.exists(inp) and inp.endswith("tidyjson"): + with open(inp, 'r') as afile: + file_content = afile.read().strip() + if not file_content: + continue + errors = json.loads(file_content) + testing_src = errors["file"] + result_json[testing_src] = errors + + with open(args.output_file, 'w') as afile: + json.dump(result_json, afile, indent=4) # TODO remove indent + + +if __name__ == "__main__": + main() diff --git a/build/ya.conf.json b/build/ya.conf.json index 5f7cc875d6..e40510b81c 
100644 --- a/build/ya.conf.json +++ b/build/ya.conf.json @@ -71,9 +71,9 @@ "pprof": { "description": "Run pprof" }, - "ytexec": { - "description": "Run ytexec" - }, + "ytexec": { + "description": "Run ytexec" + }, "gdb": { "description": "Run gdb" }, @@ -2690,22 +2690,22 @@ } ] }, - "ytexec": { - "tools": { - "ytexec": { - "bottle": "ytexec", - "executable": "ytexec" - } - }, - "platforms": [ - { - "host": { - "os": "LINUX" - }, - "default": true - } - ] - }, + "ytexec": { + "tools": { + "ytexec": { + "bottle": "ytexec", + "executable": "ytexec" + } + }, + "platforms": [ + { + "host": { + "os": "LINUX" + }, + "default": true + } + ] + }, "gdb": { "tools": { "gdb": { @@ -6756,18 +6756,18 @@ ] } }, - "ytexec": { - "formula": { + "ytexec": { + "formula": { "sandbox_id": 1101655914, - "match": "ytexec" - }, - "executable": { - "ytexec": [ - "ytexec", - "ytexec" - ] - } - }, + "match": "ytexec" + }, + "executable": { + "ytexec": [ + "ytexec", + "ytexec" + ] + } + }, "ag": { "formula": { "sandbox_id": 75851513, diff --git a/build/ya.make b/build/ya.make index 407b8f13e0..a275e71d5b 100644 --- a/build/ya.make +++ b/build/ya.make @@ -34,5 +34,5 @@ RECURSE( scripts tests tests_slow - yandex_specific/config + yandex_specific/config ) diff --git a/build/ymake.core.conf b/build/ymake.core.conf index 081833998b..27fea2c3f9 100644 --- a/build/ymake.core.conf +++ b/build/ymake.core.conf @@ -87,29 +87,29 @@ when ($HOST_ARCH_X86_64 == "yes") { FAIL_MODULE_CMD=$YMAKE_PYTHON ${input:"build/scripts/fail_module_cmd.py"} $TARGET ${kv;hide:"p ER"} ${kv;hide:"pc red"} -when ($TIDY == "yes") { - when ($TIDY_EXPORT_FIXES=="") { - TIDY_EXPORT_FIXES="no" - } - - CLANG_TIDY_ARGS=${hide:CLANG_TIDY_FAKEID} $YMAKE_PYTHON ${input:"build/scripts/clang_tidy.py"} "--ymake-python" $YMAKE_PYTHON "--clang-tidy-bin" "$(CLANG_TIDY_BIN)/bin/clang-tidy" "--config-validation-script" ${input:"build/tests/config/clang_tidy/tidy_config_validation.py"} "--testing-src" ${input:SRC} "--source-root" $(SOURCE_ROOT) "--build-root" $(BUILD_ROOT) "--tidy-json" ${output;noauto;suf=${OBJ_CROSS_SUF}${COMPILE_OUT_SUFFIX}.tidyjson:SRC} "--export-fixes" $TIDY_EXPORT_FIXES - CLANG_TIDY_ARGS+="--default-config-file" ${input:DEFAULT_TIDY_CONFIG} - CLANG_TIDY_ARGS+="--project-config-file" ${input:PROJECT_TIDY_CONFIG} - when ($TIDY_CHECKS) { - CLANG_TIDY_ARGS+="--checks=$TIDY_CHECKS" - } - - when ($TIDY_HEADER_FILTER) { - CLANG_TIDY_ARGS+="--header-filter=$TIDY_HEADER_FILTER" - } - - TIDY_VALUE=USE_CONDITIONAL_SRCS -} -otherwise { - CLANG_TIDY_ARGS= - TIDY_VALUE= -} - +when ($TIDY == "yes") { + when ($TIDY_EXPORT_FIXES=="") { + TIDY_EXPORT_FIXES="no" + } + + CLANG_TIDY_ARGS=${hide:CLANG_TIDY_FAKEID} $YMAKE_PYTHON ${input:"build/scripts/clang_tidy.py"} "--ymake-python" $YMAKE_PYTHON "--clang-tidy-bin" "$(CLANG_TIDY_BIN)/bin/clang-tidy" "--config-validation-script" ${input:"build/tests/config/clang_tidy/tidy_config_validation.py"} "--testing-src" ${input:SRC} "--source-root" $(SOURCE_ROOT) "--build-root" $(BUILD_ROOT) "--tidy-json" ${output;noauto;suf=${OBJ_CROSS_SUF}${COMPILE_OUT_SUFFIX}.tidyjson:SRC} "--export-fixes" $TIDY_EXPORT_FIXES + CLANG_TIDY_ARGS+="--default-config-file" ${input:DEFAULT_TIDY_CONFIG} + CLANG_TIDY_ARGS+="--project-config-file" ${input:PROJECT_TIDY_CONFIG} + when ($TIDY_CHECKS) { + CLANG_TIDY_ARGS+="--checks=$TIDY_CHECKS" + } + + when ($TIDY_HEADER_FILTER) { + CLANG_TIDY_ARGS+="--header-filter=$TIDY_HEADER_FILTER" + } + + TIDY_VALUE=USE_CONDITIONAL_SRCS +} +otherwise { + CLANG_TIDY_ARGS= + TIDY_VALUE= +} + # tag:codenav when 
($CODENAVIGATION && $NOCODENAVIGATION != "yes") { PY_PROGRAM_LINK_EXE=$LINK_EXE ${kv;hide:"pyndex $TARGET"} @@ -1595,12 +1595,12 @@ module _LINK_UNIT: _BASE_UNIT { MODULE_TYPE=UNKNOWN -macro ADD_CLANG_TIDY() { - ADD_YTEST($MODULE_PREFIX$REALPRJNAME clang_tidy) - SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${ARCADIA_ROOT}/build/yandex_specific/config/clang_tidy/tidy_project_map.json) - SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${ARCADIA_ROOT}/build/yandex_specific/config/clang_tidy/tidy_default_map.json) -} - +macro ADD_CLANG_TIDY() { + ADD_YTEST($MODULE_PREFIX$REALPRJNAME clang_tidy) + SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${ARCADIA_ROOT}/build/yandex_specific/config/clang_tidy/tidy_project_map.json) + SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${ARCADIA_ROOT}/build/yandex_specific/config/clang_tidy/tidy_default_map.json) +} + # tag:internal ### @usage: _BASE_PROGRAM # internal ### @@ -1619,10 +1619,10 @@ module _BASE_PROGRAM: _LINK_UNIT { MODULE_SUFFIX=.exe } - when ($TIDY == "yes") { - MODULE_SUFFIX=.tidyjson - } - + when ($TIDY == "yes") { + MODULE_SUFFIX=.tidyjson + } + when ($MSVC != "yes" && $NOPLATFORM != "yes" && $WITH_VALGRIND != "yes" && $USE_ASMLIB != "no" && $MIC_ARCH != "yes" && $PIC != "yes" && $PIE != "yes") { PEERDIR+=contrib/libs/asmlib } @@ -1676,7 +1676,7 @@ module PROGRAM: _BASE_PROGRAM { .SEM=CPP_PROGRAM_SEM ADD_YTEST($MODULE_PREFIX$REALPRJNAME coverage.extractor) - ADD_CLANG_TIDY() + ADD_CLANG_TIDY() SET(MODULE_LANG CPP) } @@ -1758,12 +1758,12 @@ macro _YCR_GENERATE_CONFIGS(Package, App) { } # tag:ycr-specific -when ($TIDY=="yes") { - YCR_LINK_EXE=$LINK_EXE -} -otherwise { - YCR_LINK_EXE=$LINK_EXE && $_YCR_GENERATE_CONFIGS($MODULE_PREFIX$REALPRJNAME $MODULE_PREFIX$REALPRJNAME) -} +when ($TIDY=="yes") { + YCR_LINK_EXE=$LINK_EXE +} +otherwise { + YCR_LINK_EXE=$LINK_EXE && $_YCR_GENERATE_CONFIGS($MODULE_PREFIX$REALPRJNAME $MODULE_PREFIX$REALPRJNAME) +} # tag:ycr-specific ### @usage: YCR_PROGRAM([progname]) @@ -2026,10 +2026,10 @@ macro TEST_SRCS(Tests...) { SET_APPEND(TEST_SRCS_VALUE $Tests) } -macro DISABLE_DATA_VALIDATION() { - DISABLE(VALIDATE_DATA) -} - +macro DISABLE_DATA_VALIDATION() { + DISABLE(VALIDATE_DATA) +} + # tag:test TEST_DATA_VALUE= ### @usage: DATA([path...]) @@ -2043,7 +2043,7 @@ TEST_DATA_VALUE= ### Documentation: https://wiki.yandex-team.ru/yatool/test/#dannyeizrepozitorija macro DATA(Data...) { SET_APPEND(TEST_DATA_VALUE $Data) - ADD_CHECK(check.data $Data) + ADD_CHECK(check.data $Data) } # tag:test @@ -2313,7 +2313,7 @@ module Y_BENCHMARK: PROGRAM { ### Benchmark test based on the google benchmark. 
### ### For more details see: https://a.yandex-team.ru/arc/trunk/arcadia/contrib/libs/benchmark/README.md -module G_BENCHMARK: _BASE_PROGRAM { +module G_BENCHMARK: _BASE_PROGRAM { PEERDIR(library/cpp/testing/gbenchmark_main) ADD_YTEST($MODULE_PREFIX$REALPRJNAME g_benchmark) SET(MODULE_LANG CPP) @@ -2362,7 +2362,7 @@ module _LIBRARY: _BASE_UNIT { .RESTRICTED=ALLOCATOR SIZE TAG DATA TEST_DATA DEPENDS FORK_TESTS FORK_SUBTESTS SPLIT_FACTOR TEST_CWD RUN TIMEOUT SPLIT_DWARF .FINAL_TARGET=no .GLOBAL_CMD=GLOBAL_LINK_LIB - .GLOBAL_EXTS=.o .obj .tidyjson + .GLOBAL_EXTS=.o .obj .tidyjson .ALIASES=RES_REQUIREMENTS=PY_REQUIREMENTS GLOBAL_SUFFIX=.global$MODULE_SUFFIX @@ -2380,14 +2380,14 @@ module _LIBRARY: _BASE_UNIT { MODULE_SUFFIX=.lib } otherwise { - when ($TIDY == "yes") { - MODULE_PREFIX= - MODULE_SUFFIX=.tidyjson - } - otherwise { - MODULE_PREFIX=lib - MODULE_SUFFIX=.a - } + when ($TIDY == "yes") { + MODULE_PREFIX= + MODULE_SUFFIX=.tidyjson + } + otherwise { + MODULE_PREFIX=lib + MODULE_SUFFIX=.a + } } when ($WERROR != "no") { @@ -2455,12 +2455,12 @@ module LIBRARY: _LIBRARY { } when ($HAS_CPP_PROTOBUF_PEERS == "yes") { - PEERDIR+=$CPP_PROTOBUF_PEERS + PEERDIR+=$CPP_PROTOBUF_PEERS } SET(MODULE_TYPE LIBRARY) SET(MODULE_LANG CPP) - - ADD_CLANG_TIDY() + + ADD_CLANG_TIDY() } # tag:internal @@ -2475,7 +2475,7 @@ macro _BARE_MODULE() { NO_PLATFORM() NO_RUNTIME() NO_UTIL() - NO_CLANG_TIDY() + NO_CLANG_TIDY() } # tag:internal @@ -2488,14 +2488,14 @@ macro _BARE_LINK_MODULE() { } # tag:internal -### @usage: _CONDITIONAL_SRCS([USE_CONDITIONAL_SRCS] Files...) # internal -### -### Adds Files... to SRCS if first word is `USE_CONDITIONAL_SRCS` -### To be used with some variable which is set to `USE_CONDITIONAL_SRCS` under condition -macro _CONDITIONAL_SRCS(USE_CONDITIONAL_SRCS[], DYMMY...) { - SRCS($USE_CONDITIONAL_SRCS) -} - +### @usage: _CONDITIONAL_SRCS([USE_CONDITIONAL_SRCS] Files...) # internal +### +### Adds Files... to SRCS if first word is `USE_CONDITIONAL_SRCS` +### To be used with some variable which is set to `USE_CONDITIONAL_SRCS` under condition +macro _CONDITIONAL_SRCS(USE_CONDITIONAL_SRCS[], DYMMY...) { + SRCS($USE_CONDITIONAL_SRCS) +} + # XXX: dirty hack for correct LDFLAGS passing RESOURCES_LIBRARY_LINK=$TOUCH_UNIT ${hide:LDFLAGS_GLOBAL} ${hide:LDFLAGS} RESOURCES_LIBRARY_SEM=ignored @@ -2603,8 +2603,8 @@ module DLL_UNIT: _LINK_UNIT { SET(MODULE_TYPE DLL) SET(MODULE_TAG DLL) SET(MODULE_LANG CPP) - ADD_CLANG_TIDY() - + ADD_CLANG_TIDY() + when ($MSVC == "yes" || $CYGWIN == "yes") { MODULE_SUFFIX=.dll } @@ -2612,10 +2612,10 @@ module DLL_UNIT: _LINK_UNIT { MODULE_PREFIX=lib MODULE_SUFFIX=.dylib } - elsewhen ($TIDY == "yes") { - MODULE_PREFIX= - MODULE_SUFFIX=.tidyjson - } + elsewhen ($TIDY == "yes") { + MODULE_PREFIX= + MODULE_SUFFIX=.tidyjson + } otherwise { MODULE_PREFIX=lib MODULE_SUFFIX=.so @@ -3039,7 +3039,7 @@ module CI_GROUP: _BARE_UNIT { .ALL_INS_TO_OUT=yes .FINAL_TARGET=yes .RESTRICTED=SRCS - MODULE_SUFFIX=.ci.pkg.fake + MODULE_SUFFIX=.ci.pkg.fake PEERDIR_TAGS=CPP_PROTO PY3 PY3_NATIVE YQL_UDF_SHARED __EMPTY__ DOCBOOK JAR_RUNNABLE DLL } @@ -5490,19 +5490,19 @@ macro TIMEOUT(Time) { SET(TEST_TIMEOUT $Time) } -SBR_UID_EXT="" -# tag:test -### @usage: VALIDATE_DATA_RESTART(ext) -### -### Change uid for resource validation tests. May be useful when sandbox resource ttl is changed, but test status is cached in CI. -### You can change ext to change test's uid. For example VALIDATE_DATA_RESTART(X), where is X is current revision. 
-macro VALIDATE_DATA_RESTART(Ext) { - SET(SBR_UID_EXT $Ext) -} - +SBR_UID_EXT="" # tag:test +### @usage: VALIDATE_DATA_RESTART(ext) +### +### Change uid for resource validation tests. May be useful when sandbox resource ttl is changed, but test status is cached in CI. +### You can change ext to change test's uid. For example VALIDATE_DATA_RESTART(X), where is X is current revision. +macro VALIDATE_DATA_RESTART(Ext) { + SET(SBR_UID_EXT $Ext) +} + +# tag:test TEST_FORK_MODE=none -TEST_PARTITION=SEQUENTIAL +TEST_PARTITION=SEQUENTIAL # tag:test ### @usage: FORK_TESTS() @@ -5513,9 +5513,9 @@ TEST_PARTITION=SEQUENTIAL ### Allows to run tests in parallel. Supported in UNITTEST, JTEST/JUNIT5 and PY2TEST/PY3TEST modules. ### ### Documentation about the system test: https://wiki.yandex-team.ru/yatool/test/ -macro FORK_TESTS(MODE...) { +macro FORK_TESTS(MODE...) { SET(TEST_FORK_MODE tests) - SET(TEST_PARTITION $MODE) + SET(TEST_PARTITION $MODE) } # tag:test @@ -5527,9 +5527,9 @@ macro FORK_TESTS(MODE...) { ### Allows to run tests in parallel. Supported in UNITTEST, JTEST/JUNIT5 and PY2TEST/PY3TEST modules. ### ### Documentation about the system test: https://wiki.yandex-team.ru/yatool/test/ -macro FORK_SUBTESTS(MODE...) { +macro FORK_SUBTESTS(MODE...) { SET(TEST_FORK_MODE subtests) - SET(TEST_PARTITION $MODE) + SET(TEST_PARTITION $MODE) } # tag:test @@ -5577,7 +5577,7 @@ macro SIZE(Type) { macro JOIN_SRCS(Out, Src...) { .CMD=$YMAKE_PYTHON ${input:"build/scripts/gen_join_srcs.py"} ${output:Out} --ya-start-command-file ${input;rootrel:Src} --ya-end-command-file ${output_include;hide:Src} ${kv;hide:"p JS"} ${kv;hide:"pc magenta"} .SEM=target_joined_source $Out ${input:Src} ${output;hide;suf=.o:Out} && modules_required yandex_common.cmake - _CONDITIONAL_SRCS($TIDY_VALUE $Src) + _CONDITIONAL_SRCS($TIDY_VALUE $Src) } ### @usage: JOIN_SRCS_GLOBAL(Out Src...) @@ -6750,10 +6750,10 @@ macro NO_CLANG_COVERAGE() { DISABLE(CLANG_COVERAGE) } -macro NO_CLANG_TIDY() { - DISABLE(TIDY) -} - +macro NO_CLANG_TIDY() { + DISABLE(TIDY) +} + # tag:python-specific tag:coverage ### @usage: NO_PYTHON_COVERAGE() ### @@ -6984,7 +6984,7 @@ macro SYMLINK(From, To) { ### - OUT[_NOAUTO] outputs... - Output files. NOAUTO outputs are not automatically added to the build process. ### - STDOUT[_NOAUTO] output - Redirect the standard output to the output file. ### - OUTPUT_INCLUDES output_includes... - Includes of the output files that are needed to build them. -### - REQUIREMENTS - Override default requirements for CPU and RAM +### - REQUIREMENTS - Override default requirements for CPU and RAM ### ### For absolute paths use ${ARCADIA_ROOT} and ${ARCADIA_BUILD_ROOT}, or ### ${CURDIR} and ${BINDIR} which are expanded where the outputs are used. @@ -7009,7 +7009,7 @@ macro RUN_PROGRAM(Tool, IN{input}[], OUT{output}[], OUT_NOAUTO{output}[], TOOL{t ### - OUT[_NOAUTO] outputs... - Output files. NOAUTO outputs are not automatically added to the build process. ### - STDOUT[_NOAUTO] output - Redirect the standard output to the output file. ### - OUTPUT_INCLUDES output_includes... - Includes of the output files that are needed to build them. -### - REQUIREMENTS - Override default requirements for CPU and RAM +### - REQUIREMENTS - Override default requirements for CPU and RAM ### ### For absolute paths use ${ARCADIA_ROOT} and ${ARCADIA_BUILD_ROOT}, or ### ${CURDIR} and ${BINDIR} which are expanded where the outputs are used. 
@@ -7033,7 +7033,7 @@ macro LUA(ScriptPath, IN{input}[], OUT{output}[], OUT_NOAUTO{output}[], TOOL{too ### - OUT[_NOAUTO] outputs... - Output files. NOAUTO outputs are not automatically added to the build process. ### - STDOUT[_NOAUTO] output - Redirect the standard output to the output file. ### - OUTPUT_INCLUDES output_includes... - Includes of the output files that are needed to build them. -### - REQUIREMENTS - Override default requirements for CPU and RAM +### - REQUIREMENTS - Override default requirements for CPU and RAM ### ### For absolute paths use ${ARCADIA_ROOT} and ${ARCADIA_BUILD_ROOT}, or ### ${CURDIR} and ${BINDIR} which are expanded where the outputs are used. @@ -7482,7 +7482,7 @@ multimodule PROTO_LIBRARY { .SEM=PROTO_LIBRARY_SEM ENABLE(CPP_PROTO) ENABLE(GEN_PROTO) - NO_CLANG_TIDY() + NO_CLANG_TIDY() SET(PEERDIR_TAGS CPP_PROTO) when ($BUILD_PROTO_AS_EVLOG == "yes" && $USE_VANILLA_PROTOC == "yes") { @@ -8630,15 +8630,15 @@ module GO_DLL: _GO_DLL_BASE_UNIT { } # tag:go-specific tag:test -### @usage: GO_BENCH_TIMEOUT(x) -### -### Sets timeout in seconds for 1 Benchmark in go benchmark suite -### -### Documentation about the system test: https://wiki.yandex-team.ru/yatool/test/ -macro GO_BENCH_TIMEOUT(bench_timeout) { - SET(GO_BENCH_TIMEOUT $bench_timeout) -} - +### @usage: GO_BENCH_TIMEOUT(x) +### +### Sets timeout in seconds for 1 Benchmark in go benchmark suite +### +### Documentation about the system test: https://wiki.yandex-team.ru/yatool/test/ +macro GO_BENCH_TIMEOUT(bench_timeout) { + SET(GO_BENCH_TIMEOUT $bench_timeout) +} + # tag:go-specific tag:test ### @usage: GO_TEST([name]) ### diff --git a/build/ymake_conf.py b/build/ymake_conf.py index 30219eb85e..fb5a639e2d 100755 --- a/build/ymake_conf.py +++ b/build/ymake_conf.py @@ -1666,7 +1666,7 @@ class GnuCompiler(Compiler): style = ['${requirements;hide:CC_REQUIREMENTS} ${hide;kv:"p CC"} ${hide;kv:"pc green"}'] cxx_args = [ - '$CLANG_TIDY_ARGS', + '$CLANG_TIDY_ARGS', '$YNDEXER_ARGS', '$CXX_COMPILER', '$C_FLAGS_PLATFORM', @@ -1680,12 +1680,12 @@ class GnuCompiler(Compiler): '$_LANG_CFLAGS_VALUE', '${input:SRC}', '$TOOLCHAIN_ENV', - '$YNDEXER_OUTPUT', + '$YNDEXER_OUTPUT', '&& $COMPILER_TIME_TRACE_POSTPROCESS', ] + style c_args = [ - '$CLANG_TIDY_ARGS', + '$CLANG_TIDY_ARGS', '$YNDEXER_ARGS', '$C_COMPILER', '$C_FLAGS_PLATFORM', @@ -1699,7 +1699,7 @@ class GnuCompiler(Compiler): '$SRCFLAGS', '${input:SRC}', '$TOOLCHAIN_ENV', - '$YNDEXER_OUTPUT', + '$YNDEXER_OUTPUT', '&& $COMPILER_TIME_TRACE_POSTPROCESS', ] + style @@ -2054,25 +2054,25 @@ class LD(Linker): ld_env_style = '${cwd:ARCADIA_BUILD_ROOT} $TOOLCHAIN_ENV ${kv;hide:"p LD"} ${requirements;hide:LD_REQUIREMENTS} ${kv;hide:"pc light-blue"} ${kv;hide:"show_out"}' # Program - emit( + emit( "GENERATE_MF_CMD", - '$YMAKE_PYTHON', '${input:"build/scripts/generate_mf.py"}', + '$YMAKE_PYTHON', '${input:"build/scripts/generate_mf.py"}', '--build-root $ARCADIA_BUILD_ROOT --module-name $REALPRJNAME -o ${output;pre=$MODULE_PREFIX;suf=$MODULE_SUFFIX.mf:REALPRJNAME}', '-t $MODULE_TYPE --ya-start-command-file -Ya,lics $LICENSE_NAMES -Ya,peers ${rootrel:PEERS} -Ya,credits ${input:CREDITS_TEXTS_FILE} $CREDITS_FLAGS --ya-end-command-file', - ) - if is_positive("TIDY"): - emit( - 'REAL_LINK_EXE', - '$YMAKE_PYTHON ${input:"build/scripts/clang_tidy_arch.py"}', - '--build-root $ARCADIA_BUILD_ROOT', - '--source-root $ARCADIA_ROOT', - '--output-file', - '$TARGET', - '$AUTO_INPUT', - ld_env_style - ) - else: - emit('LINK_SCRIPT_EXE_FLAGS') + ) + if is_positive("TIDY"): + emit( + 'REAL_LINK_EXE', 
+ '$YMAKE_PYTHON ${input:"build/scripts/clang_tidy_arch.py"}', + '--build-root $ARCADIA_BUILD_ROOT', + '--source-root $ARCADIA_ROOT', + '--output-file', + '$TARGET', + '$AUTO_INPUT', + ld_env_style + ) + else: + emit('LINK_SCRIPT_EXE_FLAGS') emit('REAL_LINK_EXE_CMDLINE', '$YMAKE_PYTHON ${input:"build/scripts/link_exe.py"}', '--source-root $ARCADIA_ROOT', @@ -2133,25 +2133,25 @@ class LD(Linker): emit('DWARF_COMMAND') else: emit('DWARF_COMMAND', self.dwarf_command, ld_env_style) - if is_positive("TIDY"): - emit('LINK_EXE', '$REAL_LINK_EXE') - else: - emit('LINK_EXE', '$GENERATE_MF && $GENERATE_VCS_C_INFO_NODEP && $REAL_LINK_EXE && $DWARF_COMMAND && $LINK_ADDITIONAL_SECTIONS_COMMAND && $PACK_IOS_CMD') - if is_positive("TIDY"): - emit('LINK_DYN_LIB', "$REAL_LINK_EXE") - else: - emit('LINK_DYN_LIB', '$GENERATE_MF && $GENERATE_VCS_C_INFO_NODEP && $REAL_LINK_DYN_LIB && $DWARF_COMMAND && $LINK_ADDITIONAL_SECTIONS_COMMAND') + if is_positive("TIDY"): + emit('LINK_EXE', '$REAL_LINK_EXE') + else: + emit('LINK_EXE', '$GENERATE_MF && $GENERATE_VCS_C_INFO_NODEP && $REAL_LINK_EXE && $DWARF_COMMAND && $LINK_ADDITIONAL_SECTIONS_COMMAND && $PACK_IOS_CMD') + if is_positive("TIDY"): + emit('LINK_DYN_LIB', "$REAL_LINK_EXE") + else: + emit('LINK_DYN_LIB', '$GENERATE_MF && $GENERATE_VCS_C_INFO_NODEP && $REAL_LINK_DYN_LIB && $DWARF_COMMAND && $LINK_ADDITIONAL_SECTIONS_COMMAND') emit('LINK_EXEC_DYN_LIB', '$GENERATE_MF && $GENERATE_VCS_C_INFO_NODEP && $REAL_LINK_EXEC_DYN_LIB && $DWARF_COMMAND && $LINK_ADDITIONAL_SECTIONS_COMMAND') emit('SWIG_DLL_JAR_CMD', '$GENERATE_MF && $GENERATE_VCS_C_INFO_NODEP && $REAL_SWIG_DLL_JAR_CMD && $DWARF_COMMAND') tail_link_lib = '$AUTO_INPUT ${requirements;hide:LIB_REQUIREMENTS} ${kv;hide:"p AR"} $TOOLCHAIN_ENV ${kv;hide:"pc light-red"} ${kv;hide:"show_out"}' - if is_positive("TIDY"): - archiver = '$YMAKE_PYTHON ${input:"build/scripts/clang_tidy_arch.py"} --source-root $ARCADIA_ROOT --build-root $ARCADIA_BUILD_ROOT --output-file' - emit('LINK_LIB', archiver, "$TARGET", tail_link_lib) - else: - archiver = '$YMAKE_PYTHON ${input:"build/scripts/link_lib.py"} ${quo:AR_TOOL} $AR_TYPE %s $ARCADIA_BUILD_ROOT %s' % (self.llvm_ar_format, self.ar_plugin or 'None') - # Static Library - emit('LINK_LIB', '$GENERATE_MF &&', archiver, '$TARGET', tail_link_lib) + if is_positive("TIDY"): + archiver = '$YMAKE_PYTHON ${input:"build/scripts/clang_tidy_arch.py"} --source-root $ARCADIA_ROOT --build-root $ARCADIA_BUILD_ROOT --output-file' + emit('LINK_LIB', archiver, "$TARGET", tail_link_lib) + else: + archiver = '$YMAKE_PYTHON ${input:"build/scripts/link_lib.py"} ${quo:AR_TOOL} $AR_TYPE %s $ARCADIA_BUILD_ROOT %s' % (self.llvm_ar_format, self.ar_plugin or 'None') + # Static Library + emit('LINK_LIB', '$GENERATE_MF &&', archiver, '$TARGET', tail_link_lib) emit('GLOBAL_LINK_LIB', archiver, '$GLOBAL_TARGET', tail_link_lib) # "Fat Object" : pre-linked global objects and static library with all dependencies |