author    alevitskii <alevitskii@yandex-team.com>  2024-07-29 09:19:59 +0300
committer alevitskii <alevitskii@yandex-team.com>  2024-07-29 09:29:36 +0300
commit    283d3cef351feb73e36f38c5ffe9b3263afef4a9 (patch)
tree      f3807110d0b9943d17df0b487ad1e33d1ccd5e06 /build/plugins
parent    894193a843e1543e9cc4ff3516093cb1a30fcb31 (diff)
download  ydb-283d3cef351feb73e36f38c5ffe9b3263afef4a9.tar.gz
Better naming in dart fields
Better naming aa878a81e18decf5d412f705b04e15d47e3cf6ff
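For context, the pattern being renamed works roughly as follows: each dart-field class exposes classmethods that each return a single {KEY: value} fragment, and call sites now pick the variant they need by a descriptive name instead of an opaque value2/value3 suffix. The sketch below is a minimal, self-contained approximation of that flow; the FakeUnit object and the build_dart_record helper are illustrative stand-ins, not the real ymake plugin API.

import os


class FakeUnit:
    """Illustrative stand-in for the ymake unit object passed to dart-field accessors."""

    def path(self):
        return "$B/devtools/example"

    def filename(self):
        return "example_test"


class TestedProjectName:
    KEY = 'TESTED-PROJECT-NAME'

    @classmethod
    def path_filename_basename(cls, unit, flat_args, spec_args):
        # descriptive variant: basename of <unit path>/<unit filename>
        binary_path = os.path.join(unit.path(), unit.filename())
        return {cls.KEY: os.path.basename(binary_path)}


def build_dart_record(fields, unit, flat_args, spec_args):
    # hypothetical merge helper: fold each field's {KEY: value} fragment into one record
    record = {}
    for field in fields:
        value = field(unit, flat_args, spec_args)
        if value:
            record.update(value)
    return record


record = build_dart_record((TestedProjectName.path_filename_basename,), FakeUnit(), (), {})
print(record)  # {'TESTED-PROJECT-NAME': 'example_test'}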
Diffstat (limited to 'build/plugins')
-rw-r--r--  build/plugins/_dart_fields.py  144
-rw-r--r--  build/plugins/nots.py           72
-rw-r--r--  build/plugins/ytest.py         202
3 files changed, 205 insertions, 213 deletions
diff --git a/build/plugins/_dart_fields.py b/build/plugins/_dart_fields.py
index 563c1a12c6..866821585c 100644
--- a/build/plugins/_dart_fields.py
+++ b/build/plugins/_dart_fields.py
@@ -276,28 +276,22 @@ class BinaryPath:
KEY = 'BINARY-PATH'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def normalized(cls, unit, flat_args, spec_args):
unit_path = _common.get_norm_unit_path(unit)
- return {cls.KEY: "{}/{}".format(unit_path, unit.filename())}
+ return {cls.KEY: os.path.join(unit_path, unit.filename())}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def stripped(cls, unit, flat_args, spec_args):
unit_path = unit.path()
binary_path = os.path.join(unit_path, unit.filename())
if binary_path:
return {cls.KEY: _common.strip_roots(binary_path)}
@classmethod
- def value3(cls, unit, flat_args, spec_args):
+ def stripped_without_pkg_ext(cls, unit, flat_args, spec_args):
value = _common.strip_roots(os.path.join(unit.path(), unit.filename()).replace(".pkg", ""))
return {cls.KEY: value}
- # TODO replace with `value`
- @classmethod
- def value4(cls, unit, flat_args, spec_args):
- test_dir = _common.get_norm_unit_path(unit)
- return {cls.KEY: os.path.join(test_dir, unit.filename())}
-
class Blob:
KEY = 'BLOB'
@@ -311,11 +305,11 @@ class BuildFolderPath:
KEY = 'BUILD-FOLDER-PATH'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def normalized(cls, unit, flat_args, spec_args):
return {cls.KEY: _common.get_norm_unit_path(unit)}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def stripped(cls, unit, flat_args, spec_args):
return {cls.KEY: _common.strip_roots(unit.path())}
@@ -351,21 +345,21 @@ class CustomDependencies:
KEY = 'CUSTOM-DEPENDENCIES'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def all_standard(cls, unit, flat_args, spec_args):
custom_deps = ' '.join(spec_args.get('DEPENDS', []) + get_values_list(unit, 'TEST_DEPENDS_VALUE'))
return {cls.KEY: custom_deps}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def depends_only(cls, unit, flat_args, spec_args):
return {cls.KEY: " ".join(spec_args.get('DEPENDS', []))}
@classmethod
- def value3(cls, unit, flat_args, spec_args):
+ def test_depends_only(cls, unit, flat_args, spec_args):
custom_deps = get_values_list(unit, 'TEST_DEPENDS_VALUE')
return {cls.KEY: " ".join(custom_deps)}
@classmethod
- def value4(cls, unit, flat_args, spec_args):
+ def depends_with_linter(cls, unit, flat_args, spec_args):
deps = []
_, linter = flat_args
deps.append(os.path.dirname(linter))
@@ -373,7 +367,7 @@ class CustomDependencies:
return {cls.KEY: " ".join(deps)}
@classmethod
- def value5(cls, unit, flat_args, spec_args):
+ def nots_with_recipies(cls, unit, flat_args, spec_args):
deps = flat_args[0]
recipes_lines = format_recipes(unit.get("TEST_RECIPES_VALUE")).strip().splitlines()
if recipes_lines:
@@ -396,7 +390,7 @@ class ForkMode:
KEY = 'FORK-MODE'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def from_macro_and_unit(cls, unit, flat_args, spec_args):
fork_mode = []
if 'FORK_SUBTESTS' in spec_args:
fork_mode.append('subtests')
@@ -407,7 +401,7 @@ class ForkMode:
return {cls.KEY: fork_mode}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def test_fork_mode(cls, unit, flat_args, spec_args):
return {cls.KEY: unit.get('TEST_FORK_MODE')}
@@ -593,12 +587,12 @@ class Requirements:
KEY = 'REQUIREMENTS'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def from_macro_args_and_unit(cls, unit, flat_args, spec_args):
test_requirements = spec_args.get('REQUIREMENTS', []) + get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
return {cls.KEY: serialize_list(test_requirements)}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def with_maybe_fuzzing(cls, unit, flat_args, spec_args):
test_requirements = serialize_list(
spec_args.get('REQUIREMENTS', []) + get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
)
@@ -609,17 +603,17 @@ class Requirements:
return {cls.KEY: test_requirements}
@classmethod
- def value3(cls, unit, flat_args, spec_args):
+ def from_macro_args(cls, unit, flat_args, spec_args):
value = " ".join(spec_args.get('REQUIREMENTS', []))
return {cls.KEY: value}
@classmethod
- def value4(cls, unit, flat_args, spec_args):
+ def from_unit(cls, unit, flat_args, spec_args):
requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
return {cls.KEY: serialize_list(requirements)}
@classmethod
- def value5(cls, unit, flat_args, spec_args):
+ def from_unit_with_full_network(cls, unit, flat_args, spec_args):
requirements = sorted(set(["network:full"] + get_values_list(unit, "TEST_REQUIREMENTS_VALUE")))
return {cls.KEY: serialize_list(requirements)}
@@ -637,19 +631,19 @@ class ScriptRelPath:
KEY = 'SCRIPT-REL-PATH'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def second_flat(cls, unit, flat_args, spec_args):
return {cls.KEY: flat_args[1]}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def first_flat(cls, unit, flat_args, spec_args):
return {cls.KEY: flat_args[0]}
@classmethod
- def value3(cls, unit, flat_args, spec_args):
+ def pytest(cls, unit, flat_args, spec_args):
return {cls.KEY: 'py3test.bin' if (unit.get("PYTHON3") == 'yes') else "pytest.bin"}
@classmethod
- def value4(cls, unit, flat_args, spec_args):
+ def junit(cls, unit, flat_args, spec_args):
return {cls.KEY: 'junit5.test' if unit.get('MODULE_TYPE') == 'JUNIT5' else 'junit.test'}
@@ -657,11 +651,11 @@ class Size:
KEY = 'SIZE'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def from_macro_args_and_unit(cls, unit, flat_args, spec_args):
return {cls.KEY: ''.join(spec_args.get('SIZE', [])) or unit.get('TEST_SIZE_NAME')}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def from_unit(cls, unit, flat_args, spec_args):
return {cls.KEY: unit.get('TEST_SIZE_NAME')}
@@ -677,11 +671,11 @@ class SourceFolderPath:
KEY = 'SOURCE-FOLDER-PATH'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def normalized(cls, unit, flat_args, spec_args):
return {cls.KEY: _common.get_norm_unit_path(unit)}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def test_dir(cls, unit, flat_args, spec_args):
test_dir = _common.get_norm_unit_path(unit)
test_files = flat_args[1:]
if test_files:
@@ -693,12 +687,12 @@ class SplitFactor:
KEY = 'SPLIT-FACTOR'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def from_macro_args_and_unit(cls, unit, flat_args, spec_args):
value = ''.join(spec_args.get('SPLIT_FACTOR', [])) or unit.get('TEST_SPLIT_FACTOR')
return {cls.KEY: value}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def from_unit(cls, unit, flat_args, spec_args):
return {cls.KEY: unit.get('TEST_SPLIT_FACTOR')}
@@ -706,17 +700,17 @@ class Tag:
KEY = 'TAG'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def from_macro_args_and_unit(cls, unit, flat_args, spec_args):
tags = serialize_list(sorted(_get_test_tags(unit, spec_args)))
return {cls.KEY: tags}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def from_unit(cls, unit, flat_args, spec_args):
tags = serialize_list(get_values_list(unit, "TEST_TAGS_VALUE"))
return {cls.KEY: tags}
@classmethod
- def value3(cls, unit, flat_args, spec_args):
+ def from_unit_fat_external_no_retries(cls, unit, flat_args, spec_args):
tags = sorted(set(["ya:fat", "ya:external", "ya:noretries"] + get_values_list(unit, "TEST_TAGS_VALUE")))
return {cls.KEY: serialize_list(tags)}
@@ -761,19 +755,19 @@ class TestCwd:
KEY = 'TEST-CWD'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def from_unit(cls, unit, flat_args, spec_args):
test_cwd = unit.get('TEST_CWD_VALUE') # TODO: validate test_cwd value
return {cls.KEY: test_cwd}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def keywords_replaced(cls, unit, flat_args, spec_args):
test_cwd = unit.get('TEST_CWD_VALUE') or ''
if test_cwd:
test_cwd = test_cwd.replace("$TEST_CWD_VALUE", "").replace('"MACRO_CALLS_DELIM"', "").strip()
return {cls.KEY: test_cwd}
@classmethod
- def value3(cls, unit, flat_args, spec_args):
+ def moddir(cls, unit, flat_args, spec_args):
return {cls.KEY: unit.get("MODDIR")}
@@ -781,7 +775,7 @@ class TestData:
KEY = 'TEST-DATA'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def from_macro_args_and_unit(cls, unit, flat_args, spec_args):
test_data = sorted(
_common.filter_out_by_keyword(
spec_args.get('DATA', []) + get_norm_paths(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED'
@@ -790,7 +784,7 @@ class TestData:
return {cls.KEY: serialize_list(test_data)}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def from_macro_args_and_unit_with_canonical(cls, unit, flat_args, spec_args):
test_data = sorted(
_common.filter_out_by_keyword(
spec_args.get('DATA', []) + get_norm_paths(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED'
@@ -803,7 +797,7 @@ class TestData:
return {cls.KEY: value}
@classmethod
- def value3(cls, unit, flat_args, spec_args):
+ def ktlint(cls, unit, flat_args, spec_args):
if unit.get('_USE_KTLINT_OLD') == 'yes':
extra_test_data = serialize_list([KTLINT_OLD_EDITOR_CONFIG])
else:
@@ -816,13 +810,13 @@ class TestData:
return {cls.KEY: extra_test_data}
@classmethod
- def value4(cls, unit, flat_args, spec_args):
+ def java_style(cls, unit, flat_args, spec_args):
ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes'
if ymake_java_test:
return {cls.KEY: java_srcdirs_to_data(unit, 'ALL_SRCDIRS')}
@classmethod
- def value5(cls, unit, flat_args, spec_args):
+ def from_unit_with_canonical(cls, unit, flat_args, spec_args):
test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
data, _ = get_canonical_test_resources(unit)
test_data += data
@@ -830,7 +824,7 @@ class TestData:
return {cls.KEY: value}
@classmethod
- def value6(cls, unit, flat_args, spec_args):
+ def java_test(cls, unit, flat_args, spec_args):
test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
test_data.append('arcadia/build/scripts/run_junit.py')
test_data.append('arcadia/build/scripts/unpacking_jtest_runner.py')
@@ -850,7 +844,7 @@ class TestData:
return {cls.KEY: value}
@classmethod
- def value7(cls, unit, flat_args, spec_args):
+ def from_unit(cls, unit, flat_args, spec_args):
return {cls.KEY: serialize_list(get_values_list(unit, "TEST_DATA_VALUE"))}
@@ -911,16 +905,16 @@ class TestedProjectName:
KEY = 'TESTED-PROJECT-NAME'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def unit_name(cls, unit, flat_args, spec_args):
return {cls.KEY: unit.name()}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def normalized_basename(cls, unit, flat_args, spec_args):
test_dir = _common.get_norm_unit_path(unit)
return {cls.KEY: os.path.basename(test_dir)}
@classmethod
- def value3(cls, unit, flat_args, spec_args):
+ def test_dir(cls, unit, flat_args, spec_args):
test_dir = _common.get_norm_unit_path(unit)
test_files = flat_args[1:]
if test_files:
@@ -928,21 +922,21 @@ class TestedProjectName:
return {cls.KEY: os.path.basename(test_dir)}
@classmethod
- def value4(cls, unit, flat_args, spec_args):
+ def path_filename_basename(cls, unit, flat_args, spec_args):
binary_path = os.path.join(unit.path(), unit.filename())
return {cls.KEY: os.path.basename(binary_path)}
@classmethod
- def value5(cls, unit, flat_args, spec_args):
+ def normalized(cls, unit, flat_args, spec_args):
return {cls.KEY: _common.get_norm_unit_path(unit)}
@classmethod
- def value6(cls, unit, flat_args, spec_args):
+ def path_filename_basename_without_pkg_ext(cls, unit, flat_args, spec_args):
value = os.path.basename(os.path.join(unit.path(), unit.filename()).replace(".pkg", ""))
return {cls.KEY: value}
@classmethod
- def value7(cls, unit, flat_args, spec_args):
+ def filename_without_ext(cls, unit, flat_args, spec_args):
return {cls.KEY: os.path.splitext(unit.filename())[0]}
@@ -964,12 +958,12 @@ class TestFiles:
return {cls.KEY: value, cls.KEY2: value}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def flat_args_wo_first(cls, unit, flat_args, spec_args):
value = serialize_list(flat_args[1:])
return {cls.KEY: value, cls.KEY2: value}
@classmethod
- def value3(cls, unit, flat_args, spec_args):
+ def java_style(cls, unit, flat_args, spec_args):
test_files = flat_args[1:]
check_level = flat_args[1]
allowed_levels = {
@@ -985,29 +979,29 @@ class TestFiles:
return {cls.KEY: value, cls.KEY2: value}
@classmethod
- def value4(cls, unit, flat_args, spec_args):
+ def normalized(cls, unit, flat_args, spec_args):
value = serialize_list([_common.get_norm_unit_path(unit, unit.filename())])
return {cls.KEY: value, cls.KEY2: value}
@classmethod
- def value5(cls, unit, flat_args, spec_args):
+ def test_srcs(cls, unit, flat_args, spec_args):
test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
return {cls.KEY: serialize_list(test_files)}
@classmethod
- def value6(cls, unit, flat_args, spec_args):
+ def ts_test_srcs(cls, unit, flat_args, spec_args):
test_files = get_values_list(unit, "_TS_TEST_SRCS_VALUE")
test_files = _resolve_module_files(unit, unit.get("MODDIR"), test_files)
return {cls.KEY: serialize_list(test_files)}
@classmethod
- def value7(cls, unit, flat_args, spec_args):
+ def ts_input_files(cls, unit, flat_args, spec_args):
typecheck_files = get_values_list(unit, "TS_INPUT_FILES")
test_files = [_common.resolve_common_const(f) for f in typecheck_files]
return {cls.KEY: serialize_list(test_files)}
@classmethod
- def value8(cls, unit, flat_args, spec_args):
+ def ts_lint_srcs(cls, unit, flat_args, spec_args):
test_files = get_values_list(unit, "_TS_LINT_SRCS_VALUE")
test_files = _resolve_module_files(unit, unit.get("MODDIR"), test_files)
return {cls.KEY: serialize_list(test_files)}
@@ -1067,34 +1061,32 @@ class TestName:
return {cls.KEY: flat_args[0]}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def first_flat_with_bench(cls, unit, flat_args, spec_args):
return {cls.KEY: flat_args[0] + '_bench'}
@classmethod
- def value3(cls, unit, flat_args, spec_args):
+ def first_flat(cls, unit, flat_args, spec_args):
return {cls.KEY: flat_args[0].lower()}
@classmethod
- def value4(cls, unit, flat_args, spec_args):
- unit_path = unit.path()
- binary_path = os.path.join(unit_path, unit.filename())
- test_name = os.path.basename(binary_path)
+ def filename_without_ext(cls, unit, flat_args, spec_args):
+ test_name = os.path.basename(os.path.join(unit.path(), unit.filename()))
return {cls.KEY: os.path.splitext(test_name)[0]}
@classmethod
- def value5(cls, unit, flat_args, spec_args):
+ def normalized_joined_dir_basename(cls, unit, flat_args, spec_args):
path = _common.get_norm_unit_path(unit)
value = '-'.join([os.path.basename(os.path.dirname(path)), os.path.basename(path)])
return {cls.KEY: value}
@classmethod
- def value6(cls, unit, flat_args, spec_args):
+ def normalized_joined_dir_basename_deps(cls, unit, flat_args, spec_args):
path = _common.get_norm_unit_path(unit)
value = '-'.join([os.path.basename(os.path.dirname(path)), os.path.basename(path), 'dependencies']).strip('-')
return {cls.KEY: value}
@classmethod
- def value7(cls, unit, flat_args, spec_args):
+ def filename_without_pkg_ext(cls, unit, flat_args, spec_args):
test_name = os.path.basename(os.path.join(unit.path(), unit.filename()).replace(".pkg", ""))
return {cls.KEY: os.path.splitext(test_name)[0]}
@@ -1129,12 +1121,12 @@ class TestTimeout:
KEY = 'TEST-TIMEOUT'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def from_macro_args_and_unit(cls, unit, flat_args, spec_args):
test_timeout = ''.join(spec_args.get('TIMEOUT', [])) or unit.get('TEST_TIMEOUT') or ''
return {cls.KEY: test_timeout}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def from_unit_with_default(cls, unit, flat_args, spec_args):
timeout = list(filter(None, [unit.get(["TEST_TIMEOUT"])]))
if timeout:
timeout = timeout[0]
@@ -1143,7 +1135,7 @@ class TestTimeout:
return {cls.KEY: timeout}
@classmethod
- def value3(cls, unit, flat_args, spec_args):
+ def from_unit(cls, unit, flat_args, spec_args):
return {cls.KEY: unit.get('TEST_TIMEOUT')}
@@ -1219,17 +1211,17 @@ class YtSpec:
KEY = 'YT-SPEC'
@classmethod
- def value(cls, unit, flat_args, spec_args):
+ def from_macro_args_and_unit(cls, unit, flat_args, spec_args):
value = serialize_list(spec_args.get('YT_SPEC', []) + get_unit_list_variable(unit, 'TEST_YT_SPEC_VALUE'))
return {cls.KEY: value}
@classmethod
- def value2(cls, unit, flat_args, spec_args):
+ def from_unit(cls, unit, flat_args, spec_args):
yt_spec = get_values_list(unit, 'TEST_YT_SPEC_VALUE')
if yt_spec:
return {cls.KEY: serialize_list(yt_spec)}
@classmethod
- def value3(cls, unit, flat_args, spec_args):
+ def from_unit_list_var(cls, unit, flat_args, spec_args):
yt_spec_values = get_unit_list_variable(unit, 'TEST_YT_SPEC_VALUE')
return {cls.KEY: serialize_list(yt_spec_values)}
diff --git a/build/plugins/nots.py b/build/plugins/nots.py
index 1e3f71e357..329b1c111d 100644
--- a/build/plugins/nots.py
+++ b/build/plugins/nots.py
@@ -31,26 +31,26 @@ class TsTestType(StrEnum):
TS_TEST_FIELDS_BASE = (
- df.BinaryPath.value4,
- df.BuildFolderPath.value,
- df.ForkMode.value2,
+ df.BinaryPath.normalized,
+ df.BuildFolderPath.normalized,
+ df.ForkMode.test_fork_mode,
df.NodejsRootVarName.value,
- df.ScriptRelPath.value2,
- df.SourceFolderPath.value,
- df.SplitFactor.value2,
- df.TestData.value7,
- df.TestedProjectName.value7,
+ df.ScriptRelPath.first_flat,
+ df.SourceFolderPath.normalized,
+ df.SplitFactor.from_unit,
+ df.TestData.from_unit,
+ df.TestedProjectName.filename_without_ext,
df.TestEnv.value,
df.TestName.value,
df.TestRecipes.value,
- df.TestTimeout.value3,
+ df.TestTimeout.from_unit,
)
TS_TEST_SPECIFIC_FIELDS = {
TsTestType.JEST: (
- df.Size.value2,
- df.Tag.value2,
- df.Requirements.value4,
+ df.Size.from_unit,
+ df.Tag.from_unit,
+ df.Requirements.from_unit,
df.ConfigPath.value,
df.TsTestDataDirs.value,
df.TsTestDataDirsRename.value,
@@ -58,8 +58,8 @@ TS_TEST_SPECIFIC_FIELDS = {
df.TsTestForPath.value,
),
TsTestType.HERMIONE: (
- df.Tag.value3,
- df.Requirements.value5,
+ df.Tag.from_unit_fat_external_no_retries,
+ df.Requirements.from_unit_with_full_network,
df.ConfigPath.value,
df.TsTestDataDirs.value,
df.TsTestDataDirsRename.value,
@@ -67,9 +67,9 @@ TS_TEST_SPECIFIC_FIELDS = {
df.TsTestForPath.value,
),
TsTestType.PLAYWRIGHT: (
- df.Size.value2,
- df.Tag.value2,
- df.Requirements.value4,
+ df.Size.from_unit,
+ df.Tag.from_unit,
+ df.Requirements.from_unit,
df.ConfigPath.value,
df.TsTestDataDirs.value,
df.TsTestDataDirsRename.value,
@@ -77,17 +77,17 @@ TS_TEST_SPECIFIC_FIELDS = {
df.TsTestForPath.value,
),
TsTestType.ESLINT: (
- df.Size.value2,
- df.TestCwd.value3,
- df.Tag.value2,
- df.Requirements.value4,
+ df.Size.from_unit,
+ df.TestCwd.moddir,
+ df.Tag.from_unit,
+ df.Requirements.from_unit,
df.EslintConfigPath.value,
),
TsTestType.TSC_TYPECHECK: (
- df.Size.value2,
- df.TestCwd.value3,
- df.Tag.value2,
- df.Requirements.value4,
+ df.Size.from_unit,
+ df.TestCwd.moddir,
+ df.Tag.from_unit,
+ df.Requirements.from_unit,
),
TsTestType.TS_STYLELINT: (
df.TsStylelintConfig.value,
@@ -451,7 +451,7 @@ def _setup_eslint(unit):
if unit.get("_NO_LINT_VALUE") == "none":
return
- test_files = df.TestFiles.value8(unit, (), {})[df.TestFiles.KEY]
+ test_files = df.TestFiles.ts_lint_srcs(unit, (), {})[df.TestFiles.KEY]
if not test_files:
return
@@ -464,7 +464,7 @@ def _setup_eslint(unit):
from lib.nots.package_manager import constants
peers = _create_pm(unit).get_peers_from_package_json()
- deps = df.CustomDependencies.value5(unit, (peers,), {})[df.CustomDependencies.KEY].split()
+ deps = df.CustomDependencies.nots_with_recipies(unit, (peers,), {})[df.CustomDependencies.KEY].split()
if deps:
joined_deps = "\n".join(deps)
@@ -482,7 +482,7 @@ def _setup_eslint(unit):
dart_record[df.TestFiles.KEY] = test_files
dart_record[df.NodeModulesBundleFilename.KEY] = constants.NODE_MODULES_WORKSPACE_BUNDLE_FILENAME
- extra_deps = df.CustomDependencies.value3(unit, (), {})[df.CustomDependencies.KEY].split()
+ extra_deps = df.CustomDependencies.test_depends_only(unit, (), {})[df.CustomDependencies.KEY].split()
dart_record[df.CustomDependencies.KEY] = " ".join(sort_uniq(deps + extra_deps))
dart_record[df.LintFileProcessingTime.KEY] = str(ESLINT_FILE_PROCESSING_TIME_DEFAULT)
@@ -500,7 +500,7 @@ def _setup_tsc_typecheck(unit):
if unit.get("_TS_TYPECHECK_VALUE") == "none":
return
- test_files = df.TestFiles.value7(unit, (), {})[df.TestFiles.KEY]
+ test_files = df.TestFiles.ts_input_files(unit, (), {})[df.TestFiles.KEY]
if not test_files:
return
@@ -526,7 +526,7 @@ def _setup_tsc_typecheck(unit):
from lib.nots.package_manager import constants
peers = _create_pm(unit).get_peers_from_package_json()
- deps = df.CustomDependencies.value5(unit, (peers,), {})[df.CustomDependencies.KEY].split()
+ deps = df.CustomDependencies.nots_with_recipies(unit, (peers,), {})[df.CustomDependencies.KEY].split()
if deps:
joined_deps = "\n".join(deps)
@@ -544,7 +544,7 @@ def _setup_tsc_typecheck(unit):
dart_record[df.TestFiles.KEY] = test_files
dart_record[df.NodeModulesBundleFilename.KEY] = constants.NODE_MODULES_WORKSPACE_BUNDLE_FILENAME
- extra_deps = df.CustomDependencies.value3(unit, (), {})[df.CustomDependencies.KEY].split()
+ extra_deps = df.CustomDependencies.test_depends_only(unit, (), {})[df.CustomDependencies.KEY].split()
dart_record[df.CustomDependencies.KEY] = " ".join(sort_uniq(deps + extra_deps))
dart_record[df.TsConfigPath.KEY] = tsconfig_path
@@ -576,7 +576,7 @@ def _setup_stylelint(unit):
peers = _create_pm(unit).get_peers_from_package_json()
- deps = df.CustomDependencies.value5(unit, (peers,), {})[df.CustomDependencies.KEY].split()
+ deps = df.CustomDependencies.nots_with_recipies(unit, (peers,), {})[df.CustomDependencies.KEY].split()
if deps:
joined_deps = "\n".join(deps)
logger.info(f"{test_type} deps: \n{joined_deps}")
@@ -589,7 +589,7 @@ def _setup_stylelint(unit):
TS_TEST_FIELDS_BASE + TS_TEST_SPECIFIC_FIELDS[test_type], unit, flat_args, spec_args
)
- extra_deps = df.CustomDependencies.value3(unit, (), {})[df.CustomDependencies.KEY].split()
+ extra_deps = df.CustomDependencies.test_depends_only(unit, (), {})[df.CustomDependencies.KEY].split()
dart_record[df.CustomDependencies.KEY] = " ".join(sort_uniq(deps + extra_deps))
data = ytest.dump_test(unit, dart_record)
@@ -779,7 +779,7 @@ def on_ts_test_for_configure(unit, test_runner, default_config, node_modules_fil
config_path = os.path.join(for_mod_path, default_config)
unit.set(["TS_TEST_CONFIG_PATH", config_path])
- test_files = df.TestFiles.value6(unit, (), {})[df.TestFiles.KEY]
+ test_files = df.TestFiles.ts_test_srcs(unit, (), {})[df.TestFiles.KEY]
if not test_files:
ymake.report_configure_error("No tests found")
return
@@ -787,7 +787,7 @@ def on_ts_test_for_configure(unit, test_runner, default_config, node_modules_fil
from lib.nots.package_manager import constants
peers = _create_pm(unit).get_peers_from_package_json()
- deps = df.CustomDependencies.value5(unit, (peers,), {})[df.CustomDependencies.KEY].split()
+ deps = df.CustomDependencies.nots_with_recipies(unit, (peers,), {})[df.CustomDependencies.KEY].split()
if deps:
joined_deps = "\n".join(deps)
@@ -806,7 +806,7 @@ def on_ts_test_for_configure(unit, test_runner, default_config, node_modules_fil
dart_record[df.TestFiles.KEY] = test_files
dart_record[df.NodeModulesBundleFilename.KEY] = constants.NODE_MODULES_WORKSPACE_BUNDLE_FILENAME
- extra_deps = df.CustomDependencies.value3(unit, (), {})[df.CustomDependencies.KEY].split()
+ extra_deps = df.CustomDependencies.test_depends_only(unit, (), {})[df.CustomDependencies.KEY].split()
dart_record[df.CustomDependencies.KEY] = " ".join(sort_uniq(deps + extra_deps))
if test_runner == TsTestType.HERMIONE:
dart_record[df.Size.KEY] = "LARGE"
diff --git a/build/plugins/ytest.py b/build/plugins/ytest.py
index 5f74e15d20..aa41dff565 100644
--- a/build/plugins/ytest.py
+++ b/build/plugins/ytest.py
@@ -40,17 +40,17 @@ KTLINT_OLD_EDITOR_CONFIG = "arcadia/build/platform/java/ktlint_old/.editorconfig
YTEST_FIELDS_BASE = (
df.AndroidApkTestActivity.value,
- df.BinaryPath.value,
- df.BuildFolderPath.value,
- df.CustomDependencies.value,
+ df.BinaryPath.normalized,
+ df.BuildFolderPath.normalized,
+ df.CustomDependencies.all_standard,
df.GlobalLibraryPath.value,
- df.ScriptRelPath.value,
+ df.ScriptRelPath.second_flat,
df.SkipTest.value,
- df.SourceFolderPath.value,
- df.SplitFactor.value,
- df.TestCwd.value,
+ df.SourceFolderPath.normalized,
+ df.SplitFactor.from_macro_args_and_unit,
+ df.TestCwd.from_unit,
df.TestedProjectFilename.value,
- df.TestedProjectName.value,
+ df.TestedProjectName.unit_name,
df.TestEnv.value,
df.TestIosDeviceType.value,
df.TestIosRuntimeType.value,
@@ -59,43 +59,43 @@ YTEST_FIELDS_BASE = (
YTEST_FIELDS_EXTRA = (
df.Blob.value,
- df.ForkMode.value,
- df.Size.value,
- df.Tag.value,
- df.TestTimeout.value,
- df.YtSpec.value,
+ df.ForkMode.from_macro_and_unit,
+ df.Size.from_macro_args_and_unit,
+ df.Tag.from_macro_args_and_unit,
+ df.TestTimeout.from_macro_args_and_unit,
+ df.YtSpec.from_macro_args_and_unit,
)
PY_EXEC_FIELDS_BASE = (
df.Blob.value,
- df.BuildFolderPath.value2,
+ df.BuildFolderPath.stripped,
df.CanonizeSubPath.value,
- df.CustomDependencies.value3,
- df.ForkMode.value2,
+ df.CustomDependencies.test_depends_only,
+ df.ForkMode.test_fork_mode,
df.ForkTestFiles.value,
df.PythonPaths.value,
- df.Requirements.value4,
- df.Size.value2,
+ df.Requirements.from_unit,
+ df.Size.from_unit,
df.SkipTest.value,
- df.SourceFolderPath.value,
- df.SplitFactor.value2,
- df.Tag.value,
- df.TestCwd.value2,
- df.TestData.value5,
+ df.SourceFolderPath.normalized,
+ df.SplitFactor.from_unit,
+ df.Tag.from_macro_args_and_unit,
+ df.TestCwd.keywords_replaced,
+ df.TestData.from_unit_with_canonical,
df.TestEnv.value,
- df.TestFiles.value5,
+ df.TestFiles.test_srcs,
df.TestPartition.value,
df.TestRecipes.value,
- df.TestTimeout.value2,
+ df.TestTimeout.from_unit_with_default,
df.UseArcadiaPython.value,
)
CHECK_FIELDS_BASE = (
- df.CustomDependencies.value2,
- df.Requirements.value3,
- df.ScriptRelPath.value2,
+ df.CustomDependencies.depends_only,
+ df.Requirements.from_macro_args,
+ df.ScriptRelPath.first_flat,
df.TestEnv.value,
- df.TestName.value3,
+ df.TestName.first_flat,
df.UseArcadiaPython.value,
)
@@ -478,8 +478,8 @@ def get_project_tidy_config(unit):
@df.with_fields(
CHECK_FIELDS_BASE
+ (
- df.TestedProjectName.value2,
- df.SourceFolderPath.value,
+ df.TestedProjectName.normalized_basename,
+ df.SourceFolderPath.normalized,
df.SbrUidExt.value,
df.TestFiles.value,
)
@@ -513,10 +513,10 @@ def check_data(fields, unit, *args):
@df.with_fields(
CHECK_FIELDS_BASE
+ (
- df.TestedProjectName.value2,
- df.SourceFolderPath.value,
+ df.TestedProjectName.normalized_basename,
+ df.SourceFolderPath.normalized,
df.SbrUidExt.value,
- df.TestFiles.value2,
+ df.TestFiles.flat_args_wo_first,
)
)
def check_resource(fields, unit, *args):
@@ -546,10 +546,10 @@ def check_resource(fields, unit, *args):
@df.with_fields(
CHECK_FIELDS_BASE
+ (
- df.TestedProjectName.value2,
- df.SourceFolderPath.value,
- df.TestData.value3,
- df.TestFiles.value2,
+ df.TestedProjectName.normalized_basename,
+ df.SourceFolderPath.normalized,
+ df.TestData.ktlint,
+ df.TestFiles.flat_args_wo_first,
df.ModuleLang.value,
df.KtlintBinary.value,
df.UseKtlintOld.value,
@@ -584,11 +584,11 @@ def ktlint(fields, unit, *args):
@df.with_fields(
CHECK_FIELDS_BASE
+ (
- df.TestedProjectName.value2,
- df.SourceFolderPath.value,
- df.TestData.value4,
- df.ForkMode.value2,
- df.TestFiles.value3,
+ df.TestedProjectName.normalized_basename,
+ df.SourceFolderPath.normalized,
+ df.TestData.java_style,
+ df.ForkMode.test_fork_mode,
+ df.TestFiles.java_style,
df.JdkLatestVersion.value,
df.JdkResource.value,
df.ModuleLang.value,
@@ -628,10 +628,10 @@ def java_style(fields, unit, *args):
@df.with_fields(
CHECK_FIELDS_BASE
+ (
- df.TestedProjectName.value3,
- df.SourceFolderPath.value2,
- df.ForkMode.value2,
- df.TestFiles.value2,
+ df.TestedProjectName.test_dir,
+ df.SourceFolderPath.test_dir,
+ df.ForkMode.test_fork_mode,
+ df.TestFiles.flat_args_wo_first,
df.ModuleLang.value,
)
)
@@ -662,10 +662,10 @@ def gofmt(fields, unit, *args):
@df.with_fields(
CHECK_FIELDS_BASE
+ (
- df.TestedProjectName.value2,
- df.SourceFolderPath.value,
- df.ForkMode.value2,
- df.TestFiles.value2,
+ df.TestedProjectName.normalized_basename,
+ df.SourceFolderPath.normalized,
+ df.ForkMode.test_fork_mode,
+ df.TestFiles.flat_args_wo_first,
df.ModuleLang.value,
)
)
@@ -737,11 +737,11 @@ def on_register_no_check_imports(unit):
@df.with_fields(
(
- df.TestedProjectName.value2,
- df.SourceFolderPath.value,
+ df.TestedProjectName.normalized_basename,
+ df.SourceFolderPath.normalized,
df.TestEnv.value,
df.UseArcadiaPython.value,
- df.TestFiles.value4,
+ df.TestFiles.normalized,
df.ModuleLang.value,
df.NoCheck.value,
)
@@ -766,11 +766,11 @@ def onadd_check_py_imports(fields, unit, *args):
@df.with_fields(
PY_EXEC_FIELDS_BASE
+ (
- df.TestName.value4,
- df.ScriptRelPath.value3,
- df.TestedProjectName.value4,
+ df.TestName.filename_without_ext,
+ df.ScriptRelPath.pytest,
+ df.TestedProjectName.path_filename_basename,
df.ModuleLang.value,
- df.BinaryPath.value2,
+ df.BinaryPath.stripped,
df.TestRunnerBin.value,
)
)
@@ -787,7 +787,7 @@ def onadd_pytest_bin(fields, unit, *args):
if unit.get('ADD_SRCDIR_TO_TEST_DATA') == "yes":
unit.ondata_files(_common.get_norm_unit_path(unit))
- yt_spec = df.YtSpec.value2(unit, flat_args, spec_args)
+ yt_spec = df.YtSpec.from_unit(unit, flat_args, spec_args)
if yt_spec and yt_spec[df.YtSpec.KEY]:
unit.ondata_files(deserialize_list(yt_spec[df.YtSpec.KEY]))
@@ -802,26 +802,26 @@ def onadd_pytest_bin(fields, unit, *args):
@df.with_fields(
(
- df.SourceFolderPath.value,
- df.TestName.value5,
- df.ScriptRelPath.value4,
- df.TestTimeout.value3,
- df.TestedProjectName.value5,
+ df.SourceFolderPath.normalized,
+ df.TestName.normalized_joined_dir_basename,
+ df.ScriptRelPath.junit,
+ df.TestTimeout.from_unit,
+ df.TestedProjectName.normalized,
df.TestEnv.value,
- df.TestData.value6,
- df.ForkMode.value2,
- df.SplitFactor.value2,
- df.CustomDependencies.value3,
- df.Tag.value,
- df.Size.value2,
- df.Requirements.value2,
+ df.TestData.java_test,
+ df.ForkMode.test_fork_mode,
+ df.SplitFactor.from_unit,
+ df.CustomDependencies.test_depends_only,
+ df.Tag.from_macro_args_and_unit,
+ df.Size.from_unit,
+ df.Requirements.with_maybe_fuzzing,
df.TestRecipes.value,
df.ModuleType.value,
df.UnittestDir.value,
df.JvmArgs.value,
# TODO optimize, SystemProperties is used in TestData
df.SystemProperties.value,
- df.TestCwd.value,
+ df.TestCwd.from_unit,
df.SkipTest.value,
df.JavaClasspathCmdType.value,
df.JdkResource.value,
@@ -848,7 +848,7 @@ def onjava_test(fields, unit, *args):
if unit.get('ADD_SRCDIR_TO_TEST_DATA') == "yes":
unit.ondata_files(_common.get_norm_unit_path(unit))
- yt_spec = df.YtSpec.value3(unit, (), {})
+ yt_spec = df.YtSpec.from_unit_list_var(unit, (), {})
unit.ondata_files(deserialize_list(yt_spec[df.YtSpec.KEY]))
try:
@@ -864,10 +864,10 @@ def onjava_test(fields, unit, *args):
@df.with_fields(
(
- df.SourceFolderPath.value,
- df.TestName.value6,
- df.TestedProjectName.value5,
- df.CustomDependencies.value3,
+ df.SourceFolderPath.normalized,
+ df.TestName.normalized_joined_dir_basename_deps,
+ df.TestedProjectName.normalized,
+ df.CustomDependencies.test_depends_only,
df.IgnoreClasspathClash.value,
df.ModuleType.value,
df.ModuleLang.value,
@@ -908,9 +908,9 @@ def onrun(unit, *args):
@df.with_fields(
PY_EXEC_FIELDS_BASE
+ (
- df.TestName.value7,
- df.TestedProjectName.value6,
- df.BinaryPath.value3,
+ df.TestName.filename_without_pkg_ext,
+ df.TestedProjectName.path_filename_basename_without_pkg_ext,
+ df.BinaryPath.stripped_without_pkg_ext,
)
)
def onsetup_exectest(fields, unit, *args):
@@ -928,7 +928,7 @@ def onsetup_exectest(fields, unit, *args):
if unit.get('ADD_SRCDIR_TO_TEST_DATA') == "yes":
unit.ondata_files(_common.get_norm_unit_path(unit))
- yt_spec = df.YtSpec.value2(unit, (), {})
+ yt_spec = df.YtSpec.from_unit(unit, (), {})
if yt_spec and yt_spec[df.YtSpec.KEY]:
unit.ondata_files(deserialize_list(yt_spec[df.YtSpec.KEY]))
@@ -1078,8 +1078,8 @@ def clang_tidy(fields, unit, *args):
+ YTEST_FIELDS_EXTRA
+ (
df.TestName.value,
- df.TestData.value,
- df.Requirements.value,
+ df.TestData.from_macro_args_and_unit,
+ df.Requirements.from_macro_args_and_unit,
df.TestPartition.value,
df.ModuleLang.value,
)
@@ -1111,8 +1111,8 @@ def unittest_py(fields, unit, *args):
+ YTEST_FIELDS_EXTRA
+ (
df.TestName.value,
- df.TestData.value,
- df.Requirements.value,
+ df.TestData.from_macro_args_and_unit,
+ df.Requirements.from_macro_args_and_unit,
df.TestPartition.value,
df.ModuleLang.value,
)
@@ -1144,8 +1144,8 @@ def gunittest(fields, unit, *args):
+ YTEST_FIELDS_EXTRA
+ (
df.TestName.value,
- df.TestData.value,
- df.Requirements.value,
+ df.TestData.from_macro_args_and_unit,
+ df.Requirements.from_macro_args_and_unit,
df.TestPartition.value,
df.ModuleLang.value,
df.BenchmarkOpts.value,
@@ -1178,8 +1178,8 @@ def g_benchmark(fields, unit, *args):
+ YTEST_FIELDS_EXTRA
+ (
df.TestName.value,
- df.TestData.value2,
- df.Requirements.value,
+ df.TestData.from_macro_args_and_unit_with_canonical,
+ df.Requirements.from_macro_args_and_unit,
df.TestPartition.value,
df.ModuleLang.value,
)
@@ -1212,8 +1212,8 @@ def go_test(fields, unit, *args):
+ YTEST_FIELDS_EXTRA
+ (
df.TestName.value,
- df.TestData.value,
- df.Requirements.value,
+ df.TestData.from_macro_args_and_unit,
+ df.Requirements.from_macro_args_and_unit,
df.TestPartition.value,
)
)
@@ -1245,8 +1245,8 @@ def boost_test(fields, unit, *args):
+ YTEST_FIELDS_EXTRA
+ (
df.TestName.value,
- df.TestData.value,
- df.Requirements.value2,
+ df.TestData.from_macro_args_and_unit,
+ df.Requirements.with_maybe_fuzzing,
df.FuzzDicts.value,
df.FuzzOpts.value,
df.Fuzzing.value,
@@ -1281,8 +1281,8 @@ def fuzz_test(fields, unit, *args):
+ YTEST_FIELDS_EXTRA
+ (
df.TestName.value,
- df.TestData.value,
- df.Requirements.value,
+ df.TestData.from_macro_args_and_unit,
+ df.Requirements.from_macro_args_and_unit,
df.TestPartition.value,
df.ModuleLang.value,
df.BenchmarkOpts.value,
@@ -1314,8 +1314,8 @@ def y_benchmark(fields, unit, *args):
+ YTEST_FIELDS_EXTRA
+ (
df.TestName.value,
- df.TestData.value,
- df.Requirements.value,
+ df.TestData.from_macro_args_and_unit,
+ df.Requirements.from_macro_args_and_unit,
df.TestPartition.value,
)
)
@@ -1344,9 +1344,9 @@ def coverage_extractor(fields, unit, *args):
YTEST_FIELDS_BASE
+ YTEST_FIELDS_EXTRA
+ (
- df.TestName.value2,
- df.TestData.value,
- df.Requirements.value,
+ df.TestName.first_flat_with_bench,
+ df.TestData.from_macro_args_and_unit,
+ df.Requirements.from_macro_args_and_unit,
df.TestPartition.value,
df.GoBenchTimeout.value,
df.ModuleLang.value,
@@ -1363,7 +1363,7 @@ def go_bench(fields, unit, *args):
"FORK_TESTS": 0,
}
flat_args, spec_args = _common.sort_by_keywords(keywords, args)
- tags = df.Tag.value(unit, flat_args, spec_args)[df.Tag.KEY]
+ tags = df.Tag.from_macro_args_and_unit(unit, flat_args, spec_args)[df.Tag.KEY]
if "ya:run_go_benchmark" not in tags:
return