author     alexv-smirnov <alex@ydb.tech>  2023-03-15 19:59:12 +0300
committer  alexv-smirnov <alex@ydb.tech>  2023-03-15 19:59:12 +0300
commit     056bb284ccf8dd6793ec3a54ffa36c4fb2b9ad11 (patch)
tree       4740980126f32e3af7937ba0ca5f83e59baa4ab0 /build/plugins
parent     269126dcced1cc8b53eb4398b4a33e5142f10290 (diff)
download   ydb-056bb284ccf8dd6793ec3a54ffa36c4fb2b9ad11.tar.gz
add library/cpp/actors, ymake build to ydb oss export
Diffstat (limited to 'build/plugins')
-rw-r--r--  build/plugins/_common.py | 201
-rw-r--r--  build/plugins/_requirements.py | 176
-rw-r--r--  build/plugins/_xsyn_includes.py | 60
-rw-r--r--  build/plugins/build_mn_files.py | 29
-rw-r--r--  build/plugins/bundle.py | 22
-rw-r--r--  build/plugins/code_generator.py | 45
-rw-r--r--  build/plugins/container_layers.py | 6
-rw-r--r--  build/plugins/cp.py | 30
-rw-r--r--  build/plugins/cpp_style.py | 19
-rw-r--r--  build/plugins/create_init_py.py | 15
-rw-r--r--  build/plugins/credits.py | 22
-rw-r--r--  build/plugins/docs.py | 46
-rw-r--r--  build/plugins/files.py | 5
-rw-r--r--  build/plugins/gobuild.py | 309
-rw-r--r--  build/plugins/ios_app_settings.py | 19
-rw-r--r--  build/plugins/ios_assets.py | 30
-rw-r--r--  build/plugins/java.py | 446
-rw-r--r--  build/plugins/large_files.py | 39
-rw-r--r--  build/plugins/lib/__init__.py | 0
-rw-r--r--  build/plugins/lib/_metric_resolvers.py | 11
-rw-r--r--  build/plugins/lib/nots/__init__.py | 0
-rw-r--r--  build/plugins/lib/nots/constants.py | 12
-rw-r--r--  build/plugins/lib/nots/package_manager/__init__.py | 11
-rw-r--r--  build/plugins/lib/nots/package_manager/base/__init__.py | 14
-rw-r--r--  build/plugins/lib/nots/package_manager/base/constants.py | 5
-rw-r--r--  build/plugins/lib/nots/package_manager/base/lockfile.py | 68
-rw-r--r--  build/plugins/lib/nots/package_manager/base/node_modules_bundler.py | 66
-rw-r--r--  build/plugins/lib/nots/package_manager/base/package_json.py | 170
-rw-r--r--  build/plugins/lib/nots/package_manager/base/package_manager.py | 141
-rw-r--r--  build/plugins/lib/nots/package_manager/base/tests/package_json.py | 152
-rw-r--r--  build/plugins/lib/nots/package_manager/base/tests/utils.py | 15
-rw-r--r--  build/plugins/lib/nots/package_manager/base/tests/ya.make | 14
-rw-r--r--  build/plugins/lib/nots/package_manager/base/utils.py | 29
-rw-r--r--  build/plugins/lib/nots/package_manager/base/ya.make | 23
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/__init__.py | 12
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/constants.py | 2
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/lockfile.py | 162
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/package_manager.py | 215
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py | 320
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py | 68
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/tests/ya.make | 15
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/utils.py | 11
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/workspace.py | 75
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/ya.make | 24
-rw-r--r--  build/plugins/lib/nots/package_manager/ya.make | 14
-rw-r--r--  build/plugins/lib/nots/semver/__init__.py | 5
-rw-r--r--  build/plugins/lib/nots/semver/semver.py | 228
-rw-r--r--  build/plugins/lib/nots/semver/tests/test_version.py | 242
-rw-r--r--  build/plugins/lib/nots/semver/tests/test_version_range.py | 69
-rw-r--r--  build/plugins/lib/nots/semver/tests/ya.make | 14
-rw-r--r--  build/plugins/lib/nots/semver/ya.make | 14
-rw-r--r--  build/plugins/lib/nots/typescript/__init__.py | 9
-rw-r--r--  build/plugins/lib/nots/typescript/tests/ts_config.py | 86
-rw-r--r--  build/plugins/lib/nots/typescript/tests/ya.make | 13
-rw-r--r--  build/plugins/lib/nots/typescript/ts_config.py | 300
-rw-r--r--  build/plugins/lib/nots/typescript/ts_errors.py | 10
-rw-r--r--  build/plugins/lib/nots/typescript/ya.make | 19
-rw-r--r--  build/plugins/lib/nots/ya.make | 15
-rw-r--r--  build/plugins/lib/test_const/__init__.py | 522
-rw-r--r--  build/plugins/lib/test_const/ya.make | 9
-rw-r--r--  build/plugins/lib/ya.make | 7
-rw-r--r--  build/plugins/linker_script.py | 12
-rw-r--r--  build/plugins/lj_archive.py | 44
-rw-r--r--  build/plugins/llvm_bc.py | 33
-rw-r--r--  build/plugins/macros_with_error.py | 29
-rw-r--r--  build/plugins/mx_archive.py | 16
-rw-r--r--  build/plugins/nots.py | 309
-rw-r--r--  build/plugins/pybuild.py | 682
-rw-r--r--  build/plugins/res.py | 133
-rw-r--r--  build/plugins/sandbox_registry.py | 21
-rw-r--r--  build/plugins/scarab_cant_clash.py | 66
-rw-r--r--  build/plugins/split_codegen.py | 43
-rw-r--r--  build/plugins/ssqls.py | 40
-rw-r--r--  build/plugins/suppressions.py | 19
-rw-r--r--  build/plugins/tests/fake_ymake.py | 2
-rw-r--r--  build/plugins/tests/test_code_generator.py | 20
-rw-r--r--  build/plugins/tests/test_common.py | 49
-rw-r--r--  build/plugins/tests/test_requirements.py | 67
-rw-r--r--  build/plugins/tests/test_ssqls.py | 27
-rw-r--r--  build/plugins/tests/ya.make | 25
-rw-r--r--  build/plugins/uservices.py | 26
-rw-r--r--  build/plugins/ya.make | 23
-rw-r--r--  build/plugins/yabs_generate_conf.py | 61
-rw-r--r--  build/plugins/yql_python_udf.py | 55
-rw-r--r--  build/plugins/ytest.py | 1203
-rw-r--r--  build/plugins/ytest2.py | 54
86 files changed, 7789 insertions, 0 deletions
diff --git a/build/plugins/_common.py b/build/plugins/_common.py
new file mode 100644
index 0000000000..2f831a94db
--- /dev/null
+++ b/build/plugins/_common.py
@@ -0,0 +1,201 @@
+import sys
+import hashlib
+import base64
+
+
+class Result(object):
+ pass
+
+
+def lazy(func):
+ result = Result()
+
+ def wrapper():
+ try:
+ return result._result
+ except AttributeError:
+ result._result = func()
+
+ return result._result
+
+ return wrapper
+
+
+def pathid(path):
+ return base64.b32encode(hashlib.md5(path).digest()).lower().strip('=')
+
+
+def listid(l):
+ return pathid(str(sorted(l)))
+
+
+def unpair(lst):
+ for x, y in lst:
+ yield x
+ yield y
+
+
+def iterpair(lst):
+ y = None
+
+ for x in lst:
+ if y:
+ yield (y, x)
+
+ y = None
+ else:
+ y = x
+
+
+def stripext(fname):
+ return fname[:fname.rfind('.')]
+
+
+def tobuilddir(fname):
+ if not fname:
+ return '$B'
+ if fname.startswith('$S'):
+ return fname.replace('$S', '$B', 1)
+ else:
+ return fname
+
+
+def before(s, ss):
+ p = s.find(ss)
+
+ if p == -1:
+ return s
+
+ return s[:p]
+
+
+def sort_by_keywords(keywords, args):
+ flat = []
+ res = {}
+
+ cur_key = None
+ limit = -1
+ for arg in args:
+ if arg in keywords:
+ limit = keywords[arg]
+ if limit == 0:
+ res[arg] = True
+ cur_key = None
+ limit = -1
+ else:
+ cur_key = arg
+ continue
+ if limit == 0:
+ cur_key = None
+ limit = -1
+ if cur_key:
+ if cur_key in res:
+ res[cur_key].append(arg)
+ else:
+ res[cur_key] = [arg]
+ limit -= 1
+ else:
+ flat.append(arg)
+ return (flat, res)
+
+
+def resolve_common_const(path):
+ if path.startswith('${ARCADIA_ROOT}'):
+ return path.replace('${ARCADIA_ROOT}', '$S', 1)
+ if path.startswith('${ARCADIA_BUILD_ROOT}'):
+ return path.replace('${ARCADIA_BUILD_ROOT}', '$B', 1)
+ return path
+
+
+def resolve_to_abs_path(path, source_root, build_root):
+ if path.startswith('$S') and source_root is not None:
+ return path.replace('$S', source_root, 1)
+ if path.startswith('$B') and build_root is not None:
+ return path.replace('$B', build_root, 1)
+ return path
+
+
+def resolve_to_ymake_path(path):
+ return resolve_to_abs_path(path, '${ARCADIA_ROOT}', '${ARCADIA_BUILD_ROOT}')
+
+
+def join_intl_paths(*args):
+ return '/'.join(args)
+
+
+def get(fun, num):
+ return fun()[num][0]
+
+
+def make_tuples(arg_list):
+ def tpl():
+ for x in arg_list:
+ yield (x, [])
+
+ return list(tpl())
+
+
+def resolve_includes(unit, src, paths):
+ return unit.resolve_include([src] + paths) if paths else []
+
+
+def rootrel_arc_src(src, unit):
+ if src.startswith('${ARCADIA_ROOT}/'):
+ return src[16:]
+
+ if src.startswith('${ARCADIA_BUILD_ROOT}/'):
+ return src[22:]
+
+ elif src.startswith('${CURDIR}/'):
+ return unit.path()[3:] + '/' + src[10:]
+
+ else:
+ resolved = unit.resolve_arc_path(src)
+
+ if resolved.startswith('$S/'):
+ return resolved[3:]
+
+ return src # leave as is
+
+
+def skip_build_root(x):
+ if x.startswith('${ARCADIA_BUILD_ROOT}'):
+ return x[len('${ARCADIA_BUILD_ROOT}'):].lstrip('/')
+
+ return x
+
+
+def get_interpreter_path():
+ interpreter_path = [sys.executable]
+ if 'ymake' in interpreter_path[0]:
+ interpreter_path.append('--python')
+ return interpreter_path
+
+
+def filter_out_by_keyword(test_data, keyword):
+ def _iterate():
+ i = 0
+ while i < len(test_data):
+ if test_data[i] == keyword:
+ i += 2
+ else:
+ yield test_data[i]
+ i += 1
+
+ return list(_iterate())
+
+
+def generate_chunks(lst, chunk_size):
+ for i in xrange(0, len(lst), chunk_size):
+ yield lst[i:(i + chunk_size)]
+
+
+def strip_roots(path):
+ for prefix in ["$B/", "$S/"]:
+ if path.startswith(prefix):
+ return path[len(prefix):]
+ return path
+
+
+def to_yesno(x):
+ return "yes" if x else "no"
diff --git a/build/plugins/_requirements.py b/build/plugins/_requirements.py
new file mode 100644
index 0000000000..40c50f8791
--- /dev/null
+++ b/build/plugins/_requirements.py
@@ -0,0 +1,176 @@
+import lib.test_const as consts
+import re
+import lib._metric_resolvers as mr
+
+CANON_SB_VAULT_REGEX = re.compile(r"\w+=(value|file):[-\w]+:\w+")
+CANON_YAV_REGEX = re.compile(r"\w+=(value|file):sec-[a-z0-9]+:\w+")
+VALID_DNS_REQUIREMENTS = ("default", "local", "dns64")
+VALID_NETWORK_REQUIREMENTS = ("full", "restricted")
+
+
+def check_cpu(suite_cpu_requirements, test_size, is_kvm=False):
+ min_cpu_requirements = consts.TestRequirementsConstants.MinCpu
+ max_cpu_requirements = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Cpu)
+ if isinstance(suite_cpu_requirements, str):
+ if all(
+ consts.TestRequirementsConstants.is_all_cpu(req) for req in (max_cpu_requirements, suite_cpu_requirements)
+ ):
+ return None
+ return "Wrong 'cpu' requirements: {}, should be in [{}..{}] for {}-size tests".format(
+ suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size
+ )
+
+ if not isinstance(suite_cpu_requirements, int):
+ return "Wrong 'cpu' requirements: {}, should be integer".format(suite_cpu_requirements)
+
+ if (
+ suite_cpu_requirements < min_cpu_requirements
+ or suite_cpu_requirements > consts.TestRequirementsConstants.get_cpu_value(max_cpu_requirements)
+ ):
+ return "Wrong 'cpu' requirement: {}, should be in [{}..{}] for {}-size tests".format(
+ suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size
+ )
+
+ return None
+
+
+# TODO: Remove is_kvm param when there will be guarantees on RAM
+def check_ram(suite_ram_requirements, test_size, is_kvm=False):
+ if not isinstance(suite_ram_requirements, int):
+ return "Wrong 'ram' requirements: {}, should be integer".format(suite_ram_requirements)
+ min_ram_requirements = consts.TestRequirementsConstants.MinRam
+ max_ram_requirements = (
+ consts.MAX_RAM_REQUIREMENTS_FOR_KVM
+ if is_kvm
+ else consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Ram)
+ )
+ if suite_ram_requirements < min_ram_requirements or suite_ram_requirements > max_ram_requirements:
+ err_msg = "Wrong 'ram' requirements: {}, should be in [{}..{}] for {}-size tests".format(
+ suite_ram_requirements, min_ram_requirements, max_ram_requirements, test_size
+ )
+ if is_kvm:
+ err_msg += ' with kvm requirements'
+ return err_msg
+ return None
+
+
+def check_ram_disk(suite_ram_disk, test_size, is_kvm=False):
+ min_ram_disk = consts.TestRequirementsConstants.MinRamDisk
+ max_ram_disk = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.RamDisk)
+ if isinstance(suite_ram_disk, str):
+ if all(consts.TestRequirementsConstants.is_all_ram_disk(req) for req in (max_ram_disk, suite_ram_disk)):
+ return None
+ return "Wrong 'ram_disk' requirements: {}, should be in [{}..{}] for {}-size tests".format(
+ suite_ram_disk, 0, max_ram_disk, test_size
+ )
+
+ if not isinstance(suite_ram_disk, int):
+ return "Wrong 'ram_disk' requirements: {}, should be integer".format(suite_ram_disk)
+
+ if suite_ram_disk < min_ram_disk or suite_ram_disk > consts.TestRequirementsConstants.get_ram_disk_value(
+ max_ram_disk
+ ):
+ return "Wrong 'ram_disk' requirement: {}, should be in [{}..{}] for {}-size tests".format(
+ suite_ram_disk, min_ram_disk, max_ram_disk, test_size
+ )
+
+ return None
+
+
+def validate_sb_vault(name, value):
+ if not CANON_SB_VAULT_REGEX.match(value):
+ return "sb_vault value '{}' should follow pattern <ENV_NAME>=<value|file>:<owner>:<vault key>".format(value)
+
+
+def validate_yav_vault(name, value):
+ if not CANON_YAV_REGEX.match(value):
+ return "yav value '{}' should follow pattern <ENV_NAME>=<value|file>:<sec-id>:<key>".format(value)
+
+
+def validate_numerical_requirement(name, value):
+ if mr.resolve_value(value) is None:
+ return "Cannot convert [[imp]]{}[[rst]] to the proper [[imp]]{}[[rst]] requirement value".format(value, name)
+
+
+def validate_choice_requirement(name, val, valid):
+ if val not in valid:
+ return "Unknown [[imp]]{}[[rst]] requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(
+ name, val, ", ".join(valid)
+ )
+
+
+def validate_force_sandbox_requirement(
+ name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, check_func
+):
+ if is_force_sandbox or not in_autocheck or is_fuzzing or is_ytexec_run:
+ if value == 'all':
+ return
+ return validate_numerical_requirement(name, value)
+ error_msg = validate_numerical_requirement(name, value)
+ if error_msg:
+ return error_msg
+ return check_func(mr.resolve_value(value), test_size, is_kvm)
+
+
+def validate_ram_disk_requirement(
+ name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, ram
+):
+ error_msg = validate_force_sandbox_requirement(
+ name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, check_ram_disk
+ )
+ if error_msg:
+ return error_msg
+ if is_force_sandbox or not in_autocheck or test_size == consts.TestSize.Large:
+ return
+ if int(value) > int(ram):
+ return "Wrong 'ram_disk' value, 'ram_disk':{} should be no more than 'ram':{}".format(value, ram)
+ return None
+
+
+# TODO: Remove is_kvm param when there will be guarantees on RAM
+def validate_requirement(
+ req_name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, requirements
+):
+ req_checks = {
+ 'container': validate_numerical_requirement,
+ 'cpu': lambda n, v: validate_force_sandbox_requirement(
+ n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, check_cpu
+ ),
+ 'disk_usage': validate_numerical_requirement,
+ 'dns': lambda n, v: validate_choice_requirement(n, v, VALID_DNS_REQUIREMENTS),
+ 'kvm': None,
+ 'network': lambda n, v: validate_choice_requirement(n, v, VALID_NETWORK_REQUIREMENTS),
+ 'ram': lambda n, v: validate_force_sandbox_requirement(
+ n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, check_ram
+ ),
+ 'ram_disk': lambda n, v: validate_ram_disk_requirement(
+ n,
+ v,
+ test_size,
+ is_force_sandbox,
+ in_autocheck,
+ is_fuzzing,
+ is_kvm,
+ is_ytexec_run,
+ requirements.get(
+ 'ram', consts.TestSize.get_default_requirements(test_size).get(consts.TestRequirements.Ram)
+ ),
+ ),
+ 'sb': None,
+ 'sb_vault': validate_sb_vault,
+ 'yav': validate_yav_vault,
+ }
+
+ if req_name not in req_checks:
+ return "Unknown requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(
+ req_name, ", ".join(sorted(req_checks))
+ )
+
+ if req_name in ('container', 'disk') and not is_force_sandbox:
+ return "Only [[imp]]LARGE[[rst]] tests without [[imp]]ya:force_distbuild[[rst]] tag can have [[imp]]{}[[rst]] requirement".format(
+ req_name
+ )
+
+ check_func = req_checks[req_name]
+ if check_func:
+ return check_func(req_name, value)
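
For reference, a standalone check of the sb_vault/yav value formats enforced above; the regexes are copied from this file and the sample values are invented:

import re

CANON_SB_VAULT_REGEX = re.compile(r"\w+=(value|file):[-\w]+:\w+")
CANON_YAV_REGEX = re.compile(r"\w+=(value|file):sec-[a-z0-9]+:\w+")

print(bool(CANON_SB_VAULT_REGEX.match("MY_TOKEN=value:some-owner:token_name")))  # True
print(bool(CANON_YAV_REGEX.match("MY_TOKEN=file:sec-01abc:token_name")))         # True
print(bool(CANON_YAV_REGEX.match("MY_TOKEN=plain:sec-01abc:token_name")))        # False
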
diff --git a/build/plugins/_xsyn_includes.py b/build/plugins/_xsyn_includes.py
new file mode 100644
index 0000000000..8d33cea2f0
--- /dev/null
+++ b/build/plugins/_xsyn_includes.py
@@ -0,0 +1,60 @@
+def get_include_callback():
+ """
+    .. function: get_include_callback returns a function that processes each DOM element to extract xsyn includes from it; it is aware of the directory with all the xsyns.
+
+ :param xsyn_dir directory with xsyns.
+ """
+ def get_include(element):
+ """
+ .. function: get_include returns list of includes from this DOM element.
+
+ :param element DOM element.
+ """
+ res = []
+ if element.nodeType == element.ELEMENT_NODE and element.nodeName == "parse:include":
+ attrs = element.attributes
+ for i in xrange(attrs.length):
+ attr = attrs.item(i)
+ if attr.nodeName == "path":
+ include_filename = attr.nodeValue
+ res.append(include_filename)
+ return res
+
+ return get_include
+
+
+def traverse_xsyn(element, on_element):
+ """
+    .. function: traverse_xsyn traverses the element and returns the concatenated lists produced by calling on_element on each element.
+
+ :param element element in DOM.
+ :param on_element callback on element that returns list of values.
+ """
+ res = on_element(element)
+ for child in element.childNodes:
+ child_results = traverse_xsyn(child, on_element)
+ res += child_results
+ return res
+
+
+def process_xsyn(filepath, on_element):
+ """
+    .. function: process_xsyn processes an xsyn file and returns the concatenated list produced by calling on_element on each DOM element.
+
+ :param filepath path to xsyn file
+ :param on_element callback called on each element in xsyn that returns list of values.
+
+ """
+
+    # keep a stack of file paths if on_element calls process_xsyn recursively
+ with open(filepath) as xsyn_file:
+ from xml.dom.minidom import parse
+ tree = parse(xsyn_file)
+ tree.normalize()
+ res = traverse_xsyn(tree, on_element)
+ return res
+
+
+def get_all_includes(filepath):
+ callback = get_include_callback()
+ return process_xsyn(filepath, callback)
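
The module above is written for Python 2 (xrange). A Python 3 sketch of the same traversal idea on an in-memory document; the xsyn snippet is invented and the attribute handling is simplified:

from xml.dom.minidom import parseString

doc = parseString(
    '<root><parse:include path="a.xsyn"/><parse:include path="b.xsyn"/></root>'
)

def includes_of(element):
    # Mirrors get_include_callback(): only parse:include elements contribute paths.
    if element.nodeType == element.ELEMENT_NODE and element.nodeName == "parse:include":
        return [element.getAttribute("path")]
    return []

def traverse(element, on_element):
    # Same shape as traverse_xsyn(): parent and child results are concatenated.
    res = on_element(element)
    for child in element.childNodes:
        res += traverse(child, on_element)
    return res

print(traverse(doc, includes_of))  # ['a.xsyn', 'b.xsyn']
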
diff --git a/build/plugins/build_mn_files.py b/build/plugins/build_mn_files.py
new file mode 100644
index 0000000000..4da76f1852
--- /dev/null
+++ b/build/plugins/build_mn_files.py
@@ -0,0 +1,29 @@
+from os.path import basename, splitext
+
+
+def on_build_mns_files(unit, *args):
+ files = []
+ name = ''
+ ranking_suffix = ''
+ check = ''
+ index = 0
+ fml_unused_tool = ''
+ while index < len(args):
+ if args[index] == 'NAME':
+ index += 1
+ name = args[index]
+ elif args[index] == 'RANKING_SUFFIX':
+ index += 1
+ ranking_suffix = args[index]
+ elif args[index] == 'CHECK':
+ check = 'CHECK'
+ fml_unused_tool = unit.get('FML_UNUSED_TOOL') or '$FML_UNUSED_TOOL'
+ else:
+ files.append(args[index])
+ index += 1
+
+ for filename in files:
+ file_basename, _ = splitext(basename(filename))
+ asmdataname = "staticMn{0}{1}Ptr".format(ranking_suffix, file_basename)
+ output_name = 'mn.staticMn{0}{1}Ptr.cpp'.format(ranking_suffix, file_basename)
+ unit.on_build_mns_file([filename, name, output_name, ranking_suffix, check, fml_unused_tool, asmdataname])
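
A quick illustration of the names on_build_mns_files() derives for every input file; the suffix and file name below are hypothetical:

from os.path import basename, splitext

ranking_suffix = 'Prod'
filename = 'models/formula.info'

file_basename, _ = splitext(basename(filename))
print("staticMn{0}{1}Ptr".format(ranking_suffix, file_basename))         # staticMnProdformulaPtr
print('mn.staticMn{0}{1}Ptr.cpp'.format(ranking_suffix, file_basename))  # mn.staticMnProdformulaPtr.cpp
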
diff --git a/build/plugins/bundle.py b/build/plugins/bundle.py
new file mode 100644
index 0000000000..0bec8254ee
--- /dev/null
+++ b/build/plugins/bundle.py
@@ -0,0 +1,22 @@
+import os
+
+
+def onbundle(unit, *args):
+ """
+ @usage BUNDLE(<Dir [NAME Name]>...)
+
+ Brings build artefact from module Dir under optional Name to the current module (e.g. UNION)
+ If NAME is not specified, the name of the Dir's build artefact will be preserved
+ It makes little sense to specify BUNDLE on non-final targets and so this may stop working without prior notice.
+ Bundle on multimodule will select final target among multimodule variants and will fail if there are none or more than one.
+ """
+ i = 0
+ while i < len(args):
+ if i + 2 < len(args) and args[i + 1] == "NAME":
+ target, name = args[i], args[i + 2]
+ i += 3
+ else:
+ target, name = args[i], os.path.basename(args[i])
+ i += 1
+
+ unit.on_bundle_target([target, name])
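
The loop in onbundle() pairs each Dir with either an explicit NAME or the basename of the Dir. A standalone sketch of the same pairing; the paths are invented:

import os

args = ['util/charset', 'NAME', 'charset_bundle', 'library/cpp/json']
pairs, i = [], 0
while i < len(args):
    if i + 2 < len(args) and args[i + 1] == "NAME":
        target, name = args[i], args[i + 2]                # explicit NAME wins
        i += 3
    else:
        target, name = args[i], os.path.basename(args[i])  # fall back to the basename
        i += 1
    pairs.append((target, name))

print(pairs)  # [('util/charset', 'charset_bundle'), ('library/cpp/json', 'json')]
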
diff --git a/build/plugins/code_generator.py b/build/plugins/code_generator.py
new file mode 100644
index 0000000000..c1dc792201
--- /dev/null
+++ b/build/plugins/code_generator.py
@@ -0,0 +1,45 @@
+import re
+import os
+
+import ymake
+
+pattern = re.compile(r"#include\s*[<\"](?P<INDUCED>[^>\"]+)[>\"]|(?:@|{@)\s*(?:import|include|from)\s*[\"'](?P<INCLUDE>[^\"']+)[\"']")
+
+
+class CodeGeneratorTemplateParser(object):
+ def __init__(self, path, unit):
+ self._path = path
+ retargeted = os.path.join(unit.path(), os.path.relpath(path, unit.resolve(unit.path())))
+ with open(path, 'rb') as f:
+ includes, induced = CodeGeneratorTemplateParser.parse_includes(f.readlines())
+ self._includes = unit.resolve_include([retargeted] + includes) if includes else []
+ self._induced = unit.resolve_include([retargeted] + induced) if induced else []
+
+ @staticmethod
+ def parse_includes(lines):
+ includes = []
+ induced = []
+
+ for line in lines:
+ for match in pattern.finditer(line):
+ type = match.lastgroup
+ if type == 'INCLUDE':
+ includes.append(match.group(type))
+ elif type == 'INDUCED':
+ induced.append(match.group(type))
+ else:
+ raise Exception("Unexpected match! Perhaps it is a result of an error in pattern.")
+ return (includes, induced)
+
+ def includes(self):
+ return self._includes
+
+ def induced_deps(self):
+ return {
+ 'h+cpp': self._induced
+ }
+
+
+def init():
+ ymake.addparser('markettemplate', CodeGeneratorTemplateParser)
+ ymake.addparser('macro', CodeGeneratorTemplateParser)
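
A standalone demonstration of how the regex above distinguishes induced C/C++ includes from template includes; the sample template lines are made up:

import re

# Copied from code_generator.py.
pattern = re.compile(
    r"#include\s*[<\"](?P<INDUCED>[^>\"]+)[>\"]"
    r"|(?:@|{@)\s*(?:import|include|from)\s*[\"'](?P<INCLUDE>[^\"']+)[\"']"
)

for line in ['#include <util/generic/string.h>', '{@ include "some/template.macro" @}']:
    for match in pattern.finditer(line):
        print(match.lastgroup, match.group(match.lastgroup))
# INDUCED util/generic/string.h
# INCLUDE some/template.macro
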
diff --git a/build/plugins/container_layers.py b/build/plugins/container_layers.py
new file mode 100644
index 0000000000..500e7dcd88
--- /dev/null
+++ b/build/plugins/container_layers.py
@@ -0,0 +1,6 @@
+from _common import rootrel_arc_src
+
+def oncheck_allowed_path(unit, *args):
+ module_path = rootrel_arc_src(unit.path(), unit)
+ if not (module_path.startswith("junk") or module_path.startswith("base_layers")):
+ unit.message(["error", "Cannot create container layer in this directory. See https://st.yandex-team.ru/DTCC-1123"])
diff --git a/build/plugins/cp.py b/build/plugins/cp.py
new file mode 100644
index 0000000000..5c663a3bdd
--- /dev/null
+++ b/build/plugins/cp.py
@@ -0,0 +1,30 @@
+import os
+
+from _common import sort_by_keywords
+
+
+def oncopy(unit, *args):
+ keywords = {'RESULT': 1, 'KEEP_DIR_STRUCT': 0, 'DESTINATION': 1, 'FROM': 1}
+
+ flat_args, spec_args = sort_by_keywords(keywords, args)
+
+ dest_dir = spec_args['DESTINATION'][0] if 'DESTINATION' in spec_args else ''
+ from_dir = spec_args['FROM'][0] if 'FROM' in spec_args else ''
+ keep_struct = 'KEEP_DIR_STRUCT' in spec_args
+ save_in_var = 'RESULT' in spec_args
+ targets = []
+
+ for source in flat_args:
+ rel_path = ''
+ path_list = source.split(os.sep)
+ filename = path_list[-1]
+ if keep_struct:
+ if path_list[:-1]:
+ rel_path = os.path.join(*path_list[:-1])
+ source_path = os.path.join(from_dir, rel_path, filename)
+ target_path = os.path.join(dest_dir, rel_path, filename)
+ if save_in_var:
+ targets.append(target_path)
+ unit.oncopy_file([source_path, target_path])
+ if save_in_var:
+ unit.set([spec_args["RESULT"][0], " ".join(targets)])
diff --git a/build/plugins/cpp_style.py b/build/plugins/cpp_style.py
new file mode 100644
index 0000000000..3ab78b7320
--- /dev/null
+++ b/build/plugins/cpp_style.py
@@ -0,0 +1,19 @@
+import os
+
+from _common import sort_by_keywords
+
+
+def on_style(unit, *args):
+ def it():
+ yield 'DONT_PARSE'
+
+ for f in args:
+ f = f[len('${ARCADIA_ROOT}') + 1:]
+
+ if '/generated/' in f:
+ continue
+
+ yield f
+ yield '/cpp_style/files/' + f
+
+ unit.onresource(list(it()))
diff --git a/build/plugins/create_init_py.py b/build/plugins/create_init_py.py
new file mode 100644
index 0000000000..bbe8e8945a
--- /dev/null
+++ b/build/plugins/create_init_py.py
@@ -0,0 +1,15 @@
+import os
+
+from _common import sort_by_keywords
+
+
+def oncreate_init_py_structure(unit, *args):
+ if unit.get('DISTBUILD') or unit.get('AUTOCHECK'):
+ return
+ target_dir = unit.get('PY_PROTOS_FOR_DIR')
+ path_list = target_dir.split(os.path.sep)[1:]
+ inits = [os.path.join("${ARCADIA_BUILD_ROOT}", '__init__.py')]
+ for i in range(1, len(path_list) + 1):
+ inits.append(os.path.join("${ARCADIA_BUILD_ROOT}", os.path.join(*path_list[0:i]), '__init__.py'))
+ unit.ontouch(inits)
+
diff --git a/build/plugins/credits.py b/build/plugins/credits.py
new file mode 100644
index 0000000000..0ce1659326
--- /dev/null
+++ b/build/plugins/credits.py
@@ -0,0 +1,22 @@
+from _common import rootrel_arc_src
+
+
+def oncredits_disclaimer(unit, *args):
+ if unit.get('WITH_CREDITS'):
+ unit.message(["warn", "CREDITS WARNING: {}".format(' '.join(args))])
+
+def oncheck_contrib_credits(unit, *args):
+ module_path = rootrel_arc_src(unit.path(), unit)
+ excepts = set()
+ if 'EXCEPT' in args:
+ args = list(args)
+ except_pos = args.index('EXCEPT')
+ excepts = set(args[except_pos + 1:])
+ args = args[:except_pos]
+ for arg in args:
+ if module_path.startswith(arg) and not unit.get('CREDITS_TEXTS_FILE') and not unit.get('NO_CREDITS_TEXTS_FILE'):
+ for ex in excepts:
+ if module_path.startswith(ex):
+ break
+ else:
+ unit.message(["error", "License texts not found. See https://st.yandex-team.ru/DTCC-324"])
diff --git a/build/plugins/docs.py b/build/plugins/docs.py
new file mode 100644
index 0000000000..29f89e9a8e
--- /dev/null
+++ b/build/plugins/docs.py
@@ -0,0 +1,46 @@
+import json
+
+
+def extract_macro_calls(unit, macro_value_name):
+ if not unit.get(macro_value_name):
+ return []
+
+ return filter(None, unit.get(macro_value_name).replace('$' + macro_value_name, '').split())
+
+
+def macro_calls_to_dict(unit, calls):
+ def split_args(arg):
+ if arg is None:
+ return None
+
+ kv = filter(None, arg.split('='))
+ if len(kv) != 2:
+ unit.message(['error', 'Invalid variables specification "{}": value expected to be in form %name%=%value% (with no spaces)'.format(arg)])
+ return None
+
+ return kv
+
+ return dict(filter(None, map(split_args, calls)))
+
+
+def get_variables(unit):
+ orig_variables = macro_calls_to_dict(unit, extract_macro_calls(unit, '_DOCS_VARS_VALUE'))
+ return {k: unit.get(k) or v for k, v in orig_variables.items()}
+
+
+def onprocess_docs(unit, *args):
+ if unit.enabled('_DOCS_USE_PLANTUML'):
+ unit.on_docs_yfm_use_plantuml([])
+
+ if unit.get('_DOCS_DIR_VALUE') == '':
+ unit.on_yfm_docs_dir([unit.get('_YFM_DOCS_DIR_DEFAULT_VALUE')])
+
+ variables = get_variables(unit)
+ if variables:
+ unit.set(['_DOCS_VARS_FLAG', '--vars {}'.format(json.dumps(json.dumps(variables, sort_keys=True)))])
+
+
+def onprocess_mkdocs(unit, *args):
+ variables = get_variables(unit)
+ if variables:
+ unit.set(['_DOCS_VARS_FLAG', ' '.join(['--var {}={}'.format(k, v) for k, v in variables.items()])])
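
Note the double json.dumps() in onprocess_docs(): the inner call serializes the variables, and the outer call wraps that JSON in a quoted, escaped string, presumably so it survives command-line splitting as a single --vars argument. A small sketch with invented variable names:

import json

variables = {"audience": "external", "version": "1.2"}
payload = json.dumps(json.dumps(variables, sort_keys=True))
print('--vars {}'.format(payload))
# --vars "{\"audience\": \"external\", \"version\": \"1.2\"}"
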
diff --git a/build/plugins/files.py b/build/plugins/files.py
new file mode 100644
index 0000000000..78a6fe6169
--- /dev/null
+++ b/build/plugins/files.py
@@ -0,0 +1,5 @@
+def onfiles(unit, *args):
+ args = list(args)
+ for arg in args:
+ if not arg.startswith('${ARCADIA_BUILD_ROOT}'):
+ unit.oncopy_file([arg, arg])
diff --git a/build/plugins/gobuild.py b/build/plugins/gobuild.py
new file mode 100644
index 0000000000..8df96ebc55
--- /dev/null
+++ b/build/plugins/gobuild.py
@@ -0,0 +1,309 @@
+import base64
+import itertools
+import md5
+import os
+from _common import rootrel_arc_src, tobuilddir
+import ymake
+
+
+runtime_cgo_path = os.path.join('runtime', 'cgo')
+runtime_msan_path = os.path.join('runtime', 'msan')
+runtime_race_path = os.path.join('runtime', 'race')
+arc_project_prefix = 'a.yandex-team.ru/'
+import_runtime_cgo_false = {
+ 'norace': (runtime_cgo_path, runtime_msan_path, runtime_race_path),
+ 'race': (runtime_cgo_path, runtime_msan_path),
+}
+import_syscall_false = {
+ 'norace': (runtime_cgo_path),
+ 'race': (runtime_cgo_path, runtime_race_path),
+}
+
+
+def get_import_path(unit):
+ # std_lib_prefix = unit.get('GO_STD_LIB_PREFIX')
+    # unit.get() doesn't evaluate the value of the variable, so the line above doesn't really work
+ std_lib_prefix = unit.get('GOSTD') + '/'
+ arc_project_prefix = unit.get('GO_ARCADIA_PROJECT_PREFIX')
+ vendor_prefix = unit.get('GO_CONTRIB_PROJECT_PREFIX')
+
+ module_path = rootrel_arc_src(unit.path(), unit)
+ assert len(module_path) > 0
+ import_path = module_path.replace('\\', '/')
+ if import_path.startswith(std_lib_prefix):
+ import_path = import_path[len(std_lib_prefix):]
+ elif import_path.startswith(vendor_prefix):
+ import_path = import_path[len(vendor_prefix):]
+ else:
+ import_path = arc_project_prefix + import_path
+ assert len(import_path) > 0
+ return import_path
+
+
+def get_appended_values(unit, key):
+ value = []
+ raw_value = unit.get(key)
+ if raw_value:
+ value = filter(lambda x: len(x) > 0, raw_value.split(' '))
+ assert len(value) == 0 or value[0] == '$' + key
+ return value[1:] if len(value) > 0 else value
+
+
+def compare_versions(version1, version2):
+ def last_index(version):
+ index = version.find('beta')
+ return len(version) if index < 0 else index
+
+ v1 = tuple(x.zfill(8) for x in version1[:last_index(version1)].split('.'))
+ v2 = tuple(x.zfill(8) for x in version2[:last_index(version2)].split('.'))
+ if v1 == v2:
+ return 0
+ return 1 if v1 < v2 else -1
+
+
+def need_compiling_runtime(import_path, gostd_version):
+ return import_path in ('runtime', 'reflect', 'syscall') or \
+ import_path.startswith('runtime/internal/') or \
+ compare_versions('1.17', gostd_version) >= 0 and import_path == 'internal/bytealg'
+
+
+def go_package_name(unit):
+ name = unit.get('GO_PACKAGE_VALUE')
+ if not name:
+ name = unit.get('GO_TEST_IMPORT_PATH')
+ if name:
+ name = os.path.basename(os.path.normpath(name))
+ elif unit.get('MODULE_TYPE') == 'PROGRAM':
+ name = 'main'
+ else:
+ name = unit.get('REALPRJNAME')
+ return name
+
+
+def need_lint(path):
+ return not path.startswith('$S/vendor/') and not path.startswith('$S/contrib/')
+
+
+def on_go_process_srcs(unit):
+ """
+ _GO_PROCESS_SRCS() macro processes only 'CGO' files. All remaining *.go files
+ and other input files are currently processed by a link command of the
+ GO module (GO_LIBRARY, GO_PROGRAM)
+ """
+
+ srcs_files = get_appended_values(unit, '_GO_SRCS_VALUE')
+
+ asm_files = []
+ c_files = []
+ cxx_files = []
+ ev_files = []
+ fbs_files = []
+ go_files = []
+ in_files = []
+ proto_files = []
+ s_files = []
+ syso_files = []
+
+ classifed_files = {
+ '.c': c_files,
+ '.cc': cxx_files,
+ '.cpp': cxx_files,
+ '.cxx': cxx_files,
+ '.ev': ev_files,
+ '.fbs': fbs_files,
+ '.go': go_files,
+ '.in': in_files,
+ '.proto': proto_files,
+ '.s': asm_files,
+ '.syso': syso_files,
+ '.C': cxx_files,
+ '.S': s_files,
+ }
+
+    # Classify files specified in the _GO_SRCS() macro by extension and process the CGO_EXPORT keyword,
+    # which can precede C/C++ files only
+ is_cgo_export = False
+ for f in srcs_files:
+ _, ext = os.path.splitext(f)
+ ext_files = classifed_files.get(ext)
+ if ext_files is not None:
+ if is_cgo_export:
+ is_cgo_export = False
+ if ext in ('.c', '.cc', '.cpp', '.cxx', '.C'):
+ unit.oncopy_file_with_context([f, f, 'OUTPUT_INCLUDES', '${BINDIR}/_cgo_export.h'])
+ f = '${BINDIR}/' + f
+ else:
+ ymake.report_configure_error('Unmatched CGO_EXPORT keyword in SRCS() macro')
+ ext_files.append(f)
+ elif f == 'CGO_EXPORT':
+ is_cgo_export = True
+ else:
+            # FIXME(snermolaev): We could report unsupported files for _GO_SRCS here
+ pass
+ if is_cgo_export:
+ ymake.report_configure_error('Unmatched CGO_EXPORT keyword in SRCS() macro')
+
+ for f in go_files:
+ if f.endswith('_test.go'):
+ ymake.report_configure_error('file {} must be listed in GO_TEST_SRCS() or GO_XTEST_SRCS() macros'.format(f))
+ go_test_files = get_appended_values(unit, '_GO_TEST_SRCS_VALUE')
+ go_xtest_files = get_appended_values(unit, '_GO_XTEST_SRCS_VALUE')
+ for f in go_test_files + go_xtest_files:
+ if not f.endswith('_test.go'):
+ ymake.report_configure_error('file {} should not be listed in GO_TEST_SRCS() or GO_XTEST_SRCS() macros'.format(f))
+
+ is_test_module = unit.enabled('GO_TEST_MODULE')
+
+ # Add gofmt style checks
+ if unit.enabled('_GO_FMT_ADD_CHECK'):
+ resolved_go_files = []
+ go_source_files = [] if is_test_module and unit.get(['GO_TEST_FOR_DIR']) else go_files
+ for path in itertools.chain(go_source_files, go_test_files, go_xtest_files):
+ if path.endswith('.go'):
+ resolved = unit.resolve_arc_path([path])
+ if resolved != path and need_lint(resolved):
+ resolved_go_files.append(resolved)
+ if resolved_go_files:
+ basedirs = {}
+ for f in resolved_go_files:
+ basedir = os.path.dirname(f)
+ if basedir not in basedirs:
+ basedirs[basedir] = []
+ basedirs[basedir].append(f)
+ for basedir in basedirs:
+ unit.onadd_check(['gofmt'] + basedirs[basedir])
+
+ # Go coverage instrumentation (NOTE! go_files list is modified here)
+ if is_test_module and unit.enabled('GO_TEST_COVER'):
+ cover_info = []
+
+ for f in go_files:
+ if f.endswith('_test.go'):
+ continue
+ cover_var = 'GoCover' + base64.b32encode(f).rstrip('=')
+ cover_file = unit.resolve_arc_path(f)
+ unit.on_go_gen_cover_go([cover_file, cover_var])
+ if cover_file.startswith('$S/'):
+ cover_file = arc_project_prefix + cover_file[3:]
+ cover_info.append('{}:{}'.format(cover_var, cover_file))
+
+ # go_files should be empty now since the initial list shouldn't contain
+ # any non-go or go test file. The value of go_files list will be used later
+ # to update the value of _GO_SRCS_VALUE
+ go_files = []
+ unit.set(['GO_COVER_INFO_VALUE', ' '.join(cover_info)])
+
+ # We have cleaned up the list of files from _GO_SRCS_VALUE var and we have to update
+ # the value since it is used in module command line
+ unit.set(['_GO_SRCS_VALUE', ' '.join(itertools.chain(go_files, asm_files, syso_files))])
+
+ unit_path = unit.path()
+
+ # Add go vet check
+ if unit.enabled('_GO_VET_ADD_CHECK') and need_lint(unit_path):
+ vet_report_file_name = os.path.join(unit_path, '{}{}'.format(unit.filename(), unit.get('GO_VET_REPORT_EXT')))
+ unit.onadd_check(["govet", '$(BUILD_ROOT)/' + tobuilddir(vet_report_file_name)[3:]])
+
+ for f in ev_files:
+ ev_proto_file = '{}.proto'.format(f)
+ unit.oncopy_file_with_context([f, ev_proto_file])
+ proto_files.append(ev_proto_file)
+
+ # Process .proto files
+ for f in proto_files:
+ unit.on_go_proto_cmd(f)
+
+ # Process .fbs files
+ for f in fbs_files:
+ unit.on_go_flatc_cmd([f, go_package_name(unit)])
+
+ # Process .in files
+ for f in in_files:
+ unit.onsrc(f)
+
+ # Generate .symabis for .s files (starting from 1.12 version)
+ if len(asm_files) > 0:
+ symabis_flags = []
+ gostd_version = unit.get('GOSTD_VERSION')
+ if compare_versions('1.16', gostd_version) >= 0:
+ import_path = get_import_path(unit)
+ symabis_flags.extend(['FLAGS', '-p', import_path])
+ if need_compiling_runtime(import_path, gostd_version):
+ symabis_flags.append('-compiling-runtime')
+ unit.on_go_compile_symabis(asm_files + symabis_flags)
+
+ # Process cgo files
+ cgo_files = get_appended_values(unit, '_CGO_SRCS_VALUE')
+
+ cgo_cflags = []
+ if len(c_files) + len(cxx_files) + len(s_files) + len(cgo_files) > 0:
+ if is_test_module:
+ go_test_for_dir = unit.get('GO_TEST_FOR_DIR')
+ if go_test_for_dir and go_test_for_dir.startswith('$S/'):
+ unit.onaddincl(['FOR', 'c', go_test_for_dir[3:]])
+ unit.onaddincl(['FOR', 'c', unit.get('MODDIR')])
+ cgo_cflags = get_appended_values(unit, 'CGO_CFLAGS_VALUE')
+
+ for f in itertools.chain(c_files, cxx_files, s_files):
+ unit.onsrc([f] + cgo_cflags)
+
+ if len(cgo_files) > 0:
+ if not unit.enabled('CGO_ENABLED'):
+ ymake.report_configure_error('trying to build with CGO (CGO_SRCS is non-empty) when CGO is disabled')
+ import_path = get_import_path(unit)
+ if import_path != runtime_cgo_path:
+ go_std_root = unit.get('GOSTD')
+ unit.onpeerdir(os.path.join(go_std_root, runtime_cgo_path))
+ race_mode = 'race' if unit.enabled('RACE') else 'norace'
+ import_runtime_cgo = 'false' if import_path in import_runtime_cgo_false[race_mode] else 'true'
+ import_syscall = 'false' if import_path in import_syscall_false[race_mode] else 'true'
+ args = [import_path] + cgo_files + ['FLAGS', '-import_runtime_cgo=' + import_runtime_cgo, '-import_syscall=' + import_syscall]
+ unit.on_go_compile_cgo1(args)
+ cgo2_cflags = get_appended_values(unit, 'CGO2_CFLAGS_VALUE')
+ for f in cgo_files:
+ if f.endswith('.go'):
+ unit.onsrc([f[:-2] + 'cgo2.c'] + cgo_cflags + cgo2_cflags)
+ else:
+ ymake.report_configure_error('file {} should not be listed in CGO_SRCS() macros'.format(f))
+ args = [go_package_name(unit)] + cgo_files
+ if len(c_files) > 0:
+ args += ['C_FILES'] + c_files
+ if len(s_files) > 0:
+ args += ['S_FILES'] + s_files
+ if len(syso_files) > 0:
+ args += ['OBJ_FILES'] + syso_files
+ unit.on_go_compile_cgo2(args)
+
+
+def on_go_resource(unit, *args):
+ args = list(args)
+ files = args[::2]
+ keys = args[1::2]
+ suffix_md5 = md5.new('@'.join(args)).hexdigest()
+ resource_go = os.path.join("resource.{}.res.go".format(suffix_md5))
+
+ unit.onpeerdir(["library/go/core/resource"])
+
+ if len(files) != len(keys):
+ ymake.report_configure_error("last file {} is missing resource key".format(files[-1]))
+
+ for i, (key, filename) in enumerate(zip(keys, files)):
+ if not key:
+ ymake.report_configure_error("file key must be non empty")
+ return
+
+ if filename == "-" and "=" not in key:
+ ymake.report_configure_error("key \"{}\" must contain = sign".format(key))
+ return
+
+ # quote key, to avoid automatic substitution of filename by absolute
+ # path in RUN_PROGRAM
+ args[2*i+1] = "notafile" + args[2*i+1]
+
+ files = [file for file in files if file != "-"]
+ unit.onrun_program([
+ "library/go/core/resource/cc",
+ "-package", go_package_name(unit),
+ "-o", resource_go] + list(args) + [
+ "IN"] + files + [
+ "OUT", resource_go])
diff --git a/build/plugins/ios_app_settings.py b/build/plugins/ios_app_settings.py
new file mode 100644
index 0000000000..60ec0b4b52
--- /dev/null
+++ b/build/plugins/ios_app_settings.py
@@ -0,0 +1,19 @@
+import _common as common
+import ymake
+import os
+
+def onios_app_settings(unit, *args):
+ tail, kv = common.sort_by_keywords(
+ {'OS_VERSION': 1, 'DEVICES': -1},
+ args
+ )
+ if tail:
+ ymake.report_configure_error('Bad IOS_COMMON_SETTINGS usage - unknown data: ' + str(tail))
+ if kv.get('OS_VERSION', []):
+ unit.onios_app_common_flags(['--minimum-deployment-target', kv.get('OS_VERSION', [])[0]])
+ unit.onios_app_assets_flags(['--filter-for-device-os-version', kv.get('OS_VERSION', [])[0]])
+ devices_flags = []
+ for device in kv.get('DEVICES', []):
+ devices_flags += ['--target-device', device]
+ if devices_flags:
+ unit.onios_app_common_flags(devices_flags)
diff --git a/build/plugins/ios_assets.py b/build/plugins/ios_assets.py
new file mode 100644
index 0000000000..5f0ccb9467
--- /dev/null
+++ b/build/plugins/ios_assets.py
@@ -0,0 +1,30 @@
+import _common as common
+import ymake
+import os
+
+
+def onios_assets(unit, *args):
+ _, kv = common.sort_by_keywords(
+ {'ROOT': 1, 'CONTENTS': -1, 'FLAGS': -1},
+ args
+ )
+ if not kv.get('ROOT', []) and kv.get('CONTENTS', []):
+ ymake.report_configure_error('Please specify ROOT directory for assets')
+ origin_root = kv.get('ROOT')[0]
+ destination_root = os.path.normpath(os.path.join('$BINDIR', os.path.basename(origin_root)))
+ rel_list = []
+ for cont in kv.get('CONTENTS', []):
+ rel = os.path.relpath(cont, origin_root)
+ if rel.startswith('..'):
+ ymake.report_configure_error('{} is not subpath of {}'.format(cont, origin_root))
+ rel_list.append(rel)
+ if not rel_list:
+ return
+ results_list = [os.path.join('$B', unit.path()[3:], os.path.basename(origin_root), i) for i in rel_list]
+ if len(kv.get('CONTENTS', [])) != len(results_list):
+ ymake.report_configure_error('IOS_ASSETTS content length is not equals results')
+ for s, d in zip(kv.get('CONTENTS', []), results_list):
+ unit.oncopy_file([s, d])
+ if kv.get('FLAGS', []):
+ unit.onios_app_assets_flags(kv.get('FLAGS', []))
+ unit.on_ios_assets([destination_root] + results_list)
diff --git a/build/plugins/java.py b/build/plugins/java.py
new file mode 100644
index 0000000000..0846dcada9
--- /dev/null
+++ b/build/plugins/java.py
@@ -0,0 +1,446 @@
+import _common as common
+import ymake
+import json
+import os
+import base64
+
+
+DELIM = '================================'
+CONTRIB_JAVA_PREFIX = 'contrib/java/'
+
+
+def split_args(s): # TODO quotes, escapes
+ return filter(None, s.split())
+
+
+def extract_macro_calls(unit, macro_value_name, macro_calls_delim):
+ if not unit.get(macro_value_name):
+ return []
+
+ return filter(None, map(split_args, unit.get(macro_value_name).replace('$' + macro_value_name, '').split(macro_calls_delim)))
+
+
+def extract_macro_calls2(unit, macro_value_name):
+ if not unit.get(macro_value_name):
+ return []
+
+ calls = []
+ for call_encoded_args in unit.get(macro_value_name).strip().split():
+ call_args = json.loads(base64.b64decode(call_encoded_args), encoding='utf-8')
+ calls.append(call_args)
+
+ return calls
+
+
+def on_run_jbuild_program(unit, *args):
+ args = list(args)
+ """
+ Custom code generation
+ @link: https://wiki.yandex-team.ru/yatool/java/#kodogeneracijarunjavaprogram
+ """
+
+ flat, kv = common.sort_by_keywords({'IN': -1, 'IN_DIR': -1, 'OUT': -1, 'OUT_DIR': -1, 'CWD': 1, 'CLASSPATH': -1, 'CP_USE_COMMAND_FILE': 1, 'ADD_SRCS_TO_CLASSPATH': 0}, args)
+ depends = kv.get('CLASSPATH', []) + kv.get('JAR', [])
+ fake_out = None
+ if depends:
+ # XXX: hack to force ymake to build dependencies
+ fake_out = "fake.out.{}".format(hash(tuple(args)))
+ unit.on_run_java(['TOOL'] + depends + ["OUT", fake_out])
+
+ if not kv.get('CP_USE_COMMAND_FILE'):
+ args += ['CP_USE_COMMAND_FILE', unit.get(['JAVA_PROGRAM_CP_USE_COMMAND_FILE']) or 'yes']
+
+ if fake_out is not None:
+ args += ['FAKE_OUT', fake_out]
+
+ prev = unit.get(['RUN_JAVA_PROGRAM_VALUE']) or ''
+ new_val = (prev + ' ' + base64.b64encode(json.dumps(list(args), encoding='utf-8'))).strip()
+ unit.set(['RUN_JAVA_PROGRAM_VALUE', new_val])
+
+
+def ongenerate_script(unit, *args):
+ """
+ heretic@ promised to make tutorial here
+ Don't forget
+ Feel free to remind
+ """
+ flat, kv = common.sort_by_keywords(
+ {'OUT': -1, 'TEMPLATE': -1, 'CUSTOM_PROPERTY': -1},
+ args
+ )
+ if len(kv.get('TEMPLATE', [])) > len(kv.get('OUT', [])):
+ ymake.report_configure_error('To many arguments for TEMPLATE parameter')
+ prev = unit.get(['GENERATE_SCRIPT_VALUE']) or ''
+ new_val = (prev + ' ' + base64.b64encode(json.dumps(list(args), encoding='utf-8'))).strip()
+ unit.set(['GENERATE_SCRIPT_VALUE', new_val])
+
+
+def onjava_module(unit, *args):
+ args_delim = unit.get('ARGS_DELIM')
+ idea_only = True if 'IDEA_ONLY' in args else False
+ if unit.get('ADD_SRCDIR_TO_TEST_DATA') == "yes":
+ unit.ondata_files(common.strip_roots(unit.path()))
+
+ if idea_only:
+ if unit.get('YA_IDE_IDEA') != 'yes':
+ return
+ if unit.get('YMAKE_JAVA_MODULES') != 'yes':
+ return
+
+ data = {
+ 'BUNDLE_NAME': unit.name(),
+ 'PATH': unit.path(),
+ 'IDEA_ONLY': 'yes' if idea_only else 'no',
+ 'MODULE_TYPE': unit.get('MODULE_TYPE'),
+ 'MODULE_ARGS': unit.get('MODULE_ARGS'),
+ 'MANAGED_PEERS': '${MANAGED_PEERS}',
+ 'MANAGED_PEERS_CLOSURE': '${MANAGED_PEERS_CLOSURE}',
+ 'NON_NAMAGEABLE_PEERS': '${NON_NAMAGEABLE_PEERS}',
+ 'TEST_CLASSPATH_MANAGED': '${TEST_CLASSPATH_MANAGED}',
+ 'EXCLUDE': extract_macro_calls(unit, 'EXCLUDE_VALUE', args_delim),
+ 'JAVA_SRCS': extract_macro_calls(unit, 'JAVA_SRCS_VALUE', args_delim),
+ 'JAVAC_FLAGS': extract_macro_calls(unit, 'JAVAC_FLAGS_VALUE', args_delim),
+ 'ANNOTATION_PROCESSOR': extract_macro_calls(unit, 'ANNOTATION_PROCESSOR_VALUE', args_delim),
+ 'EXTERNAL_JAR': extract_macro_calls(unit, 'EXTERNAL_JAR_VALUE', args_delim),
+ 'RUN_JAVA_PROGRAM': extract_macro_calls2(unit, 'RUN_JAVA_PROGRAM_VALUE'),
+ 'RUN_JAVA_PROGRAM_MANAGED': '${RUN_JAVA_PROGRAM_MANAGED}',
+ 'MAVEN_GROUP_ID': extract_macro_calls(unit, 'MAVEN_GROUP_ID_VALUE', args_delim),
+ 'JAR_INCLUDE_FILTER': extract_macro_calls(unit, 'JAR_INCLUDE_FILTER_VALUE', args_delim),
+ 'JAR_EXCLUDE_FILTER': extract_macro_calls(unit, 'JAR_EXCLUDE_FILTER_VALUE', args_delim),
+
+ # TODO remove when java test dart is in prod
+ 'UNITTEST_DIR': unit.get('UNITTEST_DIR'),
+ 'SYSTEM_PROPERTIES': extract_macro_calls(unit, 'SYSTEM_PROPERTIES_VALUE', args_delim),
+ 'JVM_ARGS': extract_macro_calls(unit, 'JVM_ARGS_VALUE', args_delim),
+ 'TEST_CWD': extract_macro_calls(unit, 'TEST_CWD_VALUE', args_delim),
+ 'TEST_FORK_MODE': extract_macro_calls(unit, 'TEST_FORK_MODE', args_delim),
+ 'SPLIT_FACTOR': extract_macro_calls(unit, 'TEST_SPLIT_FACTOR', args_delim),
+ 'TIMEOUT': extract_macro_calls(unit, 'TEST_TIMEOUT', args_delim),
+ 'TAG': extract_macro_calls(unit, 'TEST_TAGS_VALUE', args_delim),
+ 'SIZE': extract_macro_calls(unit, 'TEST_SIZE_NAME', args_delim),
+ 'DEPENDS': extract_macro_calls(unit, 'TEST_DEPENDS_VALUE', args_delim),
+ 'IDEA_EXCLUDE': extract_macro_calls(unit, 'IDEA_EXCLUDE_DIRS_VALUE', args_delim),
+ 'IDEA_RESOURCE': extract_macro_calls(unit, 'IDEA_RESOURCE_DIRS_VALUE', args_delim),
+ 'IDEA_MODULE_NAME': extract_macro_calls(unit, 'IDEA_MODULE_NAME_VALUE', args_delim),
+ 'GENERATE_SCRIPT': extract_macro_calls2(unit, 'GENERATE_SCRIPT_VALUE'),
+ 'FAKEID': extract_macro_calls(unit, 'FAKEID', args_delim),
+ 'TEST_DATA': extract_macro_calls(unit, 'TEST_DATA_VALUE', args_delim),
+ 'JAVA_FORBIDDEN_LIBRARIES': extract_macro_calls(unit, 'JAVA_FORBIDDEN_LIBRARIES_VALUE', args_delim),
+ 'JDK_RESOURCE': 'JDK' + (unit.get('JDK_VERSION') or unit.get('JDK_REAL_VERSION') or '_DEFAULT')
+ }
+ if unit.get('ENABLE_PREVIEW_VALUE') == 'yes' and (unit.get('JDK_VERSION') or unit.get('JDK_REAL_VERSION')) in ('15', '16', '17', '18', '19'):
+ data['ENABLE_PREVIEW'] = extract_macro_calls(unit, 'ENABLE_PREVIEW_VALUE', args_delim)
+
+ if unit.get('SAVE_JAVAC_GENERATED_SRCS_DIR') and unit.get('SAVE_JAVAC_GENERATED_SRCS_TAR'):
+ data['SAVE_JAVAC_GENERATED_SRCS_DIR'] = extract_macro_calls(unit, 'SAVE_JAVAC_GENERATED_SRCS_DIR', args_delim)
+ data['SAVE_JAVAC_GENERATED_SRCS_TAR'] = extract_macro_calls(unit, 'SAVE_JAVAC_GENERATED_SRCS_TAR', args_delim)
+
+ if unit.get('JAVA_ADD_DLLS_VALUE') == 'yes':
+ data['ADD_DLLS_FROM_DEPENDS'] = extract_macro_calls(unit, 'JAVA_ADD_DLLS_VALUE', args_delim)
+
+ if unit.get('ERROR_PRONE_VALUE') == 'yes':
+ data['ERROR_PRONE'] = extract_macro_calls(unit, 'ERROR_PRONE_VALUE', args_delim)
+
+ if unit.get('WITH_KOTLIN_VALUE') == 'yes':
+ data['WITH_KOTLIN'] = extract_macro_calls(unit, 'WITH_KOTLIN_VALUE', args_delim)
+ if unit.get('KOTLIN_JVM_TARGET'):
+ data['KOTLIN_JVM_TARGET'] = extract_macro_calls(unit, 'KOTLIN_JVM_TARGET', args_delim)
+ if unit.get('KOTLINC_FLAGS_VALUE'):
+ data['KOTLINC_FLAGS'] = extract_macro_calls(unit, 'KOTLINC_FLAGS_VALUE', args_delim)
+ if unit.get('KOTLINC_OPTS_VALUE'):
+ data['KOTLINC_OPTS'] = extract_macro_calls(unit, 'KOTLINC_OPTS_VALUE', args_delim)
+
+ if unit.get('DIRECT_DEPS_ONLY_VALUE') == 'yes':
+ data['DIRECT_DEPS_ONLY'] = extract_macro_calls(unit, 'DIRECT_DEPS_ONLY_VALUE', args_delim)
+
+ if unit.get('JAVA_EXTERNAL_DEPENDENCIES_VALUE'):
+ valid = []
+ for dep in sum(extract_macro_calls(unit, 'JAVA_EXTERNAL_DEPENDENCIES_VALUE', args_delim), []):
+ if os.path.normpath(dep).startswith('..'):
+ ymake.report_configure_error('{}: {} - relative paths in JAVA_EXTERNAL_DEPENDENCIES is not allowed'.format(unit.path(), dep))
+ elif os.path.isabs(dep):
+ ymake.report_configure_error('{}: {} absolute paths in JAVA_EXTERNAL_DEPENDENCIES is not allowed'.format(unit.path(), dep))
+ else:
+ valid.append(dep)
+ if valid:
+ data['EXTERNAL_DEPENDENCIES'] = [valid]
+
+ if unit.get('MAKE_UBERJAR_VALUE') == 'yes':
+ if unit.get('MODULE_TYPE') != 'JAVA_PROGRAM':
+ ymake.report_configure_error('{}: UBERJAR supported only for JAVA_PROGRAM module type'.format(unit.path()))
+ data['UBERJAR'] = extract_macro_calls(unit, 'MAKE_UBERJAR_VALUE', args_delim)
+ data['UBERJAR_PREFIX'] = extract_macro_calls(unit, 'UBERJAR_PREFIX_VALUE', args_delim)
+ data['UBERJAR_HIDE_EXCLUDE'] = extract_macro_calls(unit, 'UBERJAR_HIDE_EXCLUDE_VALUE', args_delim)
+ data['UBERJAR_PATH_EXCLUDE'] = extract_macro_calls(unit, 'UBERJAR_PATH_EXCLUDE_VALUE', args_delim)
+ data['UBERJAR_MANIFEST_TRANSFORMER_MAIN'] = extract_macro_calls(unit, 'UBERJAR_MANIFEST_TRANSFORMER_MAIN_VALUE', args_delim)
+ data['UBERJAR_MANIFEST_TRANSFORMER_ATTRIBUTE'] = extract_macro_calls(unit, 'UBERJAR_MANIFEST_TRANSFORMER_ATTRIBUTE_VALUE', args_delim)
+ data['UBERJAR_APPENDING_TRANSFORMER'] = extract_macro_calls(unit, 'UBERJAR_APPENDING_TRANSFORMER_VALUE', args_delim)
+ data['UBERJAR_SERVICES_RESOURCE_TRANSFORMER'] = extract_macro_calls(unit, 'UBERJAR_SERVICES_RESOURCE_TRANSFORMER_VALUE', args_delim)
+
+ if unit.get('WITH_JDK_VALUE') == 'yes':
+ if unit.get('MODULE_TYPE') != 'JAVA_PROGRAM':
+ ymake.report_configure_error('{}: JDK export supported only for JAVA_PROGRAM module type'.format(unit.path()))
+ data['WITH_JDK'] = extract_macro_calls(unit, 'WITH_JDK_VALUE', args_delim)
+
+ if not data['EXTERNAL_JAR']:
+ has_processor = extract_macro_calls(unit, 'GENERATE_VCS_JAVA_INFO_NODEP', args_delim)
+        # IMPORTANT: before switching vcs_info.py to python3 the value was always evaluated to $YMAKE_PYTHON, but no
+        # code in the java dart parser extracts its value; it only checks this key for existence.
+ data['EMBED_VCS'] = [['yes']]
+ # FORCE_VCS_INFO_UPDATE is responsible for setting special value of VCS_INFO_DISABLE_CACHE__NO_UID__
+ macro_val = extract_macro_calls(unit, 'FORCE_VCS_INFO_UPDATE', args_delim)
+ macro_str = macro_val[0][0] if macro_val and macro_val[0] and macro_val[0][0] else ''
+ if macro_str and macro_str == 'yes':
+ data['VCS_INFO_DISABLE_CACHE__NO_UID__'] = macro_val
+
+ for java_srcs_args in data['JAVA_SRCS']:
+ external = None
+
+ for i in xrange(len(java_srcs_args)):
+ arg = java_srcs_args[i]
+
+ if arg == 'EXTERNAL':
+ if not i + 1 < len(java_srcs_args):
+ continue # TODO configure error
+
+ ex = java_srcs_args[i + 1]
+
+ if ex in ('EXTERNAL', 'SRCDIR', 'PACKAGE_PREFIX', 'EXCLUDE'):
+ continue # TODO configure error
+
+ if external is not None:
+ continue # TODO configure error
+
+ external = ex
+
+ if external:
+ unit.onpeerdir(external)
+
+ for k, v in data.items():
+ if not v:
+ data.pop(k)
+
+ dart = 'JAVA_DART: ' + base64.b64encode(json.dumps(data)) + '\n' + DELIM + '\n'
+
+ unit.set_property(['JAVA_DART_DATA', dart])
+ if not idea_only and unit.get('MODULE_TYPE') in ('JAVA_PROGRAM', 'JAVA_LIBRARY', 'JTEST', 'TESTNG', 'JUNIT5') and not unit.path().startswith('$S/contrib/java'):
+ unit.on_add_classpath_clash_check()
+ if unit.get('LINT_LEVEL_VALUE') != "none" and unit.get('_NO_LINT_VALUE') != 'none':
+ unit.onadd_check(['JAVA_STYLE', unit.get('LINT_LEVEL_VALUE')])
+
+
+def on_add_java_style_checks(unit, *args):
+ if unit.get('LINT_LEVEL_VALUE') != "none" and unit.get('_NO_LINT_VALUE') != 'none':
+ unit.onadd_check(['JAVA_STYLE', unit.get('LINT_LEVEL_VALUE')] + list(args))
+
+
+def on_add_kotlin_style_checks(unit, *args):
+ """
+ ktlint can be disabled using NO_LINT() and NO_LINT(ktlint)
+ """
+ if unit.get('WITH_KOTLIN_VALUE') == 'yes':
+ no_lint_value = unit.get('_NO_LINT_VALUE')
+ if no_lint_value == '':
+ unit.onadd_check(['ktlint'] + list(args))
+ elif no_lint_value not in ('none', 'none_internal', 'ktlint'):
+ ymake.report_configure_error('Unsupported value for NO_LINT macro: {}'.format(no_lint_value))
+
+
+
+def on_add_classpath_clash_check(unit, *args):
+ jdeps_val = (unit.get('CHECK_JAVA_DEPS_VALUE') or '').lower()
+ if jdeps_val and jdeps_val not in ('yes', 'no', 'strict'):
+ ymake.report_configure_error('CHECK_JAVA_DEPS: "yes", "no" or "strict" required')
+ if jdeps_val and jdeps_val != 'no':
+ unit.onjava_test_deps(jdeps_val)
+
+
+# Ymake java modules related macroses
+
+
+def onexternal_jar(unit, *args):
+ args = list(args)
+ flat, kv = common.sort_by_keywords({'SOURCES': 1}, args)
+ if not flat:
+ ymake.report_configure_error('EXTERNAL_JAR requires exactly one resource URL of compiled jar library')
+ res = flat[0]
+ resid = res[4:] if res.startswith('sbr:') else res
+ unit.set(['JAR_LIB_RESOURCE', resid])
+ unit.set(['JAR_LIB_RESOURCE_URL', res])
+
+
+def on_check_java_srcdir(unit, *args):
+ args = list(args)
+ for arg in args:
+ if not '$' in arg:
+ arc_srcdir = os.path.join(unit.get('MODDIR'), arg)
+ abs_srcdir = unit.resolve(os.path.join("$S/", arc_srcdir))
+ if not os.path.exists(abs_srcdir) or not os.path.isdir(abs_srcdir):
+ ymake.report_configure_error(
+ 'Trying to set a [[alt1]]JAVA_SRCS[[rst]] for a missing directory: [[imp]]$S/{}[[rst]]',
+ missing_dir=arc_srcdir
+ )
+ return
+ srcdir = unit.resolve_arc_path(arg)
+ if srcdir and not srcdir.startswith('$S'):
+ continue
+ abs_srcdir = unit.resolve(srcdir) if srcdir else unit.resolve(arg)
+ if not os.path.exists(abs_srcdir) or not os.path.isdir(abs_srcdir):
+ ymake.report_configure_error(
+ 'Trying to set a [[alt1]]JAVA_SRCS[[rst]] for a missing directory: [[imp]]{}[[rst]]',
+ missing_dir=srcdir
+ )
+
+
+def on_fill_jar_copy_resources_cmd(unit, *args):
+ if len(args) == 4:
+ varname, srcdir, base_classes_dir, reslist = tuple(args)
+ package = ''
+ else:
+ varname, srcdir, base_classes_dir, package, reslist = tuple(args)
+ dest_dir = os.path.join(base_classes_dir, *package.split('.')) if package else base_classes_dir
+ var = unit.get(varname)
+ var += ' && $FS_TOOLS copy_files {} {} {}'.format(srcdir if srcdir.startswith('"$') else '${CURDIR}/' + srcdir, dest_dir, reslist)
+ unit.set([varname, var])
+
+
+def on_fill_jar_gen_srcs(unit, *args):
+ varname, jar_type, srcdir, base_classes_dir, java_list, kt_list, groovy_list, res_list = tuple(args[0:8])
+ resolved_srcdir = unit.resolve_arc_path(srcdir)
+ if not resolved_srcdir.startswith('$') or resolved_srcdir.startswith('$S'):
+ return
+
+ exclude_pos = args.index('EXCLUDE')
+ globs = args[7:exclude_pos]
+ excludes = args[exclude_pos + 1:]
+ var = unit.get(varname)
+ var += ' && ${{cwd:BINDIR}} $YMAKE_PYTHON ${{input:"build/scripts/resolve_java_srcs.py"}} --append -d {} -s {} -k {} -g {} -r {} --include-patterns {}'.format(srcdir, java_list, kt_list, groovy_list, res_list, ' '.join(globs))
+ if jar_type == 'SRC_JAR':
+ var += ' --all-resources'
+ if len(excludes) > 0:
+ var += ' --exclude-patterns {}'.format(' '.join(excludes))
+ if unit.get('WITH_KOTLIN_VALUE') == 'yes':
+ var += ' --resolve-kotlin'
+ unit.set([varname, var])
+
+
+def on_check_run_java_prog_classpath(unit, *args):
+ if len(args) != 1:
+ ymake.report_configure_error('multiple CLASSPATH elements in RUN_JAVA_PROGRAM invocation no more supported. Use JAVA_RUNTIME_PEERDIR on the JAVA_PROGRAM module instead')
+
+
+def extract_words(words, keys):
+ kv = {}
+ k = None
+
+ for w in words:
+ if w in keys:
+ k = w
+ else:
+ if not k in kv:
+ kv[k] = []
+ kv[k].append(w)
+
+ return kv
+
+
+def parse_words(words):
+ kv = extract_words(words, {'OUT', 'TEMPLATE'})
+ if not 'TEMPLATE' in kv:
+ kv['TEMPLATE'] = ['template.tmpl']
+ ws = []
+ for item in ('OUT', 'TEMPLATE'):
+ for i, word in list(enumerate(kv[item])):
+ if word == 'CUSTOM_PROPERTY':
+ ws += kv[item][i:]
+ kv[item] = kv[item][:i]
+ templates = kv['TEMPLATE']
+ outputs = kv['OUT']
+ if len(outputs) < len(templates):
+ ymake.report_configure_error('To many arguments for TEMPLATE parameter')
+ return
+ if ws and ws[0] != 'CUSTOM_PROPERTY':
+ ymake.report_configure_error('''Can't parse {}'''.format(ws))
+ custom_props = []
+ for item in ws:
+ if item == 'CUSTOM_PROPERTY':
+ custom_props.append([])
+ else:
+ custom_props[-1].append(item)
+ props = []
+ for p in custom_props:
+ if not p:
+ ymake.report_configure_error('Empty CUSTOM_PROPERTY')
+ continue
+ props.append('-B')
+ if len(p) > 1:
+ props.append(base64.b64encode("{}={}".format(p[0], ' '.join(p[1:]))))
+ else:
+ ymake.report_configure_error('CUSTOM_PROPERTY "{}" value is not specified'.format(p[0]))
+ for i, o in enumerate(outputs):
+ yield o, templates[min(i, len(templates) - 1)], props
+
+
+def on_ymake_generate_script(unit, *args):
+ for out, tmpl, props in parse_words(list(args)):
+ unit.on_add_gen_java_script([out, tmpl] + list(props))
+
+
+def on_jdk_version_macro_check(unit, *args):
+ if len(args) != 1:
+ unit.message(["error", "Invalid syntax. Single argument required."])
+ jdk_version = args[0]
+ available_versions = ('10', '11', '15', '16', '17', '18', '19',)
+ if jdk_version not in available_versions:
+ ymake.report_configure_error("Invalid jdk version: {}. {} are available".format(jdk_version, available_versions))
+ if int(jdk_version) >= 19 and unit.get('WITH_JDK_VALUE') != 'yes' and unit.get('MODULE_TAG') == 'JAR_RUNNABLE':
+ msg = (
+ "Missing WITH_JDK() macro for JDK version >= 19"
+ # temporary link with additional explanation
+ ". For more info see https://clubs.at.yandex-team.ru/arcadia/28543"
+ )
+ ymake.report_configure_error(msg)
+
+
+def _maven_coords_for_project(unit, project_dir):
+ parts = project_dir.split('/')
+
+ g = '.'.join(parts[2:-2])
+ a = parts[-2]
+ v = parts[-1]
+ c = ''
+
+ pom_path = unit.resolve(os.path.join('$S', project_dir, 'pom.xml'))
+ if os.path.exists(pom_path):
+ import xml.etree.ElementTree as et
+ with open(pom_path) as f:
+ root = et.fromstring(f.read())
+ for xpath in ('./{http://maven.apache.org/POM/4.0.0}artifactId', './artifactId'):
+ artifact = root.find(xpath)
+ if artifact is not None:
+ artifact = artifact.text
+ if a != artifact and a.startswith(artifact):
+ c = a[len(artifact):].lstrip('-_')
+ a = artifact
+ break
+
+ return '{}:{}:{}:{}'.format(g, a, v, c)
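+
+# Rough illustration (the path below is hypothetical and assumes no pom.xml override):
+#   project_dir 'contrib/java/com/google/guava/guava/30.1' would yield the coords
+#   'com.google.guava:guava:30.1:' (group, artifact, version, empty classifier).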
+
+
+def on_setup_maven_export_coords_if_need(unit, *args):
+ if not unit.enabled('MAVEN_EXPORT'):
+ return
+
+ unit.set(['MAVEN_EXPORT_COORDS_GLOBAL', _maven_coords_for_project(unit, args[0])])
+
+
+def on_setup_project_coords_if_needed(unit, *args):
+ if not unit.enabled('EXPORT_GRADLE'):
+ return
+
+ project_dir = args[0]
+ if project_dir.startswith(CONTRIB_JAVA_PREFIX):
+ value = '\\"{}\\"'.format(_maven_coords_for_project(unit, project_dir).rstrip(':'))
+ else:
+ value = 'project(\\":{}\\")'.format(project_dir.replace('/', ':'))
+ unit.set(['_EXPORT_GRADLE_PROJECT_COORDS', value])
diff --git a/build/plugins/large_files.py b/build/plugins/large_files.py
new file mode 100644
index 0000000000..33a78d7110
--- /dev/null
+++ b/build/plugins/large_files.py
@@ -0,0 +1,39 @@
+import os
+import ymake
+from _common import strip_roots
+
+PLACEHOLDER_EXT = "external"
+
+
+def onlarge_files(unit, *args):
+ """
+ @usage LARGE_FILES([AUTOUPDATED] Files...)
+
+    Use a large file either from the working copy or from remote storage via the placeholder <File>.external.
+    If <File> is present locally (and is not a symlink!) it will be copied to the build directory.
+    Otherwise the macro will try to locate <File>.external, parse it and retrieve the file during the build phase.
+ """
+ args = list(args)
+
+ if args and args[0] == 'AUTOUPDATED':
+ args = args[1:]
+
+ for arg in args:
+ if arg == 'AUTOUPDATED':
+ unit.message(["warn", "Please set AUTOUPDATED argument before other file names"])
+ continue
+
+ src = unit.resolve_arc_path(arg)
+ if src.startswith("$S"):
+ msg = "Used local large file {}. Don't forget to run 'ya upload --update-external' and commit {}.{}".format(src, src, PLACEHOLDER_EXT)
+ unit.message(["warn", msg])
+ unit.oncopy_file([arg, arg])
+ else:
+ out_file = strip_roots(os.path.join(unit.path(), arg))
+ external = "{}.{}".format(arg, PLACEHOLDER_EXT)
+ from_external_cmd = [external, out_file, 'OUT_NOAUTO', arg]
+ if os.path.dirname(arg):
+ from_external_cmd.extend(("RENAME", os.path.basename(arg)))
+ unit.on_from_external(from_external_cmd)
+ unit.onadd_check(['check.external', external])
+
diff --git a/build/plugins/lib/__init__.py b/build/plugins/lib/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/build/plugins/lib/__init__.py
diff --git a/build/plugins/lib/_metric_resolvers.py b/build/plugins/lib/_metric_resolvers.py
new file mode 100644
index 0000000000..270eb78345
--- /dev/null
+++ b/build/plugins/lib/_metric_resolvers.py
@@ -0,0 +1,11 @@
+import re
+
+VALUE_PATTERN = re.compile(r"^\s*(?P<value>\d+)\s*$")
+
+
+def resolve_value(val):
+ match = VALUE_PATTERN.match(val)
+ if not match:
+ return None
+ val = match.group('value')
+ return int(val)
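+
+# Expected behaviour, for reference (values are illustrative):
+#   resolve_value("  42 ")  -> 42
+#   resolve_value("abc")    -> None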
diff --git a/build/plugins/lib/nots/__init__.py b/build/plugins/lib/nots/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/build/plugins/lib/nots/__init__.py
diff --git a/build/plugins/lib/nots/constants.py b/build/plugins/lib/nots/constants.py
new file mode 100644
index 0000000000..d819e86192
--- /dev/null
+++ b/build/plugins/lib/nots/constants.py
@@ -0,0 +1,12 @@
+from lib.nots.semver import Version
+
+# it is crucial to keep this array sorted
+SUPPORTED_NODE_VERSIONS = [
+ Version.from_str("12.18.4"),
+ Version.from_str("12.22.12"),
+ Version.from_str("14.21.1"),
+ Version.from_str("16.18.1"),
+ Version.from_str("18.12.1")
+]
+
+DEFAULT_NODE_VERSION = SUPPORTED_NODE_VERSIONS[0]
diff --git a/build/plugins/lib/nots/package_manager/__init__.py b/build/plugins/lib/nots/package_manager/__init__.py
new file mode 100644
index 0000000000..4128980efe
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/__init__.py
@@ -0,0 +1,11 @@
+from .pnpm import PnpmPackageManager
+from .base import PackageJson, constants, utils, bundle_node_modules, extract_node_modules
+
+
+manager = PnpmPackageManager
+
+__all__ = [
+ "PackageJson",
+ "constants", "utils",
+ "bundle_node_modules", "extract_node_modules"
+]
diff --git a/build/plugins/lib/nots/package_manager/base/__init__.py b/build/plugins/lib/nots/package_manager/base/__init__.py
new file mode 100644
index 0000000000..8950fd818e
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/__init__.py
@@ -0,0 +1,14 @@
+from . import constants, utils
+from .lockfile import BaseLockfile, LockfilePackageMeta, LockfilePackageMetaInvalidError
+from .package_json import PackageJson
+from .package_manager import BasePackageManager, PackageManagerError, PackageManagerCommandError
+from .node_modules_bundler import bundle_node_modules, extract_node_modules
+
+
+__all__ = [
+ "constants", "utils",
+ "BaseLockfile", "LockfilePackageMeta", "LockfilePackageMetaInvalidError",
+ "BasePackageManager", "PackageManagerError", "PackageManagerCommandError",
+ "PackageJson",
+ "bundle_node_modules", "extract_node_modules",
+]
diff --git a/build/plugins/lib/nots/package_manager/base/constants.py b/build/plugins/lib/nots/package_manager/base/constants.py
new file mode 100644
index 0000000000..ecdbe05ebf
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/constants.py
@@ -0,0 +1,5 @@
+PACKAGE_JSON_FILENAME = "package.json"
+NODE_MODULES_DIRNAME = "node_modules"
+NODE_MODULES_BUNDLE_FILENAME = "node_modules.tar"
+NODE_MODULES_WORKSPACE_BUNDLE_FILENAME = "workspace_node_modules.tar"
+NPM_REGISTRY_URL = "http://npm.yandex-team.ru"
diff --git a/build/plugins/lib/nots/package_manager/base/lockfile.py b/build/plugins/lib/nots/package_manager/base/lockfile.py
new file mode 100644
index 0000000000..9b9c0be954
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/lockfile.py
@@ -0,0 +1,68 @@
+import os
+
+from abc import ABCMeta, abstractmethod
+from six import add_metaclass
+
+
+class LockfilePackageMeta(object):
+ """
+ Basic struct representing package meta from lockfile.
+ """
+ __slots__ = ("name", "version", "sky_id", "integrity", "integrity_algorithm", "tarball_path")
+
+ @staticmethod
+ def from_str(s):
+ return LockfilePackageMeta(*s.strip().split(" "))
+
+ def __init__(self, name, version, sky_id, integrity, integrity_algorithm):
+ self.name = name
+ self.version = version
+ self.sky_id = sky_id
+ self.integrity = integrity
+ self.integrity_algorithm = integrity_algorithm
+ self.tarball_path = "{}-{}.tgz".format(name, version)
+
+ def to_str(self):
+ return " ".join([self.name, self.version, self.sky_id, self.integrity, self.integrity_algorithm])
+
+
+class LockfilePackageMetaInvalidError(RuntimeError):
+ pass
+
+
+@add_metaclass(ABCMeta)
+class BaseLockfile(object):
+ @classmethod
+ def load(cls, path):
+ """
+ :param path: lockfile path
+ :type path: str
+ :rtype: BaseLockfile
+ """
+ pj = cls(path)
+ pj.read()
+
+ return pj
+
+ def __init__(self, path):
+ if not os.path.isabs(path):
+ raise TypeError("Absolute path required, given: {}".format(path))
+
+ self.path = path
+ self.data = None
+
+ @abstractmethod
+ def read(self):
+ pass
+
+ @abstractmethod
+ def write(self, path=None):
+ pass
+
+ @abstractmethod
+ def get_packages_meta(self):
+ pass
+
+ @abstractmethod
+ def update_tarball_resolutions(self, fn):
+ pass
diff --git a/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py b/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py
new file mode 100644
index 0000000000..c835c4d7ca
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py
@@ -0,0 +1,66 @@
+import os
+import tarfile
+
+from io import BytesIO
+
+from .utils import build_nm_path
+
+
+PEERS_DIR = ".peers"
+PEERS_INDEX = "index"
+
+
+def bundle_node_modules(build_root, peers, node_modules_path, bundle_path):
+ """
+ Creates node_modules bundle.
+ Bundle contains node_modules directory, peers' node_modules directories,
+    and an index file with the list of added peers (\\n delimited).
+ :param build_root: arcadia build root
+ :type build_root: str
+ :param peers: list of peers (arcadia root related)
+ :type peers: list of str
+ :param node_modules_path: node_modules path
+ :type node_modules_path: str
+ :param bundle_path: tarball path
+ :type bundle_path: str
+ """
+ with tarfile.open(bundle_path, "w") as tf:
+ tf.add(node_modules_path, arcname=".")
+
+ # Peers' node_modules.
+ added_peers = []
+ for p in peers:
+ peer_nm_path = build_nm_path(os.path.join(build_root, p))
+ peer_bundled_nm_path = build_nm_path(os.path.join(PEERS_DIR, p))
+ if not os.path.isdir(peer_nm_path):
+ continue
+ tf.add(peer_nm_path, arcname=peer_bundled_nm_path)
+ added_peers.append(p)
+
+ # Peers index.
+ peers_index = "\n".join(added_peers)
+ ti = tarfile.TarInfo(name=os.path.join(PEERS_DIR, PEERS_INDEX))
+ ti.size = len(peers_index)
+ tf.addfile(ti, BytesIO(peers_index.encode()))
+
+
+def extract_node_modules(build_root, node_modules_path, bundle_path):
+ """
+ Extracts node_modules bundle.
+ :param build_root: arcadia build root
+ :type build_root: str
+ :param node_modules_path: node_modules path
+ :type node_modules_path: str
+ :param bundle_path: tarball path
+ :type bundle_path: str
+ """
+ with tarfile.open(bundle_path) as tf:
+ tf.extractall(node_modules_path)
+
+    with open(os.path.join(node_modules_path, PEERS_DIR, PEERS_INDEX)) as peers_file:
+        peers = peers_file.read().split("\n")
+ for p in peers:
+ if not p:
+ continue
+ bundled_nm_path = build_nm_path(os.path.join(node_modules_path, PEERS_DIR, p))
+ nm_path = build_nm_path(os.path.join(build_root, p))
+ os.rename(bundled_nm_path, nm_path)
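+
+# For reference, a bundle produced by bundle_node_modules() is laid out roughly as
+# (peer paths are hypothetical):
+#   ./...                                   - contents of the module's node_modules
+#   .peers/<peer/module/path>/node_modules  - node_modules of each bundled peer
+#   .peers/index                            - newline-delimited list of bundled peers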
diff --git a/build/plugins/lib/nots/package_manager/base/package_json.py b/build/plugins/lib/nots/package_manager/base/package_json.py
new file mode 100644
index 0000000000..29d7b01203
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/package_json.py
@@ -0,0 +1,170 @@
+import os
+import json
+
+from six import iteritems
+
+from .utils import build_pj_path
+
+
+class PackageJsonWorkspaceError(RuntimeError):
+ pass
+
+
+class PackageJson(object):
+ DEP_KEY = "dependencies"
+ DEV_DEP_KEY = "devDependencies"
+ PEER_DEP_KEY = "peerDependencies"
+ OPT_DEP_KEY = "optionalDependencies"
+ DEP_KEYS = (DEP_KEY, DEV_DEP_KEY, PEER_DEP_KEY, OPT_DEP_KEY)
+
+ WORKSPACE_SCHEMA = "workspace:"
+
+ @classmethod
+ def load(cls, path):
+ """
+ :param path: package.json path
+ :type path: str
+ :rtype: PackageJson
+ """
+ pj = cls(path)
+ pj.read()
+
+ return pj
+
+ def __init__(self, path):
+ if not os.path.isabs(path):
+ raise TypeError("Absolute path required, given: {}".format(path))
+
+ self.path = path
+ self.data = None
+
+ def read(self):
+ with open(self.path) as f:
+ self.data = json.load(f)
+
+ def write(self, path=None):
+ """
+ :param path: path to store package.json, defaults to original path
+ :type path: str
+ """
+ if path is None:
+ path = self.path
+
+ directory = os.path.dirname(path)
+ if not os.path.exists(directory):
+ os.mkdir(directory)
+
+ with open(path, "w") as f:
+ json.dump(self.data, f, indent=4)
+
+ def get_name(self):
+ return self.data["name"]
+
+ def get_version(self):
+ return self.data["version"]
+
+ def get_description(self):
+ return self.data.get("description")
+
+ def get_nodejs_version(self):
+ return self.data.get("engines", {}).get("node")
+
+ def dependencies_iter(self):
+ for key in self.DEP_KEYS:
+ deps = self.data.get(key)
+ if not deps:
+ continue
+
+ for name, spec in iteritems(deps):
+ yield (name, spec)
+
+ def get_workspace_dep_spec_paths(self):
+ """
+ Returns names and paths from specifiers of the defined workspace dependencies.
+ :rtype: list of (str, str)
+ """
+ spec_paths = []
+ schema = self.WORKSPACE_SCHEMA
+ schema_len = len(schema)
+
+ for name, spec in self.dependencies_iter():
+ if not spec.startswith(schema):
+ continue
+
+ spec_path = spec[schema_len:]
+ if not (spec_path.startswith(".") or spec_path.startswith("..")):
+ raise PackageJsonWorkspaceError(
+ "Expected relative path specifier for workspace dependency, but got '{}' for {} in {}".format(
+ spec, name, self.path
+ )
+ )
+
+ spec_paths.append((name, spec_path))
+
+ return spec_paths
+
+ def get_workspace_dep_paths(self, base_path=None):
+ """
+ Returns paths of the defined workspace dependencies.
+ :param base_path: base path to resolve relative dep paths
+ :type base_path: str
+ :rtype: list of str
+ """
+ if base_path is None:
+ base_path = os.path.dirname(self.path)
+
+ return [os.path.normpath(os.path.join(base_path, p)) for _, p in self.get_workspace_dep_spec_paths()]
+
+ def get_workspace_deps(self):
+ """
+ :rtype: list of PackageJson
+ """
+ ws_deps = []
+ pj_dir = os.path.dirname(self.path)
+
+ for name, rel_path in self.get_workspace_dep_spec_paths():
+ dep_path = os.path.normpath(os.path.join(pj_dir, rel_path))
+ dep_pj = PackageJson.load(build_pj_path(dep_path))
+
+ if name != dep_pj.get_name():
+ raise PackageJsonWorkspaceError(
+ "Workspace dependency name mismatch, found '{}' instead of '{}' in {}".format(
+ name, dep_pj.get_name(), self.path
+ )
+ )
+
+ ws_deps.append(dep_pj)
+
+ return ws_deps
+
+ def get_workspace_map(self, ignore_self=False):
+ """
+ Returns absolute paths of the workspace dependencies (including transitive) mapped to package.json and depth.
+ :param ignore_self: whether path of the current module will be excluded
+ :type ignore_self: bool
+ :rtype: dict of (PackageJson, int)
+ """
+ ws_deps = {}
+ # list of (pj, depth)
+ pj_queue = [(self, 0)]
+
+ while len(pj_queue):
+ (pj, depth) = pj_queue.pop()
+ pj_dir = os.path.dirname(pj.path)
+ if pj_dir in ws_deps:
+ continue
+
+ if not ignore_self or pj != self:
+ ws_deps[pj_dir] = (pj, depth)
+
+ for dep_pj in pj.get_workspace_deps():
+ pj_queue.append((dep_pj, depth + 1))
+
+ return ws_deps
+
+ def get_dep_paths_by_names(self):
+ """
+ Returns dict of {dependency_name: dependency_path}
+ """
+ ws_map = self.get_workspace_map()
+ return {pj.get_name(): path for path, (pj, _) in ws_map.items()}
diff --git a/build/plugins/lib/nots/package_manager/base/package_manager.py b/build/plugins/lib/nots/package_manager/base/package_manager.py
new file mode 100644
index 0000000000..d3da1d3579
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/package_manager.py
@@ -0,0 +1,141 @@
+import os
+import sys
+import subprocess
+
+from abc import ABCMeta, abstractmethod
+from six import add_metaclass
+
+from .constants import NPM_REGISTRY_URL
+from .package_json import PackageJson
+from .utils import build_nm_path, build_pj_path
+
+
+class PackageManagerError(RuntimeError):
+ pass
+
+
+class PackageManagerCommandError(PackageManagerError):
+ def __init__(self, cmd, code, stdout, stderr):
+ self.cmd = cmd
+ self.code = code
+ self.stdout = stdout
+ self.stderr = stderr
+
+ msg = "package manager exited with code {} while running {}:\n{}\n{}".format(code, cmd, stdout, stderr)
+ super(PackageManagerCommandError, self).__init__(msg)
+
+
+@add_metaclass(ABCMeta)
+class BasePackageManager(object):
+ def __init__(self, build_root, build_path, sources_path, nodejs_bin_path, script_path, contribs_path, module_path=None, sources_root=None):
+ self.module_path = build_path[len(build_root) + 1:] if module_path is None else module_path
+ self.build_path = build_path
+ self.sources_path = sources_path
+ self.build_root = build_root
+ self.sources_root = sources_path[:-len(self.module_path) - 1] if sources_root is None else sources_root
+ self.nodejs_bin_path = nodejs_bin_path
+ self.script_path = script_path
+ self.contribs_path = contribs_path
+
+ @classmethod
+ def load_package_json(cls, path):
+ """
+ :param path: path to package.json
+ :type path: str
+ :rtype: PackageJson
+ """
+ return PackageJson.load(path)
+
+ @classmethod
+ def load_package_json_from_dir(cls, dir_path):
+ """
+ :param dir_path: path to directory with package.json
+ :type dir_path: str
+ :rtype: PackageJson
+ """
+ return cls.load_package_json(build_pj_path(dir_path))
+
+ @classmethod
+ @abstractmethod
+ def load_lockfile(cls, path):
+ pass
+
+ @classmethod
+ @abstractmethod
+ def load_lockfile_from_dir(cls, dir_path):
+ pass
+
+ @abstractmethod
+ def create_node_modules(self):
+ pass
+
+ @abstractmethod
+ def calc_node_modules_inouts(self):
+ pass
+
+ @abstractmethod
+ def extract_packages_meta_from_lockfiles(self, lf_paths):
+ pass
+
+ def get_local_peers_from_package_json(self):
+ """
+ Returns paths of direct workspace dependencies (source root related).
+ :rtype: list of str
+ """
+ return self.load_package_json_from_dir(self.sources_path).get_workspace_dep_paths(base_path=self.module_path)
+
+ def get_peers_from_package_json(self):
+ """
+ Returns paths of workspace dependencies (source root related).
+ :rtype: list of str
+ """
+ pj = self.load_package_json_from_dir(self.sources_path)
+ prefix_len = len(self.sources_root) + 1
+
+ return [p[prefix_len:] for p in pj.get_workspace_map(ignore_self=True).keys()]
+
+ def _exec_command(self, args, include_defaults=True):
+ if not self.nodejs_bin_path:
+ raise PackageManagerError("Unable to execute command: nodejs_bin_path is not configured")
+
+ cmd = [self.nodejs_bin_path, self.script_path] + args + (self._get_default_options() if include_defaults else [])
+ p = subprocess.Popen(
+ cmd,
+ cwd=self.build_path,
+ stdin=None,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+ stdout, stderr = p.communicate()
+
+ if p.returncode != 0:
+ self._dump_debug_log()
+
+ raise PackageManagerCommandError(cmd, p.returncode, stdout.decode("utf-8"), stderr.decode("utf-8"))
+
+ def _nm_path(self, *parts):
+ return os.path.join(build_nm_path(self.build_path), *parts)
+
+ def _contrib_tarball_path(self, pkg):
+ return os.path.join(self.contribs_path, pkg.tarball_path)
+
+ def _contrib_tarball_url(self, pkg):
+ return "file:" + self._contrib_tarball_path(pkg)
+
+ def _get_default_options(self):
+ return ["--registry", NPM_REGISTRY_URL]
+
+ def _get_debug_log_path(self):
+ return None
+
+ def _dump_debug_log(self):
+ log_path = self._get_debug_log_path()
+
+ if not log_path:
+ return
+
+ try:
+ with open(log_path) as f:
+ sys.stderr.write("Package manager log {}:\n{}\n".format(log_path, f.read()))
+ except Exception:
+ sys.stderr.write("Failed to dump package manager log {}.\n".format(log_path))
diff --git a/build/plugins/lib/nots/package_manager/base/tests/package_json.py b/build/plugins/lib/nots/package_manager/base/tests/package_json.py
new file mode 100644
index 0000000000..42aab85b26
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/package_json.py
@@ -0,0 +1,152 @@
+import os
+import pytest
+
+from build.plugins.lib.nots.package_manager.base.package_json import PackageJson, PackageJsonWorkspaceError
+
+
+def test_get_workspace_dep_spec_paths_ok():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@yandex-int/baz": "workspace:../baz",
+ },
+ }
+
+ ws_dep_spec_paths = pj.get_workspace_dep_spec_paths()
+
+ assert ws_dep_spec_paths == [
+ ("@yandex-int/bar", "../bar"),
+ ("@yandex-int/baz", "../baz"),
+ ]
+
+
+def test_get_workspace_dep_spec_paths_invalid_path():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:*",
+ },
+ }
+
+ with pytest.raises(PackageJsonWorkspaceError) as e:
+ pj.get_workspace_dep_spec_paths()
+
+ assert str(e.value) == "Expected relative path specifier for workspace dependency, but got 'workspace:*' for @yandex-int/bar in /packages/foo/package.json"
+
+
+def test_get_workspace_dep_paths_ok():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@yandex-int/baz": "workspace:../baz",
+ },
+ }
+
+ ws_dep_paths = pj.get_workspace_dep_paths()
+
+ assert ws_dep_paths == [
+ "/packages/bar",
+ "/packages/baz",
+ ]
+
+
+def test_get_workspace_dep_paths_with_custom_base_path():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@yandex-int/baz": "workspace:../baz",
+ },
+ }
+
+ ws_dep_paths = pj.get_workspace_dep_paths(base_path="custom/dir")
+
+ assert ws_dep_paths == [
+ "custom/bar",
+ "custom/baz",
+ ]
+
+
+def test_get_workspace_deps_ok():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@yandex-int/baz": "workspace:../baz",
+ },
+ }
+
+ def load_mock(cls, path):
+ p = PackageJson(path)
+ p.data = {
+ "name": "@yandex-int/{}".format(os.path.basename(os.path.dirname(path))),
+ }
+ return p
+ PackageJson.load = classmethod(load_mock)
+
+ ws_deps = pj.get_workspace_deps()
+
+ assert len(ws_deps) == 2
+ assert ws_deps[0].path == "/packages/bar/package.json"
+ assert ws_deps[1].path == "/packages/baz/package.json"
+
+
+def test_get_workspace_deps_with_wrong_name():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ }
+
+ def load_mock(cls, path):
+ p = PackageJson(path)
+ p.data = {
+ "name": "@shouldbe/{}".format(os.path.basename(os.path.dirname(path))),
+ }
+ return p
+ PackageJson.load = classmethod(load_mock)
+
+ with pytest.raises(PackageJsonWorkspaceError) as e:
+ pj.get_workspace_deps()
+
+ assert str(e.value) == "Workspace dependency name mismatch, found '@yandex-int/bar' instead of '@shouldbe/bar' in /packages/foo/package.json"
+
+
+def test_get_workspace_map_ok():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ }
+
+ def load_mock(cls, path):
+ name = os.path.basename(os.path.dirname(path))
+ p = PackageJson(path)
+ p.data = {
+ "name": "@yandex-int/{}".format(name),
+ "dependencies": ({"@yandex-int/qux": "workspace:../qux"} if name == "bar" else {}),
+ }
+ return p
+ PackageJson.load = classmethod(load_mock)
+
+ ws_map = pj.get_workspace_map()
+
+ assert len(ws_map) == 3
+ assert ws_map["/packages/foo"][0].path == "/packages/foo/package.json"
+ assert ws_map["/packages/foo"][1] == 0
+ assert ws_map["/packages/bar"][0].path == "/packages/bar/package.json"
+ assert ws_map["/packages/bar"][1] == 1
+ assert ws_map["/packages/qux"][0].path == "/packages/qux/package.json"
+ assert ws_map["/packages/qux"][1] == 2
diff --git a/build/plugins/lib/nots/package_manager/base/tests/utils.py b/build/plugins/lib/nots/package_manager/base/tests/utils.py
new file mode 100644
index 0000000000..4287beec47
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/utils.py
@@ -0,0 +1,15 @@
+from build.plugins.lib.nots.package_manager.base import utils
+
+
+def test_extract_package_name_from_path():
+ happy_checklist = [
+ ("@yandex-int/foo-bar-baz/some/path/inside/the/package", "@yandex-int/foo-bar-baz"),
+ ("@yandex-int/foo-bar-buzz", "@yandex-int/foo-bar-buzz"),
+ ("package-wo-scope", "package-wo-scope"),
+ ("p", "p"),
+ ("", ""),
+ ]
+
+ for item in happy_checklist:
+ package_name = utils.extract_package_name_from_path(item[0])
+ assert package_name == item[1]
diff --git a/build/plugins/lib/nots/package_manager/base/tests/ya.make b/build/plugins/lib/nots/package_manager/base/tests/ya.make
new file mode 100644
index 0000000000..1bece69c33
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/ya.make
@@ -0,0 +1,14 @@
+PY23_TEST()
+
+OWNER(g:frontend-build-platform)
+
+TEST_SRCS(
+ package_json.py
+ utils.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+)
+
+END()
diff --git a/build/plugins/lib/nots/package_manager/base/utils.py b/build/plugins/lib/nots/package_manager/base/utils.py
new file mode 100644
index 0000000000..017bf4ca41
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/utils.py
@@ -0,0 +1,29 @@
+import os
+
+from .constants import PACKAGE_JSON_FILENAME, NODE_MODULES_DIRNAME, NODE_MODULES_BUNDLE_FILENAME
+
+
+def s_rooted(p):
+ return os.path.join("$S", p)
+
+
+def b_rooted(p):
+ return os.path.join("$B", p)
+
+
+def build_pj_path(p):
+ return os.path.join(p, PACKAGE_JSON_FILENAME)
+
+
+def build_nm_path(p):
+ return os.path.join(p, NODE_MODULES_DIRNAME)
+
+
+def build_nm_bundle_path(p):
+ return os.path.join(p, NODE_MODULES_BUNDLE_FILENAME)
+
+
+def extract_package_name_from_path(p):
+    # if the package name has a scope prefix, use the first two tokens, otherwise only the first one
+ parts = p.split("/", 2)
+ return "/".join(parts[:2]) if p.startswith("@") else parts[0]
diff --git a/build/plugins/lib/nots/package_manager/base/ya.make b/build/plugins/lib/nots/package_manager/base/ya.make
new file mode 100644
index 0000000000..4b7f22f05a
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/ya.make
@@ -0,0 +1,23 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+ constants.py
+ lockfile.py
+ node_modules_bundler.py
+ package_json.py
+ package_manager.py
+ utils.py
+)
+
+PEERDIR(
+ contrib/python/six
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/package_manager/pnpm/__init__.py b/build/plugins/lib/nots/package_manager/pnpm/__init__.py
new file mode 100644
index 0000000000..b3a3c20c02
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/__init__.py
@@ -0,0 +1,12 @@
+from . import constants
+from .lockfile import PnpmLockfile
+from .package_manager import PnpmPackageManager
+from .workspace import PnpmWorkspace
+
+
+__all__ = [
+ "constants",
+ "PnpmLockfile",
+ "PnpmPackageManager",
+ "PnpmWorkspace",
+]
diff --git a/build/plugins/lib/nots/package_manager/pnpm/constants.py b/build/plugins/lib/nots/package_manager/pnpm/constants.py
new file mode 100644
index 0000000000..e84a78c55e
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/constants.py
@@ -0,0 +1,2 @@
+PNPM_WS_FILENAME = "pnpm-workspace.yaml"
+PNPM_LOCKFILE_FILENAME = "pnpm-lock.yaml"
diff --git a/build/plugins/lib/nots/package_manager/pnpm/lockfile.py b/build/plugins/lib/nots/package_manager/pnpm/lockfile.py
new file mode 100644
index 0000000000..19c748e55b
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/lockfile.py
@@ -0,0 +1,162 @@
+import base64
+import binascii
+import yaml
+import os
+
+from six.moves.urllib import parse as urlparse
+from six import iteritems
+
+from ..base import PackageJson, BaseLockfile, LockfilePackageMeta, LockfilePackageMetaInvalidError
+
+
+class PnpmLockfile(BaseLockfile):
+ IMPORTER_KEYS = PackageJson.DEP_KEYS + ("specifiers",)
+
+ def read(self):
+ with open(self.path, "r") as f:
+ self.data = yaml.load(f, Loader=yaml.CSafeLoader)
+
+ def write(self, path=None):
+ """
+ :param path: path to store lockfile, defaults to original path
+ :type path: str
+ """
+ if path is None:
+ path = self.path
+
+ with open(path, "w") as f:
+ yaml.dump(self.data, f, Dumper=yaml.CSafeDumper)
+
+ def get_packages_meta(self):
+ """
+ Extracts packages meta from lockfile.
+ :rtype: list of LockfilePackageMeta
+ """
+ packages = self.data.get("packages", {})
+
+ return map(lambda x: _parse_package_meta(*x), iteritems(packages))
+
+ def update_tarball_resolutions(self, fn):
+ """
+ :param fn: maps `LockfilePackageMeta` instance to new `resolution.tarball` value
+ :type fn: lambda
+ """
+ packages = self.data.get("packages", {})
+
+ for key, meta in iteritems(packages):
+ meta["resolution"]["tarball"] = fn(_parse_package_meta(key, meta))
+ packages[key] = meta
+
+ def get_importers(self):
+ """
+ Returns "importers" section from the lockfile or creates similar structure from "dependencies" and "specifiers".
+ :rtype: dict of dict of dict of str
+ """
+ importers = self.data.get("importers")
+ if importers is not None:
+ return importers
+
+ importer = {k: self.data[k] for k in self.IMPORTER_KEYS if k in self.data}
+
+ return ({".": importer} if importer else {})
+
+ def merge(self, lf):
+ """
+ Merges two lockfiles:
+        1. Converts the lockfile to a monorepo-like lockfile with an "importers" section instead of "dependencies" and "specifiers".
+        2. Merges `lf`'s dependencies and specifiers into the importers.
+        3. Merges `lf`'s packages into the lockfile.
+ :param lf: lockfile to merge
+ :type lf: PnpmLockfile
+ """
+ importers = self.get_importers()
+ build_path = os.path.dirname(self.path)
+
+ for [importer, imports] in iteritems(lf.get_importers()):
+ importer_path = os.path.normpath(os.path.join(os.path.dirname(lf.path), importer))
+ importer_rel_path = os.path.relpath(importer_path, build_path)
+ importers[importer_rel_path] = imports
+
+ self.data["importers"] = importers
+
+ for k in self.IMPORTER_KEYS:
+ self.data.pop(k, None)
+
+ packages = self.data.get("packages", {})
+ for k, v in iteritems(lf.data.get("packages", {})):
+ if k not in packages:
+ packages[k] = v
+ self.data["packages"] = packages
+
+
+def _parse_package_meta(key, meta):
+ """
+    :param key: unique package key from lockfile
+ :type key: string
+ :param meta: package meta dict from lockfile
+ :type meta: dict
+    :rtype: LockfilePackageMeta
+ """
+ try:
+ name, version = _parse_package_key(key)
+ sky_id = _parse_sky_id_from_tarball_url(meta["resolution"]["tarball"])
+ integrity_algorithm, integrity = _parse_package_integrity(meta["resolution"]["integrity"])
+ except KeyError as e:
+ raise TypeError("Invalid package meta for key {}, missing {} key".format(key, e))
+ except LockfilePackageMetaInvalidError as e:
+ raise TypeError("Invalid package meta for key {}, parse error: {}".format(key, e))
+
+ return LockfilePackageMeta(name, version, sky_id, integrity, integrity_algorithm)
+
+
+def _parse_package_key(key):
+ """
+ Returns tuple of scoped package name and version.
+ :param key: package key in format "/({scope}/)?{package_name}/{package_version}(_{peer_dependencies})?"
+ :type key: string
+ :rtype: (str, str)
+ """
+ try:
+ tokens = key.split("/")[1:]
+ version = tokens.pop().split("_", 1)[0]
+
+ if len(tokens) < 1 or len(tokens) > 2:
+ raise TypeError()
+ except (IndexError, TypeError):
+ raise LockfilePackageMetaInvalidError("Invalid package key")
+
+ return ("/".join(tokens), version)
+
+
+def _parse_sky_id_from_tarball_url(tarball_url):
+ """
+ :param tarball_url: tarball url
+ :type tarball_url: string
+ :rtype: string
+ """
+ if tarball_url.startswith("file:"):
+ return ""
+
+ rbtorrent_param = urlparse.parse_qs(urlparse.urlparse(tarball_url).query).get("rbtorrent")
+
+ if rbtorrent_param is None:
+ raise LockfilePackageMetaInvalidError("Missing rbtorrent param in tarball url {}".format(tarball_url))
+
+ return "rbtorrent:{}".format(rbtorrent_param[0])
+
+
+def _parse_package_integrity(integrity):
+ """
+ Returns tuple of algorithm and hash (hex).
+ :param integrity: package integrity in format "{algo}-{base64_of_hash}"
+ :type integrity: string
+ :rtype: (str, str)
+ """
+ algo, hash_b64 = integrity.split("-", 1)
+
+ try:
+ hash_hex = binascii.hexlify(base64.b64decode(hash_b64))
+ except TypeError as e:
+ raise LockfilePackageMetaInvalidError("Invalid package integrity encoding, integrity: {}, error: {}".format(integrity, e))
+
+ return (algo, hash_hex)
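+
+# Example: for an integrity string like "sha512-<base64 of hash>" the function above
+# returns ("sha512", <hex of the same hash>), as exercised by the tests of this module.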
diff --git a/build/plugins/lib/nots/package_manager/pnpm/package_manager.py b/build/plugins/lib/nots/package_manager/pnpm/package_manager.py
new file mode 100644
index 0000000000..5d41185336
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/package_manager.py
@@ -0,0 +1,215 @@
+import os
+import yaml
+
+from six import iteritems
+
+from ..base import BasePackageManager, PackageManagerError
+from ..base.utils import build_pj_path, build_nm_path, build_nm_bundle_path, s_rooted, b_rooted
+from ..base.node_modules_bundler import bundle_node_modules
+from ..base.constants import NODE_MODULES_BUNDLE_FILENAME
+from .lockfile import PnpmLockfile
+from .workspace import PnpmWorkspace
+from .utils import build_lockfile_path, build_ws_config_path
+
+
+class PnpmPackageManager(BasePackageManager):
+ _STORE_NM_PATH = os.path.join(".pnpm", "store")
+ _VSTORE_NM_PATH = os.path.join(".pnpm", "virtual-store")
+ _STORE_VER = "v3"
+
+ @classmethod
+ def load_lockfile(cls, path):
+ """
+ :param path: path to lockfile
+ :type path: str
+ :rtype: PnpmLockfile
+ """
+ return PnpmLockfile.load(path)
+
+ @classmethod
+ def load_lockfile_from_dir(cls, dir_path):
+ """
+ :param dir_path: path to directory with lockfile
+ :type dir_path: str
+ :rtype: PnpmLockfile
+ """
+ return cls.load_lockfile(build_lockfile_path(dir_path))
+
+ def create_node_modules(self):
+ """
+ Creates node_modules directory according to the lockfile.
+ """
+ ws = self._prepare_workspace()
+ self._exec_command(
+ [
+ "install",
+ "--offline",
+ "--frozen-lockfile",
+ "--public-hoist-pattern",
+ "",
+ "--store-dir",
+ self._nm_path(self._STORE_NM_PATH),
+ "--virtual-store-dir",
+ self._nm_path(self._VSTORE_NM_PATH),
+ "--no-verify-store-integrity",
+ "--package-import-method",
+ "hardlink",
+ "--ignore-pnpmfile",
+ "--ignore-scripts",
+ "--strict-peer-dependencies",
+ ]
+ )
+ self._fix_stores_in_modules_yaml()
+
+ bundle_node_modules(
+ build_root=self.build_root,
+ node_modules_path=self._nm_path(),
+ peers=ws.get_paths(base_path=self.module_path, ignore_self=True),
+ bundle_path=NODE_MODULES_BUNDLE_FILENAME,
+ )
+
+ def calc_node_modules_inouts(self):
+ """
+ Returns input and output paths for command that creates `node_modules` bundle.
+ Inputs:
+ - source package.json and lockfile,
+ - built package.jsons of all deps,
+        - merged lockfiles and workspace configs of direct non-leaf deps,
+ - tarballs.
+ Outputs:
+ - merged lockfile,
+ - generated workspace config,
+ - created node_modules bundle.
+ :rtype: (list of str, list of str)
+ """
+ ins = [
+ s_rooted(build_pj_path(self.module_path)),
+ s_rooted(build_lockfile_path(self.module_path)),
+ ]
+ outs = [
+ b_rooted(build_lockfile_path(self.module_path)),
+ b_rooted(build_ws_config_path(self.module_path)),
+ b_rooted(build_nm_bundle_path(self.module_path)),
+ ]
+
+ # Source lockfiles are used only to get tarballs info.
+ src_lf_paths = [build_lockfile_path(self.sources_path)]
+ pj = self.load_package_json_from_dir(self.sources_path)
+
+ for [dep_src_path, (_, depth)] in iteritems(pj.get_workspace_map(ignore_self=True)):
+ dep_mod_path = dep_src_path[len(self.sources_root) + 1 :]
+ # pnpm requires all package.jsons.
+ ins.append(b_rooted(build_pj_path(dep_mod_path)))
+
+ dep_lf_src_path = build_lockfile_path(dep_src_path)
+ if not os.path.isfile(dep_lf_src_path):
+ # It is ok for leaves.
+ continue
+ src_lf_paths.append(dep_lf_src_path)
+
+ if depth == 1:
+ ins.append(b_rooted(build_ws_config_path(dep_mod_path)))
+ ins.append(b_rooted(build_lockfile_path(dep_mod_path)))
+
+ for pkg in self.extract_packages_meta_from_lockfiles(src_lf_paths):
+ ins.append(b_rooted(self._contrib_tarball_path(pkg)))
+
+ return (ins, outs)
+
+ def extract_packages_meta_from_lockfiles(self, lf_paths):
+ """
+        :type lf_paths: iterable of str
+ :rtype: iterable of LockfilePackageMeta
+ """
+ tarballs = set()
+
+ for lf_path in lf_paths:
+ try:
+ for pkg in self.load_lockfile(lf_path).get_packages_meta():
+ if pkg.tarball_path not in tarballs:
+ tarballs.add(pkg.tarball_path)
+ yield pkg
+ except Exception as e:
+ raise PackageManagerError("Unable to process lockfile {}: {}".format(lf_path, e))
+
+ def _prepare_workspace(self):
+ """
+ :rtype: PnpmWorkspace
+ """
+ pj = self._build_package_json()
+ ws = PnpmWorkspace(build_ws_config_path(self.build_path))
+ ws.set_from_package_json(pj)
+ dep_paths = ws.get_paths(ignore_self=True)
+ self._build_merged_workspace_config(ws, dep_paths)
+ self._build_merged_lockfile(dep_paths)
+
+ return ws
+
+ def _build_package_json(self):
+ """
+ :rtype: PackageJson
+ """
+ pj = self.load_package_json_from_dir(self.sources_path)
+
+ if not os.path.exists(self.build_path):
+ os.makedirs(self.build_path, exist_ok=True)
+
+ pj.path = build_pj_path(self.build_path)
+ pj.write()
+
+ return pj
+
+ def _build_merged_lockfile(self, dep_paths):
+ """
+ :type dep_paths: list of str
+ :rtype: PnpmLockfile
+ """
+ lf = self.load_lockfile_from_dir(self.sources_path)
+ # Change to the output path for correct path calcs on merging.
+ lf.path = build_lockfile_path(self.build_path)
+
+ for dep_path in dep_paths:
+ lf_path = build_lockfile_path(dep_path)
+ if os.path.isfile(lf_path):
+ lf.merge(self.load_lockfile(lf_path))
+
+ lf.update_tarball_resolutions(lambda p: self._contrib_tarball_url(p))
+ lf.write()
+
+ def _build_merged_workspace_config(self, ws, dep_paths):
+ """
+ NOTE: This method mutates `ws`.
+        :type ws: PnpmWorkspace
+ :type dep_paths: list of str
+ """
+ for dep_path in dep_paths:
+ ws_config_path = build_ws_config_path(dep_path)
+ if os.path.isfile(ws_config_path):
+ ws.merge(PnpmWorkspace.load(ws_config_path))
+
+ ws.write()
+
+ def _fix_stores_in_modules_yaml(self):
+ """
+        Ensures that store paths are the same as they would be after installing deps in the source dir.
+ This is required to reuse `node_modules` after build.
+ """
+ with open(self._nm_path(".modules.yaml"), "r+") as f:
+ data = yaml.load(f, Loader=yaml.CSafeLoader)
+ # NOTE: pnpm requires absolute store path here.
+ data["storeDir"] = os.path.join(build_nm_path(self.sources_path), self._STORE_NM_PATH, self._STORE_VER)
+ data["virtualStoreDir"] = self._VSTORE_NM_PATH
+ f.seek(0)
+ yaml.dump(data, f, Dumper=yaml.CSafeDumper)
+ f.truncate()
+
+ def _get_default_options(self):
+ return super(PnpmPackageManager, self)._get_default_options() + [
+ "--stream",
+ "--reporter",
+ "append-only",
+ "--no-color",
+ ]
+
+ def _get_debug_log_path(self):
+ return self._nm_path(".pnpm-debug.log")
diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py b/build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py
new file mode 100644
index 0000000000..06315a4992
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py
@@ -0,0 +1,320 @@
+import pytest
+
+from build.plugins.lib.nots.package_manager.pnpm.lockfile import PnpmLockfile
+
+
+def test_lockfile_get_packages_meta_ok():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/@babel/cli/7.6.2_@babel+core@7.6.2": {
+ "resolution": {
+ "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==",
+ "tarball": "@babel%2fcli/-/cli-7.6.2.tgz?rbtorrent=cb1849da3e4947e56a8f6bde6a1ec42703ddd187",
+ },
+ },
+ },
+ }
+
+ packages = list(lf.get_packages_meta())
+ pkg = packages[0]
+
+ assert len(packages) == 1
+ assert pkg.name == "@babel/cli"
+ assert pkg.version == "7.6.2"
+ assert pkg.sky_id == "rbtorrent:cb1849da3e4947e56a8f6bde6a1ec42703ddd187"
+ assert pkg.integrity == b"24367e4ff6ebf693df4f696600c272a490d34d31ccf5e3c3fc40f5d13463473255744572f89077891961cd8993b796243601efc561a55159cbb5dbfaaee883ad"
+ assert pkg.integrity_algorithm == "sha512"
+
+
+def test_lockfile_get_packages_empty():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {}
+
+ assert len(list(lf.get_packages_meta())) == 0
+
+
+def test_package_meta_invalid_key():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "in/valid": {},
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key in/valid, parse error: Invalid package key"
+
+
+def test_package_meta_missing_resolution():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/valid/1.2.3": {},
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key /valid/1.2.3, missing 'resolution' key"
+
+
+def test_package_meta_missing_tarball():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/valid/1.2.3": {
+ "resolution": {},
+ },
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key /valid/1.2.3, missing 'tarball' key"
+
+
+def test_package_meta_missing_rbtorrent():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/valid/1.2.3": {
+ "resolution": {
+ "tarball": "valid-1.2.3.tgz",
+ },
+ },
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key /valid/1.2.3, parse error: Missing rbtorrent param in tarball url valid-1.2.3.tgz"
+
+
+def test_lockfile_meta_file_tarball():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/@babel/cli/7.6.2": {
+ "resolution": {
+ "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==",
+ "tarball": "file:/some/abs/path.tgz",
+ },
+ },
+ },
+ }
+
+ packages = list(lf.get_packages_meta())
+ pkg = packages[0]
+
+ assert len(packages) == 1
+ assert pkg.name == "@babel/cli"
+ assert pkg.version == "7.6.2"
+ assert pkg.sky_id == ""
+
+
+def test_lockfile_update_tarball_resolutions_ok():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/@babel/cli/7.6.2_@babel+core@7.6.2": {
+ "resolution": {
+ "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==",
+ "tarball": "@babel%2fcli/-/cli-7.6.2.tgz?rbtorrent=cb1849da3e4947e56a8f6bde6a1ec42703ddd187",
+ },
+ },
+ },
+ }
+
+ lf.update_tarball_resolutions(lambda p: p.name)
+
+ assert lf.data["packages"]["/@babel/cli/7.6.2_@babel+core@7.6.2"]["resolution"]["tarball"] == "@babel/cli"
+
+
+def test_lockfile_merge():
+ lf1 = PnpmLockfile(path="/foo/pnpm-lock.yaml")
+ lf1.data = {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ },
+ }
+
+ lf2 = PnpmLockfile(path="/bar/pnpm-lock.yaml")
+ lf2.data = {
+ "dependencies": {
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "b": "1.0.0",
+ },
+ "packages": {
+ "/b/1.0.0": {},
+ },
+ }
+
+ lf3 = PnpmLockfile(path="/another/baz/pnpm-lock.yaml")
+ lf3.data = {
+ "importers": {
+ ".": {
+ "dependencies": {
+ "@a/qux": "link:../qux",
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "@a/qux": "workspace:../qux",
+ "a": "1.0.0",
+ },
+ },
+ "../qux": {
+ "dependencies": {
+ "b": "1.0.1",
+ },
+ "specifiers": {
+ "b": "1.0.1",
+ },
+ },
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ "/b/1.0.1": {},
+ },
+ }
+
+ lf4 = PnpmLockfile(path="/another/quux/pnpm-lock.yaml")
+ lf4.data = {
+ "dependencies": {
+ "@a/bar": "link:../../bar",
+ },
+ "specifiers": {
+ "@a/bar": "workspace:../../bar",
+ },
+ }
+
+ lf1.merge(lf2)
+ lf1.merge(lf3)
+ lf1.merge(lf4)
+
+ assert lf1.data == {
+ "importers": {
+ ".": {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ },
+ "../bar": {
+ "dependencies": {
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "b": "1.0.0",
+ },
+ },
+ "../another/baz": {
+ "dependencies": {
+ "@a/qux": "link:../qux",
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "@a/qux": "workspace:../qux",
+ "a": "1.0.0",
+ },
+ },
+ "../another/qux": {
+ "dependencies": {
+ "b": "1.0.1",
+ },
+ "specifiers": {
+ "b": "1.0.1",
+ },
+ },
+ "../another/quux": {
+ "dependencies": {
+ "@a/bar": "link:../../bar",
+ },
+ "specifiers": {
+ "@a/bar": "workspace:../../bar",
+ },
+ },
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ "/b/1.0.0": {},
+ "/b/1.0.1": {},
+ },
+ }
+
+
+def test_lockfile_merge_dont_overrides_packages():
+ lf1 = PnpmLockfile(path="/foo/pnpm-lock.yaml")
+ lf1.data = {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ },
+ }
+
+ lf2 = PnpmLockfile(path="/bar/pnpm-lock.yaml")
+ lf2.data = {
+ "dependencies": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ "packages": {
+ "/a/1.0.0": {
+ "overriden": True,
+ },
+ "/b/1.0.0": {},
+ },
+ }
+
+ lf1.merge(lf2)
+
+ assert lf1.data == {
+ "importers": {
+ ".": {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ },
+ "../bar": {
+ "dependencies": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ },
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ "/b/1.0.0": {},
+ },
+ }
diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py b/build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py
new file mode 100644
index 0000000000..5d11dd9e5d
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py
@@ -0,0 +1,68 @@
+from build.plugins.lib.nots.package_manager.base import PackageJson
+from build.plugins.lib.nots.package_manager.pnpm.workspace import PnpmWorkspace
+
+
+def test_workspace_get_paths():
+ ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ ws.packages = set([".", "../bar", "../../another/baz"])
+
+ assert sorted(ws.get_paths()) == [
+ "/another/baz",
+ "/packages/bar",
+ "/packages/foo",
+ ]
+
+
+def test_workspace_get_paths_with_custom_base_path_without_self():
+ ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ ws.packages = set([".", "../bar", "../../another/baz"])
+
+ assert sorted(ws.get_paths(base_path="some/custom/dir", ignore_self=True)) == [
+ "some/another/baz",
+ "some/custom/bar",
+ ]
+
+
+def test_workspace_set_from_package_json():
+ ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ pj = PackageJson(path="/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@a/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@a/baz": "workspace:../../another/baz",
+ },
+ "peerDependencies": {
+ "@a/qux": "workspace:../../another/qux",
+ },
+ "optionalDependencies": {
+ "@a/quux": "workspace:../../another/quux",
+ }
+ }
+
+ ws.set_from_package_json(pj)
+
+ assert sorted(ws.get_paths()) == [
+ "/another/baz",
+ "/another/quux",
+ "/another/qux",
+ "/packages/bar",
+ "/packages/foo",
+ ]
+
+
+def test_workspace_merge():
+ ws1 = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ ws1.packages = set([".", "../bar", "../../another/baz"])
+ ws2 = PnpmWorkspace(path="/another/baz/pnpm-workspace.yaml")
+ ws2.packages = set([".", "../qux"])
+
+ ws1.merge(ws2)
+
+ assert sorted(ws1.get_paths()) == [
+ "/another/baz",
+ "/another/qux",
+ "/packages/bar",
+ "/packages/foo",
+ ]
diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make b/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make
new file mode 100644
index 0000000000..44877dfc1b
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make
@@ -0,0 +1,15 @@
+PY23_TEST()
+
+OWNER(g:frontend-build-platform)
+
+TEST_SRCS(
+ lockfile.py
+ workspace.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+ build/plugins/lib/nots/package_manager/pnpm
+)
+
+END()
diff --git a/build/plugins/lib/nots/package_manager/pnpm/utils.py b/build/plugins/lib/nots/package_manager/pnpm/utils.py
new file mode 100644
index 0000000000..1fa4291b9d
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/utils.py
@@ -0,0 +1,11 @@
+import os
+
+from .constants import PNPM_LOCKFILE_FILENAME, PNPM_WS_FILENAME
+
+
+def build_lockfile_path(p):
+ return os.path.join(p, PNPM_LOCKFILE_FILENAME)
+
+
+def build_ws_config_path(p):
+ return os.path.join(p, PNPM_WS_FILENAME)
diff --git a/build/plugins/lib/nots/package_manager/pnpm/workspace.py b/build/plugins/lib/nots/package_manager/pnpm/workspace.py
new file mode 100644
index 0000000000..9df0d2de0c
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/workspace.py
@@ -0,0 +1,75 @@
+import os
+import yaml
+
+
+class PnpmWorkspace(object):
+ @classmethod
+ def load(cls, path):
+ ws = cls(path)
+ ws.read()
+
+ return ws
+
+ def __init__(self, path):
+ if not os.path.isabs(path):
+ raise TypeError("Absolute path required, given: {}".format(path))
+
+ self.path = path
+ # NOTE: pnpm requires relative workspace paths.
+ self.packages = set()
+
+ def read(self):
+ with open(self.path) as f:
+ self.packages = set(yaml.load(f, Loader=yaml.CSafeLoader).get("packages", []))
+
+ def write(self, path=None):
+ if not path:
+ path = self.path
+
+ with open(path, "w") as f:
+ data = {
+ "packages": list(self.packages),
+ }
+ yaml.dump(data, f, Dumper=yaml.CSafeDumper)
+
+ def get_paths(self, base_path=None, ignore_self=False):
+ """
+ Returns absolute paths of the workspace packages.
+ :param base_path: base path to resolve relative dep paths
+ :type base_path: str
+ :param ignore_self: whether path of the current module will be excluded (if present)
+ :type ignore_self: bool
+ :rtype: list of str
+ """
+ if base_path is None:
+ base_path = os.path.dirname(self.path)
+
+ return [os.path.normpath(os.path.join(base_path, pkg_path))
+ for pkg_path in self.packages if not ignore_self or pkg_path != "."]
+
+ def set_from_package_json(self, package_json):
+ """
+ Sets packages to "workspace" deps from given package.json.
+ :param package_json: package.json of workspace
+ :type package_json: PackageJson
+ """
+ if os.path.dirname(package_json.path) != os.path.dirname(self.path):
+ raise TypeError(
+ "package.json should be in workspace directory {}, given: {}".format(os.path.dirname(self.path), package_json.path))
+
+ self.packages = set(path for _, path in package_json.get_workspace_dep_spec_paths())
+ # Add relative path to self.
+ self.packages.add(".")
+
+ def merge(self, ws):
+ """
+ Adds `ws`'s packages to the workspace.
+ :param ws: workspace to merge
+ :type ws: PnpmWorkspace
+ """
+ dir_path = os.path.dirname(self.path)
+ ws_dir_path = os.path.dirname(ws.path)
+
+ for p_rel_path in ws.packages:
+ p_path = os.path.normpath(os.path.join(ws_dir_path, p_rel_path))
+ self.packages.add(os.path.relpath(p_path, dir_path))
diff --git a/build/plugins/lib/nots/package_manager/pnpm/ya.make b/build/plugins/lib/nots/package_manager/pnpm/ya.make
new file mode 100644
index 0000000000..f57ae4a2ba
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/ya.make
@@ -0,0 +1,24 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+ constants.py
+ lockfile.py
+ package_manager.py
+ workspace.py
+ utils.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+ contrib/python/PyYAML
+ contrib/python/six
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/package_manager/ya.make b/build/plugins/lib/nots/package_manager/ya.make
new file mode 100644
index 0000000000..3ac1ea9103
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/ya.make
@@ -0,0 +1,14 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+ build/plugins/lib/nots/package_manager/pnpm
+)
+
+END()
diff --git a/build/plugins/lib/nots/semver/__init__.py b/build/plugins/lib/nots/semver/__init__.py
new file mode 100644
index 0000000000..05dd8e9b5a
--- /dev/null
+++ b/build/plugins/lib/nots/semver/__init__.py
@@ -0,0 +1,5 @@
+from .semver import Version, Operator, VersionRange
+
+__all__ = [
+ "Version", "Operator", "VersionRange",
+]
diff --git a/build/plugins/lib/nots/semver/semver.py b/build/plugins/lib/nots/semver/semver.py
new file mode 100644
index 0000000000..d7dcc26c2c
--- /dev/null
+++ b/build/plugins/lib/nots/semver/semver.py
@@ -0,0 +1,228 @@
+import re
+
+
+class Version:
+ """
+ This class is intended to provide utility methods to work with semver ranges.
+ Right now it is limited to the simplest case: a ">=" operator followed by an exact version with no prerelease or build specification.
+ Example: ">= 1.2.3"
+ """
+
+ @classmethod
+ def from_str(cls, input):
+ """
+        :param str input: exact semver-formatted version, e.g. "1.2.3"
+ :rtype: Version
+ :raises: ValueError
+ """
+ parts = input.strip().split(".", 2)
+ major = int(parts[0])
+ minor = int(parts[1])
+ patch = int(parts[2])
+
+ return cls(major, minor, patch)
+
+ STABLE_VERSION_RE = re.compile(r'^\d+\.\d+\.\d+$')
+
+ @classmethod
+ def is_stable(cls, v):
+ """
+ Verifies that the version is in a supported format.
+
+        :param v: string with the version
+ :return: bool
+ """
+ return cls.STABLE_VERSION_RE.match(v) is not None
+
+ @classmethod
+ def cmp(cls, a, b):
+ """
+ Compare two versions. Should be used with "cmp_to_key" wrapper in sorted(), min(), max()...
+
+ For example:
+ sorted(["1.2.3", "2.4.2", "1.2.7"], key=cmp_to_key(Version.cmp))
+
+        :param a: string with version or Version instance
+        :param b: string with version or Version instance
+ :return: int
+ :raises: ValueError
+ """
+ a_version = a if isinstance(a, cls) else cls.from_str(a)
+ b_version = b if isinstance(b, cls) else cls.from_str(b)
+
+ if a_version > b_version:
+ return 1
+ elif a_version < b_version:
+ return -1
+ else:
+ return 0
+
+ __slots__ = ("_values")
+
+ def __init__(self, major, minor, patch):
+ """
+ :param int major
+ :param int minor
+ :param int patch
+ :raises ValueError
+ """
+ version_parts = {
+ "major": major,
+ "minor": minor,
+ "patch": patch,
+ }
+
+ for name, value in version_parts.items():
+ value = int(value)
+ version_parts[name] = value
+ if value < 0:
+ raise ValueError(
+ "{!r} is negative. A version can only be positive.".format(name)
+ )
+
+ self._values = (version_parts["major"], version_parts["minor"], version_parts["patch"])
+
+ def __str__(self):
+ return "{}.{}.{}".format(self._values[0], self._values[1], self._values[2])
+
+ def __repr__(self):
+ return '<Version({})>'.format(self)
+
+ def __eq__(self, other):
+ """
+ :param Version other
+ :rtype: bool
+ """
+ return self.as_tuple() == other.as_tuple()
+
+ def __ne__(self, other):
+ return self.as_tuple() != other.as_tuple()
+
+ def __gt__(self, other):
+ """
+ :param Version other
+ :rtype: bool
+ """
+ return self.as_tuple() > other.as_tuple()
+
+ def __ge__(self, other):
+ """
+ :param Version other
+ :rtype: bool
+ """
+ return self.as_tuple() >= other.as_tuple()
+
+ def __lt__(self, other):
+ """
+ :param Version other
+ :rtype: bool
+ """
+ return self.as_tuple() < other.as_tuple()
+
+ def __le__(self, other):
+ """
+ :param Version other
+ :rtype: bool
+ """
+ return self.as_tuple() <= other.as_tuple()
+
+ @property
+ def major(self):
+ """The major part of the version (read-only)."""
+ return self._values[0]
+
+ @major.setter
+ def major(self, value):
+ raise AttributeError("Attribute 'major' is readonly")
+
+ @property
+ def minor(self):
+ """The minor part of the version (read-only)."""
+ return self._values[1]
+
+ @minor.setter
+ def minor(self, value):
+ raise AttributeError("Attribute 'minor' is readonly")
+
+ @property
+ def patch(self):
+ """The patch part of the version (read-only)."""
+ return self._values[2]
+
+ @patch.setter
+ def patch(self, value):
+ raise AttributeError("Attribute 'patch' is readonly")
+
+ def as_tuple(self):
+ """
+ :rtype: tuple
+ """
+ return self._values
+
+
+class Operator:
+ EQ = "="
+ GT = ">"
+ GE = ">="
+ LT = "<"
+ LE = "<="
+
+
+class VersionRange:
+ @classmethod
+ def from_str(cls, input):
+ """
+ :param str input
+ :rtype: VersionRange
+ :raises: ValueError
+ """
+ parts = input.strip().split(Operator.GE) # the only supported range operator at the moment
+
+ if len(parts) != 2 or parts[0] != "":
+ raise ValueError("Unsupported version range: '{}'. Currently we only support ranges formatted like so: '>= 1.2.3'".format(input))
+
+ version = Version.from_str(parts[1])
+
+ return cls(Operator.GE, version)
+
+ __slots__ = ("_operator", "_version")
+
+ def __init__(self, operator, version):
+ """
+ :param str operator
+ :raises: ValueError
+ """
+ if operator != Operator.GE:
+ raise ValueError("Unsupported range operator '{}'".format(operator))
+
+ self._operator = operator
+ self._version = version
+
+ @property
+ def operator(self):
+ """The comparison operator to be used (read-only)."""
+ return self._operator
+
+ @operator.setter
+ def operator(self, value):
+ raise AttributeError("Attribute 'operator' is readonly")
+
+ @property
+ def version(self):
+ """Version to be used with the operator (read-only)."""
+ return self._version
+
+ @version.setter
+ def version(self, value):
+ raise AttributeError("Attribute 'version' is readonly")
+
+ def is_satisfied_by(self, version):
+ """
+ :param Version version
+ :rtype: bool
+ :raises: ValueError
+ """
+ if self._operator != Operator.GE:
+ raise ValueError("Unsupported operator '{}'".format(self._operator))
+
+ return version >= self._version
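+
+# Minimal usage sketch (versions are illustrative):
+#   rng = VersionRange.from_str(">= 1.2.3")
+#   rng.is_satisfied_by(Version.from_str("1.3.0"))  # True
+#   rng.is_satisfied_by(Version.from_str("1.2.2"))  # False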
diff --git a/build/plugins/lib/nots/semver/tests/test_version.py b/build/plugins/lib/nots/semver/tests/test_version.py
new file mode 100644
index 0000000000..0fa49ea15b
--- /dev/null
+++ b/build/plugins/lib/nots/semver/tests/test_version.py
@@ -0,0 +1,242 @@
+from functools import cmp_to_key
+
+from build.plugins.lib.nots.semver import Version
+
+
+def test_from_str():
+ # arrange
+ version_str = "1.2.3"
+
+ # act
+ version = Version.from_str(version_str)
+
+ # assert
+ assert version.major == 1
+ assert version.minor == 2
+ assert version.patch == 3
+
+
+def test_from_str_bad_version():
+ # arrange
+ version_str = "best version imaginable"
+ error = None
+
+ # act
+ try:
+ Version.from_str(version_str)
+ except Exception as exception:
+ error = exception
+
+ # assert
+ assert error is not None
+
+
+def test_is_stable_true():
+ # arrange
+ version_str = "1.2.3"
+
+ # act + assert
+ assert Version.is_stable(version_str)
+
+
+def test_is_stable_false():
+ # arrange
+ version_str = "1.2.3-beta1"
+
+ # act + assert
+ assert not Version.is_stable(version_str)
+
+
+def test_is_stable_incorrect():
+ # arrange
+ version_str = "v1.2.3"
+
+ # act + assert
+ assert not Version.is_stable(version_str)
+
+
+def test_cmp_lt():
+ # arrange
+ a = Version.from_str("1.2.3")
+ b = Version.from_str("1.2.5")
+
+ # act + assert
+ assert Version.cmp(a, b) == -1
+
+
+def test_cmp_gt():
+ # arrange
+ a = Version.from_str("1.2.3")
+ b = Version.from_str("1.2.2")
+
+ # act + assert
+ assert Version.cmp(a, b) == 1
+
+
+def test_cmp_eq():
+ # arrange
+ a = Version.from_str("1.2.3")
+ b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert Version.cmp(a, b) == 0
+
+
+def test_cmp_lt_str():
+ # arrange
+ a = "1.2.3"
+ b = "1.2.5"
+
+ # act + assert
+ assert Version.cmp(a, b) == -1
+
+
+def test_cmp_gt_str():
+ # arrange
+ a = "1.2.3"
+ b = "1.2.2"
+
+ # act + assert
+ assert Version.cmp(a, b) == 1
+
+
+def test_cmp_eq_str():
+ # arrange
+ a = "1.2.3"
+ b = "1.2.3"
+
+ # act + assert
+ assert Version.cmp(a, b) == 0
+
+
+def test_cmp_usage_in_sorted_asc():
+ # arrange
+ unsorted = ["1.2.3", "2.4.2", "1.2.7"]
+
+ # act + assert
+ assert sorted(unsorted, key=cmp_to_key(Version.cmp)) == ["1.2.3", "1.2.7", "2.4.2"]
+
+
+def test_cmp_usage_in_sorted_desc():
+ # arrange
+ unsorted = ["1.2.3", "2.4.2", "1.2.7"]
+
+ # act + assert
+ assert sorted(unsorted, key=cmp_to_key(Version.cmp), reverse=True) == ["2.4.2", "1.2.7", "1.2.3"]
+
+
+def test_init_negative_numbers():
+ # arrange
+ major = 1
+ minor = -2
+ patch = 3
+
+ error = None
+
+ # act
+ try:
+ Version(major, minor, patch)
+ except Exception as exception:
+ error = exception
+
+ # assert
+ assert isinstance(error, ValueError)
+ assert str(error) == "'minor' is negative. A version can only be positive."
+
+
+def test_eq():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a == version_b
+
+
+def test_eq_negative():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("3.2.1")
+
+ # act + assert
+ assert not version_a == version_b
+
+
+def test_ne():
+ # arrange
+ version_a = Version.from_str("3.2.1")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a != version_b
+
+
+def test_ne_negative():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert not version_a != version_b
+
+
+def test_gt():
+ # arrange
+ version_a = Version.from_str("3.2.1")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a > version_b
+
+
+def test_ge_equals():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a >= version_b
+
+
+def test_ge_exceeds():
+ # arrange
+ version_a = Version.from_str("3.2.1")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a >= version_b
+
+
+def test_lt():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("3.2.1")
+
+ # act + assert
+ assert version_a < version_b
+
+
+def test_le_equals():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a <= version_b
+
+
+def test_le_is_less():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("3.2.1")
+
+ # act + assert
+ assert version_a <= version_b
+
+
+def test_to_tuple():
+ # arrange
+ version = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version.as_tuple() == (1, 2, 3)
diff --git a/build/plugins/lib/nots/semver/tests/test_version_range.py b/build/plugins/lib/nots/semver/tests/test_version_range.py
new file mode 100644
index 0000000000..b2a5e556b5
--- /dev/null
+++ b/build/plugins/lib/nots/semver/tests/test_version_range.py
@@ -0,0 +1,69 @@
+from build.plugins.lib.nots.semver import Version, Operator, VersionRange
+
+
+def test_from_str():
+ # arrange
+ range_str = ">= 1.2.3"
+
+ # act
+ range = VersionRange.from_str(range_str)
+
+ # assert
+ assert isinstance(range, VersionRange)
+ assert range.operator == Operator.GE
+
+
+def test_from_str_no_operator():
+ # arrange
+ range_str = r"¯\_(ツ)_/¯"
+ error = None
+
+ # act
+ try:
+ VersionRange.from_str(range_str)
+ except Exception as exception:
+ error = exception
+
+ # assert
+ assert isinstance(error, ValueError)
+ assert str(error) == "Unsupported version range: '{}'. Currently we only support ranges formatted like so: '>= 1.2.3'".format(range_str)
+
+
+def test_init():
+ # arrange
+ operator = Operator.GE
+ version = Version.from_str("1.2.3")
+
+ # act
+ range = VersionRange(operator, version)
+
+ # assert
+ assert range.operator == Operator.GE
+ assert range.version == Version(1, 2, 3)
+
+
+def test_is_satisfied_by_starts_with():
+ # arrange
+ version = Version.from_str("1.2.3")
+ range = VersionRange.from_str(">= 1.2.3")
+
+ # act + assert
+ assert range.is_satisfied_by(version)
+
+
+def test_is_satisfied_by_includes():
+ # arrange
+ version = Version.from_str("5.8.2")
+ range = VersionRange.from_str(">= 1.2.3")
+
+ # act + assert
+ assert range.is_satisfied_by(version)
+
+
+def test_is_satisfied_by_not_includes():
+ # arrange
+ version = Version.from_str("1.2.2")
+ range = VersionRange.from_str(">= 1.2.3")
+
+ # act + assert
+ assert not range.is_satisfied_by(version)
diff --git a/build/plugins/lib/nots/semver/tests/ya.make b/build/plugins/lib/nots/semver/tests/ya.make
new file mode 100644
index 0000000000..b7605505f3
--- /dev/null
+++ b/build/plugins/lib/nots/semver/tests/ya.make
@@ -0,0 +1,14 @@
+PY3TEST()
+
+OWNER(g:frontend-build-platform)
+
+PEERDIR(
+ build/plugins/lib/nots/semver
+)
+
+TEST_SRCS(
+ test_version_range.py
+ test_version.py
+)
+
+END()
diff --git a/build/plugins/lib/nots/semver/ya.make b/build/plugins/lib/nots/semver/ya.make
new file mode 100644
index 0000000000..7d2be228f2
--- /dev/null
+++ b/build/plugins/lib/nots/semver/ya.make
@@ -0,0 +1,14 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+ semver.py
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/typescript/__init__.py b/build/plugins/lib/nots/typescript/__init__.py
new file mode 100644
index 0000000000..95f458ac5f
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/__init__.py
@@ -0,0 +1,9 @@
+from .ts_config import TsConfig
+from .ts_errors import TsError, TsValidationError
+
+
+__all__ = [
+ "TsConfig",
+ "TsError",
+ "TsValidationError",
+]
diff --git a/build/plugins/lib/nots/typescript/tests/ts_config.py b/build/plugins/lib/nots/typescript/tests/ts_config.py
new file mode 100644
index 0000000000..4b8fd675b3
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/tests/ts_config.py
@@ -0,0 +1,86 @@
+import pytest
+
+from build.plugins.lib.nots.typescript import TsConfig, TsValidationError
+
+
+def test_ts_config_validate_valid():
+ cfg = TsConfig(path="/tsconfig.json")
+ cfg.data = {
+ "compilerOptions": {
+ "rootDir": "./src",
+ "outDir": "./build",
+ },
+ }
+
+ cfg.validate()
+
+
+def test_ts_config_validate_empty():
+ cfg = TsConfig(path="/tsconfig.json")
+
+ with pytest.raises(TsValidationError) as e:
+ cfg.validate()
+
+ assert e.value.errors == [
+ "'rootDir' option is required",
+ "'outDir' option is required",
+ ]
+
+
+def test_ts_config_validate_invalid_common():
+ cfg = TsConfig(path="/tsconfig.json")
+ cfg.data = {
+ "compilerOptions": {
+ "preserveSymlinks": True,
+ "rootDirs": [],
+ "outFile": "./foo.js",
+ },
+ "references": [],
+ "files": [],
+ "include": [],
+ "exclude": [],
+ }
+
+ with pytest.raises(TsValidationError) as e:
+ cfg.validate()
+
+ assert e.value.errors == [
+ "'rootDir' option is required",
+ "'outDir' option is required",
+ "'outFile' option is not supported",
+ "'preserveSymlinks' option is not supported due to pnpm limitations",
+ "'rootDirs' option is not supported, relative imports should have single root",
+ "'files' option is not supported, use 'include'",
+ "composite builds are not supported, use peerdirs in ya.make instead of 'references' option",
+ ]
+
+
+def test_ts_config_validate_invalid_subdirs():
+ cfg = TsConfig(path="/foo/tsconfig.json")
+ cfg.data = {
+ "compilerOptions": {
+ "rootDir": "/bar/src",
+ "outDir": "../bar/build",
+ },
+ }
+
+ with pytest.raises(TsValidationError) as e:
+ cfg.validate()
+
+ assert e.value.errors == [
+ "'outDir' should be a subdirectory of the module",
+ ]
+
+
+def test_ts_config_compiler_options():
+ cfg = TsConfig(path="/tsconfig.json")
+
+ assert cfg.compiler_option("invalid") is None
+
+ cfg.data = {
+ "compilerOptions": {
+ "rootDir": "src",
+ },
+ }
+
+ assert cfg.compiler_option("rootDir") == "src"
diff --git a/build/plugins/lib/nots/typescript/tests/ya.make b/build/plugins/lib/nots/typescript/tests/ya.make
new file mode 100644
index 0000000000..44798138bc
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/tests/ya.make
@@ -0,0 +1,13 @@
+PY23_TEST()
+
+OWNER(g:frontend-build-platform)
+
+TEST_SRCS(
+ ts_config.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/typescript
+)
+
+END()
diff --git a/build/plugins/lib/nots/typescript/ts_config.py b/build/plugins/lib/nots/typescript/ts_config.py
new file mode 100644
index 0000000000..e3855b5f08
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/ts_config.py
@@ -0,0 +1,300 @@
+import copy
+import os
+import json
+
+from .ts_errors import TsError, TsValidationError
+
+from ..package_manager.base import utils
+
+DEFAULT_TS_CONFIG_FILE = "tsconfig.json"
+
+
+def merge_dicts(d1, d2):
+ """
+ Merges two dicts recursively, assuming that both have a similar structure.
+ If d1.x.y.z has a different type than d2.x.y.z, then d2 overrides d1 and the resulting value is res.x.y.z == d2.x.y.z.
+ If the corresponding values are lists, the result is the concatenation of those lists.
+ """
+ if isinstance(d1, dict) and isinstance(d2, dict):
+ for k in d2:
+ d1[k] = merge_dicts(d1[k], d2[k]) if k in d1 else d2[k]
+ else:
+ if isinstance(d1, list) and isinstance(d2, list):
+ return d1 + d2
+ else:
+ return d2
+ return d1
+
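+# For example (illustrative): merge_dicts({"a": {"x": 1, "l": [1]}}, {"a": {"l": [2], "y": 3}})
+# returns {"a": {"x": 1, "l": [1, 2], "y": 3}} (note that d1 is modified in place).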
+
+class TsConfig(object):
+ @classmethod
+ def load(cls, path):
+ """
+ :param path: tsconfig.json path
+ :type path: str
+ :rtype: TsConfig
+ """
+ tsconfig = cls(path)
+ tsconfig.read()
+
+ return tsconfig
+
+ def __init__(self, path):
+ if not os.path.isabs(path):
+ raise TypeError("Absolute path required, given: {}".format(path))
+
+ self.path = path
+ self.data = {}
+
+ def read(self):
+ try:
+ with open(self.path) as f:
+ self.data = json.load(f)
+ except Exception as e:
+ raise TsError("Failed to read tsconfig {}: {}".format(self.path, e))
+
+ def merge(self, rel_path, base_tsconfig):
+ """
+ :param rel_path: relative path to the configuration file we are merging in.
+ It is required to set the relative paths correctly.
+ :type rel_path: str
+ :param base_tsconfig: base TsConfig we are merging into this TsConfig instance
+ :type base_tsconfig: TsConfig
+ """
+ if not base_tsconfig.data:
+ return
+
+ def relative_path(p):
+ return os.path.normpath(os.path.join(rel_path, p))
+
+ base_config_data = copy.deepcopy(base_tsconfig.data)
+
+ parameter_section_labels = ["compilerOptions", "typeAcquisition", "watchOptions"]
+ for opt_label in parameter_section_labels:
+ base_options = base_config_data.get(opt_label)
+ if not base_options:
+ continue
+
+ new_options = self.data.get(opt_label)
+ for key in base_options:
+ val = base_options[key]
+
+ # single path string
+ if key in ["extends", "outDir", "rootDir", "baseUrl", "include"]:
+ val = relative_path(val)
+
+ # lists of paths
+ elif key in ["rootDirs", "excludeDirectories", "excludeFiles"]:
+ val = list(map(relative_path, val))
+
+ # dicts having paths as values
+ elif key in ["paths"]:
+ new_paths = new_options.get(key) if new_options else None
+ val = list(map(relative_path, val)) + (new_paths if new_paths else [])
+
+ base_options[key] = val
+
+ if new_options and base_options:
+ base_options.update(new_options)
+ self.data[opt_label] = base_options
+
+ base_config_data.update(self.data)
+ self.data = base_config_data
+
+ def inline_extend(self, dep_paths):
+ """
+ Merges the tsconfig parameters from the configuration file referred to by "extends", if any.
+ Relative paths are adjusted; current parameter values are prioritized higher than
+ those coming from the extended file (according to TSC merging rules).
+ Returns the list of file paths for config files merged into the current configuration.
+ :param dep_paths: dict of dependency names to their paths
+ :type dep_paths: dict
+ :rtype: list of str
+ """
+ ext_value = self.data.get("extends")
+ if not ext_value:
+ return []
+
+ if ext_value.startswith("."):
+ base_config_path = ext_value
+
+ else:
+ dep_name = utils.extract_package_name_from_path(ext_value)
+ # the remaining part is the path of the extended config
+ file_path_start = len(dep_name) + 1
+ file_path = ext_value[file_path_start:]
+ dep_path = dep_paths.get(dep_name)
+ if dep_path is None:
+ raise Exception(
+ "referenceing from {}, data: {}\n: Dependency '{}' not found in dep_paths: {}".format(
+ self.path, str(self.data), dep_name, dep_paths
+ )
+ )
+ base_config_path = os.path.join(dep_path, file_path)
+
+ rel_path = os.path.dirname(base_config_path)
+ tsconfig_curdir_path = os.path.join(os.path.dirname(self.path), base_config_path)
+ if os.path.isdir(tsconfig_curdir_path):
+ base_config_path = os.path.join(base_config_path, DEFAULT_TS_CONFIG_FILE)
+
+ # processing the base file recursively
+ base_config = TsConfig.load(os.path.join(os.path.dirname(self.path), base_config_path))
+ paths = [base_config_path] + base_config.inline_extend(dep_paths)
+
+ self.merge(rel_path, base_config)
+ del self.data["extends"]
+
+ return paths
+
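+ # Resolution sketch for inline_extend (illustrative; assumes extract_package_name_from_path()
+ # returns the scoped package name): for /project/foo/tsconfig.json containing
+ # {"extends": "@scope/cfg/tsconfig.base.json"} with dep_paths == {"@scope/cfg": "../node_modules/@scope/cfg"},
+ # the base config is loaded from ../node_modules/@scope/cfg/tsconfig.base.json relative to this
+ # tsconfig, merged in, and the "extends" key is removed.
+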
+ def get_or_create_compiler_options(self):
+ """
+ Returns ref to the "compilerOptions" dict.
+ :rtype: dict
+ """
+ opts = self.data.get("compilerOptions")
+ if opts is None:
+ opts = {}
+ self.data["compilerOptions"] = opts
+
+ return opts
+
+ def prepend_include(self, value):
+ """
+ Prepends `value` to `include` list
+ :param value: value to prepend
+ :type value: str
+ """
+ include_list = self.data.get("include") or []
+ self.data["include"] = [value] + include_list
+
+ def compiler_option(self, name, default=None):
+ """
+ :param name: option key
+ :type name: str
+ :param default: default value
+ :type default: mixed
+ :rtype: mixed
+ """
+ return self.get_or_create_compiler_options().get(name, default)
+
+ def add_to_compiler_option(self, name, add_value):
+ """
+ Merges the existing value with add_value for the option with label=name.
+ The merge is done recursively if the value is a dict.
+ :param name: option key
+ :type name: str
+ :param add_value: value to merge into the existing option value
+ :type add_value: mixed
+ """
+ default_value = {} if isinstance(add_value, dict) else []
+ opts = self.get_or_create_compiler_options()
+ opts[name] = merge_dicts(opts.get(name, default_value), add_value)
+
+ def inject_plugin(self, plugin):
+ """
+ :param plugin: plugin dict (ts-patch compatible, see https://github.com/nonara/ts-patch)
+ :type plugin: dict of str
+ """
+ opts = self.get_or_create_compiler_options()
+ if not opts.get("plugins"):
+ opts["plugins"] = []
+ opts["plugins"].append(plugin)
+
+ def validate(self):
+ """
+ Checks whether the config is compatible with current toolchain.
+ """
+ opts = self.get_or_create_compiler_options()
+ errors = []
+ root_dir = opts.get("rootDir")
+ out_dir = opts.get("outDir")
+ config_dir = os.path.dirname(self.path)
+
+ def is_mod_subdir(p):
+ return not os.path.isabs(p) and os.path.normpath(os.path.join(config_dir, p)).startswith(config_dir)
+
+ if root_dir is None:
+ errors.append("'rootDir' option is required")
+
+ if out_dir is None:
+ errors.append("'outDir' option is required")
+ elif not is_mod_subdir(out_dir):
+ errors.append("'outDir' should be a subdirectory of the module")
+
+ if opts.get("outFile") is not None:
+ errors.append("'outFile' option is not supported")
+
+ if opts.get("preserveSymlinks"):
+ errors.append("'preserveSymlinks' option is not supported due to pnpm limitations")
+
+ if opts.get("rootDirs") is not None:
+ errors.append("'rootDirs' option is not supported, relative imports should have single root")
+
+ if self.data.get("files") is not None:
+ errors.append("'files' option is not supported, use 'include'")
+
+ if self.data.get("references") is not None:
+ errors.append("composite builds are not supported, use peerdirs in ya.make instead of 'references' option")
+
+ if errors:
+ raise TsValidationError(self.path, errors)
+
+ def transform_paths(self, build_path, sources_path, package_rel_path, nodejs_bin_path):
+ """
+ Updates config with correct abs paths.
+ All source files/dirs will be mapped to `sources_path`, output files/dirs will be mapped to `build_path`.
+ :param build_path: module's build root
+ :type build_path: str
+ :param sources_path: module's source root
+ :type sources_path: str
+ :param package_rel_path: module's relative path to the package root
+ :type package_rel_path: str
+ :param nodejs_bin_path: path to the Node.js binary (its bundled @types/node is added to "paths")
+ :type nodejs_bin_path: str
+ """
+ opts = self.get_or_create_compiler_options()
+
+ def sources_path_rel(x):
+ return os.path.normpath(os.path.join(sources_path, x))
+
+ def build_path_rel(x):
+ return os.path.normpath(os.path.join(build_path, x))
+
+ root_dir = opts["rootDir"]
+ out_dir = opts["outDir"]
+
+ opts["rootDir"] = sources_path_rel(root_dir)
+ opts["outDir"] = build_path_rel(out_dir)
+
+ if opts.get("typeRoots"):
+ opts["typeRoots"] = list(map(sources_path_rel, opts["typeRoots"])) + list(
+ map(build_path_rel, opts["typeRoots"])
+ )
+
+ opts["baseUrl"] = os.path.normpath(os.path.join(package_rel_path, "node_modules"))
+
+ include_dir_list = self.data.get("include")
+ if include_dir_list:
+ self.data["include"] = list(map(sources_path_rel, include_dir_list))
+
+ exclude_dir_list = self.data.get("exclude")
+ if exclude_dir_list:
+ self.data["exclude"] = list(map(sources_path_rel, exclude_dir_list))
+
+ if opts.get("sourceMap"):
+ opts["sourceRoot"] = os.path.relpath(root_dir, out_dir)
+
+ opts["skipLibCheck"] = True
+
+ node_types_path = os.path.join(os.path.dirname(nodejs_bin_path), "node_modules", "@types", "node")
+ # See: https://st.yandex-team.ru/FBP-47#62b4750775525b18f08205c7
+ self.add_to_compiler_option("paths", {"*": ["*", "./@types/*", node_types_path]})
+
+ def write(self, path=None, indent=None):
+ """
+ :param path: tsconfig path, defaults to original path
+ :type path: str
+ """
+ if path is None:
+ path = self.path
+
+ with open(path, "w") as f:
+ json.dump(self.data, f, indent=indent)
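+
+ # Path-mapping sketch for transform_paths (illustrative values): with rootDir "./src" and outDir "./build",
+ #     transform_paths(build_path="/bld/mod", sources_path="/src/mod",
+ #                     package_rel_path="../..", nodejs_bin_path="/tools/node/bin/node")
+ # rewrites rootDir to "/src/mod/src", outDir to "/bld/mod/build", baseUrl to "../../node_modules",
+ # and forces skipLibCheck to True.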
diff --git a/build/plugins/lib/nots/typescript/ts_errors.py b/build/plugins/lib/nots/typescript/ts_errors.py
new file mode 100644
index 0000000000..105851d9ec
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/ts_errors.py
@@ -0,0 +1,10 @@
+class TsError(RuntimeError):
+ pass
+
+
+class TsValidationError(TsError):
+ def __init__(self, path, errors):
+ self.path = path
+ self.errors = errors
+
+ super(TsValidationError, self).__init__("Invalid tsconfig {}:\n{}".format(path, "\n".join(errors)))
diff --git a/build/plugins/lib/nots/typescript/ya.make b/build/plugins/lib/nots/typescript/ya.make
new file mode 100644
index 0000000000..8847f9bbd3
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/ya.make
@@ -0,0 +1,19 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+ ts_errors.py
+ ts_config.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/ya.make b/build/plugins/lib/nots/ya.make
new file mode 100644
index 0000000000..b24c534033
--- /dev/null
+++ b/build/plugins/lib/nots/ya.make
@@ -0,0 +1,15 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager
+ build/plugins/lib/nots/semver
+ build/plugins/lib/nots/typescript
+)
+
+END()
diff --git a/build/plugins/lib/test_const/__init__.py b/build/plugins/lib/test_const/__init__.py
new file mode 100644
index 0000000000..a3229bad25
--- /dev/null
+++ b/build/plugins/lib/test_const/__init__.py
@@ -0,0 +1,522 @@
+# coding: utf-8
+import re
+
+
+RESTART_TEST_INDICATOR = '##restart-test##'
+INFRASTRUCTURE_ERROR_INDICATOR = '##infrastructure-error##'
+
+RESTART_TEST_INDICATORS = [
+ RESTART_TEST_INDICATOR,
+ "network error",
+]
+
+UID_PREFIX_DELIMITER = '-'
+
+# testing
+BIN_DIRECTORY = 'bin'
+CANON_DATA_DIR_NAME = "canondata"
+CANON_RESULT_FILE_NAME = "result.json"
+CANONIZATION_RESULT_FILE_NAME = "canonization_res.json"
+COMMON_CONTEXT_FILE_NAME = "common_test.context"
+CONSOLE_SNIPPET_LIMIT = 5000
+FAKE_OUTPUT_EXTS = frozenset([".mf", ".fake", ".cpf", ".cpsf"])
+LIST_NODE_LOG_FILE = "test_list.log"
+LIST_NODE_RESULT_FILE = "test_list.json"
+LIST_RESULT_NODE_LOG_FILE = "list_result.log"
+LIST_TRACE_FILE_NAME = "ytest_list.report.trace"
+MAX_FILE_SIZE = 1024 * 1024 * 2 # 2 MB
+MAX_TEST_RESTART_COUNT = 3
+NO_LISTED_TESTS = "NO_LISTED_TESTS"
+REPORT_SNIPPET_LIMIT = 12000
+SANITIZER_ERROR_RC = 100
+SUITE_CONTEXT_FILE_NAME = "test.context"
+TEST_LIST_FILE = "test_names_list.json"
+TEST_SUBTEST_SEPARATOR = '::'
+TESTING_OUT_DIR_NAME = "testing_out_stuff"
+TESTING_OUT_RAM_DRIVE_DIR_NAME = "ram_drive_output"
+TESTING_OUT_TAR_NAME = TESTING_OUT_DIR_NAME + ".tar.zstd"
+TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
+TRACE_FILE_NAME = "ytest.report.trace"
+TRUNCATING_IGNORE_FILE_LIST = {TRACE_FILE_NAME, SUITE_CONTEXT_FILE_NAME, "run_test.log"}
+YT_RUN_TEST_DIR_NAME = "yt_run_test"
+YT_RUN_TEST_TAR_NAME = "yt_run_test.tar"
+COVERAGE_CFLAGS = ["-fprofile-instr-generate", "-fcoverage-mapping", "-DCLANG_COVERAGE"]
+COVERAGE_LDFLAGS = ["-fprofile-instr-generate", "-fcoverage-mapping"]
+
+MANDATORY_ENV_VAR_NAME = 'YA_MANDATORY_ENV_VARS'
+
+BUILD_FLAGS_ALLOWED_IN_CONTEXT = {
+ 'AUTOCHECK',
+ # Required for local test runs
+ 'TESTS_REQUESTED',
+ 'USE_ARCADIA_PYTHON',
+ 'USE_SYSTEM_PYTHON',
+}
+
+STYLE_TEST_TYPES = [
+ "classpath.clash",
+ "clang_tidy",
+ "eslint",
+ "flake8.py2",
+ "flake8.py3",
+ "gofmt",
+ "govet",
+ "java.style",
+ "ktlint",
+ "custom_lint",
+]
+
+REGULAR_TEST_TYPES = [
+ "benchmark",
+ "boost_test",
+ "exectest",
+ "fuzz",
+ "g_benchmark",
+ "go_bench",
+ "go_test",
+ "gtest",
+ "hermione",
+ "java",
+ "jest",
+ "py2test",
+ "py3test",
+ "pytest",
+ "unittest",
+]
+
+TEST_NODE_OUTPUT_RESULTS = [TESTING_OUT_TAR_NAME, YT_RUN_TEST_TAR_NAME]
+
+# kvm
+DEFAULT_RAM_REQUIREMENTS_FOR_KVM = 4
+MAX_RAM_REQUIREMENTS_FOR_KVM = 16
+
+# distbuild
+DISTBUILD_STATUS_REPORT_ENV_NAME = 'NODE_EXTENDED_STATUS_FILE_PATH'
+DEFAULT_TEST_NODE_TIMEOUT = 15 * 60
+TEST_NODE_FINISHING_TIME = 5 * 60
+
+# coverage
+COVERAGE_FUNCTION_ENTRIES_LIMIT = 2
+COVERAGE_PYTHON_EXTS = (".py", ".pyx", ".pxi", ".pxd")
+
+COVERAGE_RESOLVED_FILE_NAME_PATTERN = "coverage_resolved.{}.json"
+CPP_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("cpp")
+GO_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("go")
+JAVA_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("java")
+NLG_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("nlg")
+PYTHON2_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("py2")
+PYTHON3_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("py3")
+TS_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("ts")
+
+COVERAGE_CLANG_ENV_NAME = 'LLVM_PROFILE_FILE'
+COVERAGE_GCOV_ENV_NAME = 'GCOV_PREFIX'
+COVERAGE_GO_ENV_NAME = 'GO_COVERAGE_PREFIX'
+COVERAGE_PYTHON_ENV_NAME = 'PYTHON_COVERAGE_PREFIX'
+COVERAGE_TS_ENV_NAME = 'TS_COVERAGE_PREFIX'
+COVERAGE_NLG_ENV_NAME = 'NLG_COVERAGE_FILENAME'
+COVERAGE_ENV_VARS = (
+ COVERAGE_CLANG_ENV_NAME,
+ COVERAGE_GCOV_ENV_NAME,
+ COVERAGE_GO_ENV_NAME,
+ COVERAGE_NLG_ENV_NAME,
+ COVERAGE_PYTHON_ENV_NAME,
+ COVERAGE_TS_ENV_NAME,
+)
+PYTHON_COVERAGE_PREFIX_FILTER_ENV_NAME = 'PYTHON_COVERAGE_PREFIX_FILTER'
+PYTHON_COVERAGE_EXCLUDE_REGEXP_ENV_NAME = 'PYTHON_COVERAGE_EXCLUDE_REGEXP'
+
+CLANG_COVERAGE_TEST_TYPES = (
+ "boost_test",
+ "coverage_extractor",
+ "exectest",
+ "gtest",
+ # java tests might use shared libraries
+ "java",
+ "py2test",
+ "py3test",
+ "pytest",
+ "unittest",
+)
+COVERAGE_TABLE_CHUNKS = 20
+COVERAGE_TESTS_TIMEOUT_FACTOR = 1.5
+COVERAGE_YT_PROXY = "hahn.yt.yandex.net"
+COVERAGE_YT_ROOT_PATH = "//home/codecoverage"
+COVERAGE_YT_TABLE_PREFIX = "datatable"
+
+# fuzzing
+CORPUS_DATA_FILE_NAME = 'corpus.json'
+CORPUS_DATA_ROOT_DIR = 'fuzzing'
+CORPUS_DIR_NAME = 'corpus'
+FUZZING_COVERAGE_ARGS = ['--sanitize-coverage=trace-div,trace-gep']
+FUZZING_COMPRESSION_COEF = 1.1
+FUZZING_DEFAULT_TIMEOUT = 3600
+FUZZING_FINISHING_TIME = 600
+FUZZING_TIMEOUT_RE = re.compile(r'(^|\s)-max_total_time=(?P<max_time>\d+)')
+GENERATED_CORPUS_DIR_NAME = 'mined_corpus'
+MAX_CORPUS_RESOURCES_ALLOWED = 5
+
+# hermione
+HERMIONE_REPORT_DIR_NAME = "hermione-report"
+HERMIONE_REPORT_TAR_NAME = HERMIONE_REPORT_DIR_NAME + ".tar"
+HERMIONE_REPORT_INDEX_FILE_NAME = "index.html"
+HERMIONE_REPORT_DB_URLS_FILE_NAME = "databaseUrls.json"
+HERMIONE_TESTS_READ_FILE_NAME = "tests.json"
+HERMIONE_TESTS_READ_STDOUT_FILE_NAME = "read_tests.out"
+HERMIONE_TESTS_READ_STDERR_FILE_NAME = "read_tests.err"
+HERMIONE_TESTS_RUN_FILE_NAME = "test_results.jsonl"
+HERMIONE_TESTS_RUN_STDOUT_FILE_NAME = "run_tests.out"
+HERMIONE_TESTS_RUN_STDERR_FILE_NAME = "run_tests.err"
+
+# yt
+YT_OPERATION_ID_SUBSTITUTION = '$OPERATION_ID'
+YT_SANDBOX_ROOT_PREFIX = '$(YT_SANDBOX_ROOT)'
+
+# sandbox
+SANDBOX_RUN_TEST_YT_TOKEN_VALUE_NAME = 'YA_MAKE_SANDBOX_RUN_TEST_YT_TOKEN'
+
+# global resources
+ANDROID_AVD_ROOT = 'ANDROID_AVD_RESOURCE_GLOBAL'
+ANDROID_SDK_ROOT = 'ANDROID_SDK_RESOURCE_GLOBAL'
+COVERAGE_PUSH_TOOL_LOCAL = 'USE_SYSTEM_COVERAGE_PUSH_TOOL'
+COVERAGE_PUSH_TOOL_RESOURCE = 'COVERAGE_PUSH_TOOL_RESOURCE_GLOBAL'
+COVERAGE_PUSH_TOOL_LB_LOCAL = 'USE_SYSTEM_COVERAGE_PUSH_TOOL_LB'
+COVERAGE_PUSH_TOOL_LB_RESOURCE = 'COVERAGE_PUSH_TOOL_LB_RESOURCE_GLOBAL'
+FLAKE8_PY2_RESOURCE = 'FLAKE8_PY2_RESOURCE_GLOBAL'
+FLAKE8_PY3_RESOURCE = 'FLAKE8_PY3_RESOURCE_GLOBAL'
+GO_TOOLS_RESOURCE = 'GO_TOOLS_RESOURCE_GLOBAL'
+JSTYLE_RUNNER_LIB = 'JSTYLE_LIB_RESOURCE_GLOBAL'
+NODEJS_RESOURCE = 'NODEJS_RESOURCE_GLOBAL'
+NYC_RESOURCE = 'NYC_RESOURCE_GLOBAL'
+TEST_TOOL3_HOST = 'TEST_TOOL3_HOST_RESOURCE_GLOBAL'
+TEST_TOOL3_HOST_LOCAL = 'TEST_TOOL3_HOST_LOCAL'
+TEST_TOOL_HOST = 'TEST_TOOL_HOST_RESOURCE_GLOBAL'
+TEST_TOOL_HOST_LOCAL = 'TEST_TOOL_HOST_LOCAL'
+TEST_TOOL_TARGET = 'TEST_TOOL_TARGET_RESOURCE_GLOBAL'
+TEST_TOOL_TARGET_LOCAL = 'TEST_TOOL_TARGET_LOCAL'
+XCODE_TOOLS_RESOURCE = 'XCODE_TOOLS_ROOT_RESOURCE_GLOBAL'
+WINE_TOOL = 'WINE_TOOL_RESOURCE_GLOBAL'
+WINE32_TOOL = 'WINE32_TOOL_RESOURCE_GLOBAL'
+
+
+class Enum(object):
+ @classmethod
+ def enumerate(cls):
+ return [v for k, v in cls.__dict__.items() if not k.startswith("_")]
+
+
+class TestRequirements(Enum):
+ Container = 'container'
+ Cpu = 'cpu'
+ DiskUsage = 'disk_usage'
+ Dns = 'dns'
+ Kvm = 'kvm'
+ Network = 'network'
+ Ram = 'ram'
+ RamDisk = 'ram_disk'
+ SbVault = 'sb_vault'
+ YavSecret = 'yav'
+
+
+class TestRequirementsConstants(Enum):
+ All = 'all'
+ AllCpuValue = 50
+ AllRamDiskValue = 50
+ MinCpu = 1
+ MinRam = 1
+ MinRamDisk = 0
+
+ @classmethod
+ def is_all_cpu(cls, value):
+ return value == cls.All
+
+ @classmethod
+ def get_cpu_value(cls, value):
+ return cls.AllCpuValue if cls.is_all_cpu(value) else value
+
+ @classmethod
+ def is_all_ram_disk(cls, value):
+ return value == cls.All
+
+ @classmethod
+ def get_ram_disk_value(cls, value):
+ return cls.AllRamDiskValue if cls.is_all_ram_disk(value) else value
+
+
+class TestSize(Enum):
+ Small = 'small'
+ Medium = 'medium'
+ Large = 'large'
+
+ DefaultTimeouts = {
+ Small: 60,
+ Medium: 600,
+ Large: 3600,
+ }
+
+ DefaultPriorities = {
+ Small: -1,
+ Medium: -2,
+ Large: -3,
+ }
+
+ DefaultRequirements = {
+ Small: {
+ TestRequirements.Cpu: 1,
+ TestRequirements.Ram: 8,
+ # TestRequirements.Ram: 2,
+ TestRequirements.RamDisk: 0,
+ },
+ Medium: {
+ TestRequirements.Cpu: 1,
+ TestRequirements.Ram: 8,
+ # TestRequirements.Ram: 4,
+ TestRequirements.RamDisk: 0,
+ },
+ Large: {
+ TestRequirements.Cpu: 1,
+ TestRequirements.Ram: 8,
+ # TestRequirements.Ram: 8,
+ TestRequirements.RamDisk: 0,
+ },
+ }
+
+ MaxRequirements = {
+ Small: {
+ TestRequirements.Cpu: 4,
+ TestRequirements.Ram: 32,
+ # TestRequirements.Ram: 4,
+ TestRequirements.RamDisk: 32,
+ },
+ Medium: {
+ TestRequirements.Cpu: 4,
+ # TestRequirements.Cpu: 8,
+ TestRequirements.Ram: 32,
+ # TestRequirements.Ram: 16,
+ TestRequirements.RamDisk: 32,
+ },
+ Large: {
+ TestRequirements.Cpu: 4,
+ TestRequirements.Ram: 32,
+ TestRequirements.RamDisk: 32,
+ },
+ }
+
+ LargeMarker = "TL"
+ MediumMarker = "TM"
+ SmallMarker = "TS"
+ SizeMarkers = (LargeMarker, MediumMarker, SmallMarker)
+
+ SizeShorthandMap = {
+ Large: LargeMarker,
+ Medium: MediumMarker,
+ Small: SmallMarker,
+ }
+
+ @classmethod
+ def sizes(cls):
+ return cls.DefaultTimeouts.keys()
+
+ @classmethod
+ def get_shorthand(cls, size):
+ return cls.SizeShorthandMap[size]
+
+ @classmethod
+ def is_test_shorthand(cls, name):
+ return name in cls.SizeMarkers
+
+ @classmethod
+ def get_default_timeout(cls, size):
+ if size in cls.DefaultTimeouts:
+ return cls.DefaultTimeouts[size]
+ raise Exception("Unknown test size '{}'".format(size))
+
+ @classmethod
+ def get_default_priorities(cls, size):
+ if size in cls.DefaultPriorities:
+ return cls.DefaultPriorities[size]
+ raise Exception("Unknown test size '{}'".format(size))
+
+ @classmethod
+ def get_default_requirements(cls, size):
+ if size in cls.DefaultRequirements:
+ return cls.DefaultRequirements[size]
+ raise Exception("Unknown test size '{}'".format(size))
+
+ @classmethod
+ def get_max_requirements(cls, size):
+ if size in cls.MaxRequirements:
+ return cls.MaxRequirements[size]
+ raise Exception("Unknown test size '{}'".format(size))
+
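+# For example (illustrative): TestSize.get_default_timeout(TestSize.Small) == 60 and
+# TestSize.get_default_requirements(TestSize.Medium)[TestRequirements.Ram] == 8.
+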
+
+class TestRunExitCode(Enum):
+ Skipped = 2
+ Failed = 3
+ TimeOut = 10
+ InfrastructureError = 12
+
+
+class YaTestTags(Enum):
+ AlwaysMinimize = "ya:always_minimize"
+ Dirty = "ya:dirty"
+ DumpNodeEnvironment = "ya:dump_node_env"
+ DumpTestEnvironment = "ya:dump_test_env"
+ ExoticPlatform = "ya:exotic_platform"
+ External = "ya:external"
+ Fat = "ya:fat"
+ ForceDistbuild = "ya:force_distbuild"
+ ForceSandbox = "ya:force_sandbox"
+ GoNoSubtestReport = "ya:go_no_subtest_report"
+ GoTotalReport = "ya:go_total_report"
+ HugeLogs = "ya:huge_logs"
+ Manual = "ya:manual"
+ MapRootUser = "ya:map_root_user"
+ NoFuse = "ya:nofuse"
+ NoGracefulShutdown = "ya:no_graceful_shutdown"
+ Norestart = "ya:norestart"
+ Noretries = "ya:noretries"
+ NotAutocheck = "ya:not_autocheck"
+ Notags = "ya:notags"
+ PerfTest = "ya:perftest"
+ Privileged = "ya:privileged"
+ ReportChunks = "ya:report_chunks"
+ RunWithAsserts = "ya:relwithdebinfo"
+ SandboxCoverage = "ya:sandbox_coverage"
+ SequentialRun = "ya:sequential_run"
+ TraceOutput = "ya:trace_output"
+ YtRunner = "ya:yt"
+
+
+class ServiceTags(Enum):
+ AnyTag = "ya:__any_tag"
+
+
+class Status(object):
+ GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(1, 8)
+ SKIPPED = -100
+ NOT_LAUNCHED = -200
+ CANON_DIFF = -300
+ DESELECTED = -400
+ INTERNAL = -int(2 ** 31 - 1) # maxint
+ FLAKY = -50
+ # XFAILDIFF is internal status and should be replaced
+ # with XFAIL or XPASS during verification stage of canon data
+ XFAILDIFF = -90
+
+ BY_NAME = {
+ 'crashed': CRASHED,
+ 'deselected': DESELECTED,
+ 'diff': CANON_DIFF,
+ 'fail': FAIL,
+ 'flaky': FLAKY,
+ 'good': GOOD,
+ 'internal': INTERNAL,
+ 'missing': MISSING,
+ 'not_launched': NOT_LAUNCHED,
+ 'skipped': SKIPPED,
+ 'timeout': TIMEOUT,
+ 'xfail': XFAIL,
+ 'xfaildiff': XFAILDIFF,
+ 'xpass': XPASS,
+ }
+ TO_STR = {
+ CANON_DIFF: 'diff',
+ CRASHED: 'crashed',
+ DESELECTED: 'deselected',
+ FAIL: 'fail',
+ FLAKY: 'flaky',
+ GOOD: 'good',
+ INTERNAL: 'internal',
+ MISSING: 'missing',
+ NOT_LAUNCHED: 'not_launched',
+ SKIPPED: 'skipped',
+ TIMEOUT: 'timeout',
+ XFAIL: 'xfail',
+ XFAILDIFF: 'xfaildiff',
+ XPASS: 'xpass',
+ }
+
+
+class _Colors(object):
+
+ _NAMES = [
+ "blue",
+ "cyan",
+ "default",
+ "green",
+ "grey",
+ "magenta",
+ "red",
+ "white",
+ "yellow",
+ ]
+ _PREFIXES = ["", "light", "dark"]
+
+ def __init__(self):
+ self._table = {}
+ for prefix in self._PREFIXES:
+ for value in self._NAMES:
+ name = value
+ if prefix:
+ name = "{}_{}".format(prefix, value)
+ value = "{}-{}".format(prefix, value)
+ self.__add_color(name.upper(), value)
+
+ def __add_color(self, name, value):
+ self._table[name] = value
+ self.__setattr__(name, value)
+
+
+Colors = _Colors()
+
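+# E.g. (illustrative): Colors.BLUE == "blue", Colors.LIGHT_BLUE == "light-blue",
+# Colors.DARK_RED == "dark-red".
+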
+
+class _Highlight(object):
+
+ _MARKERS = {
+ # special
+ "RESET": "rst",
+ "IMPORTANT": "imp",
+ "UNIMPORTANT": "unimp",
+ "BAD": "bad",
+ "WARNING": "warn",
+ "GOOD": "good",
+ "PATH": "path",
+ "ALTERNATIVE1": "alt1",
+ "ALTERNATIVE2": "alt2",
+ "ALTERNATIVE3": "alt3",
+ }
+
+ def __init__(self):
+ # setting attributes because __getattr__ is much slower
+ for attr, value in self._MARKERS.items():
+ self.__setattr__(attr, value)
+
+
+Highlight = _Highlight()
+
+
+class _StatusColorMap(object):
+ # There should be no XFAILDIFF, because it's internal status.
+ # It should be replaced with XFAIL or XPASS during verification of canon data.
+
+ _MAP = {
+ 'crashed': Highlight.WARNING,
+ 'deselected': Highlight.UNIMPORTANT,
+ 'diff': Highlight.BAD,
+ 'fail': Highlight.BAD,
+ 'flaky': Highlight.ALTERNATIVE3,
+ 'good': Highlight.GOOD,
+ 'internal': Highlight.BAD,
+ 'missing': Highlight.ALTERNATIVE1,
+ 'not_launched': Highlight.BAD,
+ 'skipped': Highlight.UNIMPORTANT,
+ 'timeout': Highlight.BAD,
+ 'xfail': Highlight.WARNING,
+ 'xpass': Highlight.WARNING,
+ }
+
+ def __getitem__(self, item):
+ return self._MAP[item]
+
+
+StatusColorMap = _StatusColorMap()
diff --git a/build/plugins/lib/test_const/ya.make b/build/plugins/lib/test_const/ya.make
new file mode 100644
index 0000000000..60f4867e46
--- /dev/null
+++ b/build/plugins/lib/test_const/ya.make
@@ -0,0 +1,9 @@
+OWNER(g:ymake)
+
+PY23_LIBRARY()
+
+PY_SRCS(
+ __init__.py
+)
+
+END()
diff --git a/build/plugins/lib/ya.make b/build/plugins/lib/ya.make
new file mode 100644
index 0000000000..7e61d12080
--- /dev/null
+++ b/build/plugins/lib/ya.make
@@ -0,0 +1,7 @@
+OWNER(g:ymake)
+
+PY23_LIBRARY()
+ PY_SRCS(
+ _metric_resolvers.py
+ )
+END()
diff --git a/build/plugins/linker_script.py b/build/plugins/linker_script.py
new file mode 100644
index 0000000000..bee9777a4e
--- /dev/null
+++ b/build/plugins/linker_script.py
@@ -0,0 +1,12 @@
+def onlinker_script(unit, *args):
+ """
+ @usage: LINKER_SCRIPT(Files...)
+
+ Specify files to be used as a linker script
+ """
+ for arg in args:
+ if not arg.endswith(".ld") and not arg.endswith(".ld.in"):
+ unit.message(['error', "Invalid linker script extension: {}".format(arg)])
+ return
+
+ unit.onglobal_srcs(list(args))
diff --git a/build/plugins/lj_archive.py b/build/plugins/lj_archive.py
new file mode 100644
index 0000000000..1d80bb98f3
--- /dev/null
+++ b/build/plugins/lj_archive.py
@@ -0,0 +1,44 @@
+def onlj_archive(unit, *args):
+ """
+ @usage: LJ_ARCHIVE(NAME Name LuaFiles...)
+ Precompile .lua files using LuaJIT and archive both sources and results using sources names as keys
+ """
+ def iter_luas(l):
+ for a in l:
+ if a.endswith('.lua'):
+ yield a
+
+ def iter_objs(l):
+ for a in l:
+ s = a[:-3] + 'raw'
+ unit.on_luajit_objdump(['OUT', s, a])
+ yield s
+
+ luas = list(iter_luas(args))
+ objs = list(iter_objs(luas))
+
+ unit.onarchive_by_keys(['DONTCOMPRESS', 'NAME', 'LuaScripts.inc', 'KEYS', ':'.join(luas)] + objs)
+ unit.onarchive_by_keys(['DONTCOMPRESS', 'NAME', 'LuaSources.inc', 'KEYS', ':'.join(luas)] + luas)
+
+def onlj_21_archive(unit, *args):
+ """
+ @usage: LJ_21_ARCHIVE(NAME Name LuaFiles...) # deprecated
+ Precompile .lua files using LuaJIT 2.1 and archive both sources and results using sources names as keys
+ """
+ def iter_luas(l):
+ for a in l:
+ if a.endswith('.lua'):
+ yield a
+
+ def iter_objs(l):
+ for a in l:
+ s = a[:-3] + 'raw'
+ unit.on_luajit_21_objdump(['OUT', s, a])
+ yield s
+
+ luas = list(iter_luas(args))
+ objs = list(iter_objs(luas))
+
+ unit.onarchive_by_keys(['DONTCOMPRESS', 'NAME', 'LuaScripts.inc', 'KEYS', ':'.join(luas)] + objs)
+ unit.onarchive_by_keys(['DONTCOMPRESS', 'NAME', 'LuaSources.inc', 'KEYS', ':'.join(luas)] + luas)
+
diff --git a/build/plugins/llvm_bc.py b/build/plugins/llvm_bc.py
new file mode 100644
index 0000000000..7666c21907
--- /dev/null
+++ b/build/plugins/llvm_bc.py
@@ -0,0 +1,33 @@
+import sys
+
+from _common import rootrel_arc_src, sort_by_keywords, skip_build_root, stripext
+
+
+def onllvm_bc(unit, *args):
+ free_args, kwds = sort_by_keywords({'SYMBOLS': -1, 'NAME': 1, 'NO_COMPILE': 0}, args)
+ name = kwds['NAME'][0]
+ symbols = kwds.get('SYMBOLS')
+ obj_suf = unit.get('OBJ_SUF')
+ skip_compile_step = 'NO_COMPILE' in kwds
+ merged_bc = name + '_merged' + obj_suf + '.bc'
+ out_bc = name + '_optimized' + obj_suf + '.bc'
+ bcs = []
+ for x in free_args:
+ rel_path = rootrel_arc_src(x, unit)
+ bc_path = '${ARCADIA_BUILD_ROOT}/' + skip_build_root(rel_path) + obj_suf + '.bc'
+ if not skip_compile_step:
+ if x.endswith('.c'):
+ llvm_compile = unit.onllvm_compile_c
+ elif x.endswith('.ll'):
+ llvm_compile = unit.onllvm_compile_ll
+ else:
+ llvm_compile = unit.onllvm_compile_cxx
+ llvm_compile([rel_path, bc_path])
+ bcs.append(bc_path)
+ unit.onllvm_link([merged_bc] + bcs)
+ opt_opts = ['-O2', '-globalopt', '-globaldce']
+ if symbols:
+ # XXX: '#' used instead of ',' to overcome ymake tendency to split everything by comma
+ opt_opts += ['-internalize', '-internalize-public-api-list=' + '#'.join(symbols)]
+ unit.onllvm_opt([merged_bc, out_bc] + opt_opts)
+ unit.onresource([out_bc, '/llvm_bc/' + name])
diff --git a/build/plugins/macros_with_error.py b/build/plugins/macros_with_error.py
new file mode 100644
index 0000000000..e82fb56d2c
--- /dev/null
+++ b/build/plugins/macros_with_error.py
@@ -0,0 +1,29 @@
+import sys
+
+import _common
+
+import ymake
+
+
+def onmacros_with_error(unit, *args):
+ print >> sys.stderr, 'This macro will fail'
+ raise Exception('Expected fail in MACROS_WITH_ERROR')
+
+
+def onrestrict_path(unit, *args):
+ if args:
+ if 'MSG' in args:
+ pos = args.index('MSG')
+ paths, msg = args[:pos], args[pos + 1:]
+ msg = ' '.join(msg)
+ else:
+ paths, msg = args, 'forbidden'
+ if not _common.strip_roots(unit.path()).startswith(paths):
+ error_msg = "Path '[[imp]]{}[[rst]]' is restricted - [[bad]]{}[[rst]]. Valid path prefixes are: [[unimp]]{}[[rst]]".format(unit.path(), msg, ', '.join(paths))
+ ymake.report_configure_error(error_msg)
+
+def onassert(unit, *args):
+ val = unit.get(args[0])
+ if val and val.lower() == "no":
+ msg = ' '.join(args[1:])
+ ymake.report_configure_error(msg)
diff --git a/build/plugins/mx_archive.py b/build/plugins/mx_archive.py
new file mode 100644
index 0000000000..56b0d4d16e
--- /dev/null
+++ b/build/plugins/mx_archive.py
@@ -0,0 +1,16 @@
+def onmx_formulas(unit, *args):
+ """
+ @usage: MX_FORMULAS(BinFiles...) # deprecated, matrixnet
+ Create MatrixNet formulas archive
+ """
+ def iter_infos():
+ for a in args:
+ if a.endswith('.bin'):
+ unit.on_mx_bin_to_info([a])
+ yield a[:-3] + 'info'
+ else:
+ yield a
+
+ infos = list(iter_infos())
+ unit.onarchive_asm(['NAME', 'MxFormulas'] + infos)
+ unit.on_mx_gen_table(infos)
diff --git a/build/plugins/nots.py b/build/plugins/nots.py
new file mode 100644
index 0000000000..77f4074a91
--- /dev/null
+++ b/build/plugins/nots.py
@@ -0,0 +1,309 @@
+import fnmatch
+import os
+import ytest
+
+from _common import to_yesno, rootrel_arc_src
+
+
+def _build_cmd_input_paths(paths, hide=False):
+ return " ".join(["${{input{}:\"{}\"}}".format(";hide" if hide else "", p) for p in paths])
+
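+# For example (illustrative): _build_cmd_input_paths(["a/b.ts"], hide=True) returns
+# '${input;hide:"a/b.ts"}', i.e. the ${input:...} form used in ymake command templates.
+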
+
+def _create_pm(unit):
+ from lib.nots.package_manager import manager
+
+ sources_path = unit.path()
+ module_path = unit.get("MODDIR")
+ if unit.get("TS_TEST_FOR"):
+ sources_path = unit.get("TS_TEST_FOR_DIR")
+ module_path = unit.get("TS_TEST_FOR_PATH")
+
+ return manager(
+ sources_path=unit.resolve(sources_path),
+ build_root="$B",
+ build_path=unit.path().replace("$S", "$B", 1),
+ contribs_path=unit.get("NPM_CONTRIBS_PATH"),
+ nodejs_bin_path=None,
+ script_path=None,
+ module_path=module_path,
+ )
+
+
+def on_from_npm_lockfiles(unit, *args):
+ pm = _create_pm(unit)
+ lf_paths = []
+
+ for lf_path in args:
+ abs_lf_path = unit.resolve(unit.resolve_arc_path(lf_path))
+ if not abs_lf_path:
+ raise Exception("lockfile not found: {}".format(lf_path))
+ lf_paths.append(abs_lf_path)
+
+ for pkg in pm.extract_packages_meta_from_lockfiles(lf_paths):
+ unit.onfrom_npm([pkg.name, pkg.version, pkg.sky_id, pkg.integrity, pkg.integrity_algorithm, pkg.tarball_path])
+
+
+def onnode_modules(unit):
+ pm = _create_pm(unit)
+ unit.onpeerdir(pm.get_local_peers_from_package_json())
+ ins, outs = pm.calc_node_modules_inouts()
+ unit.on_node_modules(["IN"] + sorted(ins) + ["OUT"] + sorted(outs))
+
+
+def on_ts_configure(unit, tsconfig_path):
+ from lib.nots.package_manager.base import PackageJson
+ from lib.nots.package_manager.base.utils import build_pj_path
+ from lib.nots.typescript import TsConfig
+
+ abs_tsconfig_path = unit.resolve(unit.resolve_arc_path(tsconfig_path))
+ if not abs_tsconfig_path:
+ raise Exception("tsconfig not found: {}".format(tsconfig_path))
+
+ tsconfig = TsConfig.load(abs_tsconfig_path)
+ cur_dir = unit.get("TS_TEST_FOR_PATH") if unit.get("TS_TEST_FOR") else unit.get("MODDIR")
+ pj_path = build_pj_path(unit.resolve(unit.resolve_arc_path(cur_dir)))
+ dep_paths = PackageJson.load(pj_path).get_dep_paths_by_names()
+ config_files = tsconfig.inline_extend(dep_paths)
+
+ mod_dir = unit.get("MODDIR")
+ config_files = _resolve_module_files(unit, mod_dir, config_files)
+ tsconfig.validate()
+
+ unit.set(["TS_CONFIG_FILES", _build_cmd_input_paths(config_files, hide=True)])
+ unit.set(["TS_CONFIG_ROOT_DIR", tsconfig.compiler_option("rootDir")])
+ unit.set(["TS_CONFIG_OUT_DIR", tsconfig.compiler_option("outDir")])
+ unit.set(["TS_CONFIG_SOURCE_MAP", to_yesno(tsconfig.compiler_option("sourceMap"))])
+ unit.set(["TS_CONFIG_DECLARATION", to_yesno(tsconfig.compiler_option("declaration"))])
+ unit.set(["TS_CONFIG_DECLARATION_MAP", to_yesno(tsconfig.compiler_option("declarationMap"))])
+ unit.set(["TS_CONFIG_PRESERVE_JSX", to_yesno(tsconfig.compiler_option("jsx") == "preserve")])
+
+ _set_nodejs_root(unit)
+ _setup_eslint(unit)
+
+
+def _is_tests_enabled(unit):
+ if unit.get("TIDY") == "yes":
+ return False
+
+ return True
+
+
+def on_ts_test_configure(unit):
+ if not _is_tests_enabled(unit):
+ return
+
+ test_runner_handlers = _get_test_runner_handlers()
+ test_runner = unit.get("TS_TEST_RUNNER")
+
+ if not test_runner:
+ raise Exception("Test runner is not specified")
+
+ if test_runner not in test_runner_handlers:
+ raise Exception("Test runner: {} is not available, try to use one of these: {}"
+ .format(test_runner, ", ".join(test_runner_handlers.keys())))
+
+ test_files = ytest.get_values_list(unit, "_TS_TEST_SRCS_VALUE")
+ if not test_files:
+ raise Exception("No tests found in {}".format(unit.path()))
+
+ config_path = unit.get(unit.get("TS_TEST_CONFIG_PATH_VAR"))
+ abs_config_path = unit.resolve(unit.resolve_arc_path(config_path))
+ if not abs_config_path:
+ raise Exception("{} config not found: {}".format(test_runner, config_path))
+
+ mod_dir = unit.get("MODDIR")
+ test_files = _resolve_module_files(unit, mod_dir, test_files)
+ data_dirs = list(set([os.path.dirname(rootrel_arc_src(p, unit))
+ for p in (ytest.get_values_list(unit, "_TS_TEST_DATA_VALUE") or [])]))
+
+ deps = _create_pm(unit).get_peers_from_package_json()
+ test_record = {
+ "TS-TEST-FOR-PATH": unit.get("TS_TEST_FOR_PATH"),
+ "TS-ROOT-DIR": unit.get("TS_CONFIG_ROOT_DIR"),
+ "TS-OUT-DIR": unit.get("TS_CONFIG_OUT_DIR"),
+ "TS-TEST-DATA-DIRS": ytest.serialize_list(data_dirs),
+ "TS-TEST-DATA-DIRS-RENAME": unit.get("_TS_TEST_DATA_DIRS_RENAME_VALUE"),
+ "CONFIG-PATH": config_path,
+ }
+
+ add_ts_test = test_runner_handlers[test_runner]
+ add_ts_test(unit, test_runner, test_files, deps, test_record)
+
+
+def _get_test_runner_handlers():
+ return {
+ "jest": _add_jest_ts_test,
+ "hermione": _add_hermione_ts_test,
+ }
+
+
+def _add_jest_ts_test(unit, test_runner, test_files, deps, test_record):
+ nots_plugins_path = os.path.join(unit.get("NOTS_PLUGINS_PATH"), "jest")
+ deps.append(nots_plugins_path)
+ test_record["NOTS-PLUGINS-PATH"] = nots_plugins_path
+
+ _add_test(unit, test_runner, test_files, deps, test_record)
+
+
+def _add_hermione_ts_test(unit, test_runner, test_files, deps, test_record):
+ test_tags = list(set(["ya:fat", "ya:external"] + ytest.get_values_list(unit, "TEST_TAGS_VALUE")))
+ test_requirements = list(set(["network:full"] + ytest.get_values_list(unit, "TEST_REQUIREMENTS_VALUE")))
+
+ if not len(test_record["TS-TEST-DATA-DIRS"]):
+ _add_default_hermione_test_data(unit, test_record)
+
+ test_record.update({
+ "SIZE": "LARGE",
+ "TAG": ytest.serialize_list(test_tags),
+ "REQUIREMENTS": ytest.serialize_list(test_requirements),
+ })
+
+ _add_test(unit, test_runner, test_files, deps, test_record)
+
+
+def _add_default_hermione_test_data(unit, test_record):
+ mod_dir = unit.get("MODDIR")
+ root_dir = test_record["TS-ROOT-DIR"]
+ out_dir = test_record["TS-OUT-DIR"]
+ test_for_path = test_record["TS-TEST-FOR-PATH"]
+
+ abs_root_dir = os.path.normpath(os.path.join(unit.resolve(unit.path()), root_dir))
+ file_paths = _find_file_paths(abs_root_dir, "**/screens/*/*/*.png")
+ file_dirs = [os.path.dirname(f) for f in file_paths]
+
+ rename_from, rename_to = [os.path.relpath(os.path.normpath(os.path.join(mod_dir, d)), test_for_path)
+ for d in [root_dir, out_dir]]
+
+ test_record.update({
+ "TS-TEST-DATA-DIRS": ytest.serialize_list(_resolve_module_files(unit, mod_dir, file_dirs)),
+ "TS-TEST-DATA-DIRS-RENAME": "{}:{}".format(rename_from, rename_to),
+ })
+
+
+def _setup_eslint(unit):
+ if not _is_tests_enabled(unit):
+ return
+
+ if unit.get("_NO_LINT_VALUE") == "none":
+ return
+
+ lint_files = ytest.get_values_list(unit, "_TS_LINT_SRCS_VALUE")
+ if not lint_files:
+ return
+
+ mod_dir = unit.get("MODDIR")
+ lint_files = _resolve_module_files(unit, mod_dir, lint_files)
+ deps = _create_pm(unit).get_peers_from_package_json()
+ test_record = {
+ "ESLINT_CONFIG_NAME": unit.get("ESLINT_CONFIG_NAME"),
+ }
+
+ _add_test(unit, "eslint", lint_files, deps, test_record, mod_dir)
+
+
+def _resolve_module_files(unit, mod_dir, file_paths):
+ resolved_files = []
+
+ for path in file_paths:
+ resolved = rootrel_arc_src(path, unit)
+ if resolved.startswith(mod_dir):
+ resolved = resolved[len(mod_dir) + 1:]
+ resolved_files.append(resolved)
+
+ return resolved_files
+
+
+def _find_file_paths(abs_path, pattern):
+ file_paths = []
+ _, ext = os.path.splitext(pattern)
+
+ for root, _, filenames in os.walk(abs_path):
+ if not any(f.endswith(ext) for f in filenames):
+ continue
+
+ abs_file_paths = [os.path.join(root, f) for f in filenames]
+
+ for file_path in fnmatch.filter(abs_file_paths, pattern):
+ file_paths.append(file_path)
+
+ return file_paths
+
+
+def _add_test(unit, test_type, test_files, deps=None, test_record=None, test_cwd=None):
+ from lib.nots.package_manager import constants
+
+ if deps:
+ unit.ondepends(deps)
+
+ test_dir = ytest.get_norm_unit_path(unit)
+ full_test_record = {
+ "TEST-NAME": test_type.lower(),
+ "TEST-TIMEOUT": unit.get("TEST_TIMEOUT") or "",
+ "TEST-ENV": ytest.prepare_env(unit.get("TEST_ENV_VALUE")),
+ "TESTED-PROJECT-NAME": os.path.splitext(unit.filename())[0],
+ "SCRIPT-REL-PATH": test_type,
+ "SOURCE-FOLDER-PATH": test_dir,
+ "BUILD-FOLDER-PATH": test_dir,
+ "BINARY-PATH": os.path.join(test_dir, unit.filename()),
+ "SPLIT-FACTOR": unit.get("TEST_SPLIT_FACTOR") or "",
+ "FORK-MODE": unit.get("TEST_FORK_MODE") or "",
+ "SIZE": "SMALL",
+ "TEST-FILES": ytest.serialize_list(test_files),
+ "TEST-CWD": test_cwd or "",
+ "TAG": ytest.serialize_list(ytest.get_values_list(unit, "TEST_TAGS_VALUE")),
+ "REQUIREMENTS": ytest.serialize_list(ytest.get_values_list(unit, "TEST_REQUIREMENTS_VALUE")),
+ "NODEJS-ROOT-VAR-NAME": unit.get("NODEJS_ROOT_VAR_NAME"),
+ "NODE-MODULES-BUNDLE-FILENAME": constants.NODE_MODULES_WORKSPACE_BUNDLE_FILENAME,
+ "CUSTOM-DEPENDENCIES": " ".join(deps) if deps else "",
+ }
+
+ if test_record:
+ full_test_record.update(test_record)
+
+ data = ytest.dump_test(unit, full_test_record)
+ if data:
+ unit.set_property(["DART_DATA", data])
+
+
+def _set_nodejs_root(unit):
+ pm = _create_pm(unit)
+
+ # example: >= 12.18.4
+ version_range = pm.load_package_json_from_dir(pm.sources_path).get_nodejs_version()
+
+ # example: Version(12, 18, 4)
+ node_version = _select_matching_node_version(version_range)
+
+ # example: NODEJS_12_18_4_RESOURCE_GLOBAL
+ yamake_node_version_var = "NODEJS_{}_RESOURCE_GLOBAL".format(str(node_version).replace(".", "_"))
+
+ unit.set(["NODEJS_ROOT", "${}".format(yamake_node_version_var)])
+ unit.set(["NODEJS_ROOT_VAR_NAME", yamake_node_version_var])
+
+
+def _select_matching_node_version(range_str):
+ """
+ :param str range_str:
+ :rtype: Version
+ """
+ from lib.nots.constants import SUPPORTED_NODE_VERSIONS, DEFAULT_NODE_VERSION
+ from lib.nots.semver import VersionRange
+
+ if range_str is None:
+ return DEFAULT_NODE_VERSION
+
+ try:
+ range = VersionRange.from_str(range_str)
+
+ # assuming SUPPORTED_NODE_VERSIONS is sorted from the lowest to highest version
+ # we stop the loop as early as possible and hence return the lowest compatible version
+ for version in SUPPORTED_NODE_VERSIONS:
+ if range.is_satisfied_by(version):
+ return version
+
+ raise ValueError("There is no allowed version to satisfy this range: '{}'".format(range_str))
+ except Exception as error:
+ raise Exception(
+ "Requested nodejs version range '{}'' could not be satisfied. Please use a range that would include one of the following: {}.\nFor further details please visit the link: {}\nOriginal error: {}"
+ .format(range_str, map(str, SUPPORTED_NODE_VERSIONS), "https://nda.ya.ru/t/ulU4f5Ru5egzHV", str(error))
+ )
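+
+
+# Selection sketch (illustrative; the actual values live in lib/nots/constants.py):
+# with SUPPORTED_NODE_VERSIONS == [Version(12, 18, 4), Version(14, 17, 0), Version(16, 14, 0)],
+# _select_matching_node_version(">= 14.0.0") returns Version(14, 17, 0) - the lowest matching
+# version - and _select_matching_node_version(None) returns DEFAULT_NODE_VERSION.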
diff --git a/build/plugins/pybuild.py b/build/plugins/pybuild.py
new file mode 100644
index 0000000000..a26b3fb8eb
--- /dev/null
+++ b/build/plugins/pybuild.py
@@ -0,0 +1,682 @@
+import os
+import collections
+from hashlib import md5
+
+import ymake
+from _common import stripext, rootrel_arc_src, tobuilddir, listid, resolve_to_ymake_path, generate_chunks, pathid
+
+
+YA_IDE_VENV_VAR = 'YA_IDE_VENV'
+PY_NAMESPACE_PREFIX = 'py/namespace'
+BUILTIN_PROTO = 'builtin_proto'
+
+
+def is_arc_src(src, unit):
+ return (
+ src.startswith('${ARCADIA_ROOT}/') or
+ src.startswith('${CURDIR}/') or
+ unit.resolve_arc_path(src).startswith('$S/')
+ )
+
+
+def is_extended_source_search_enabled(path, unit):
+ if not is_arc_src(path, unit):
+ return False
+ if unit.get('NO_EXTENDED_SOURCE_SEARCH') == 'yes':
+ return False
+ return True
+
+
+def to_build_root(path, unit):
+ if is_arc_src(path, unit):
+ return '${ARCADIA_BUILD_ROOT}/' + rootrel_arc_src(path, unit)
+ return path
+
+
+def uniq_suffix(path, unit):
+ upath = unit.path()
+ if '/' not in path:
+ return ''
+ return '.{}'.format(pathid(upath)[:4])
+
+
+def pb2_arg(suf, path, mod, unit):
+ return '{path}__int__{suf}={mod}{modsuf}'.format(
+ path=stripext(to_build_root(path, unit)),
+ suf=suf,
+ mod=mod,
+ modsuf=stripext(suf)
+ )
+
+
+def proto_arg(path, mod, unit):
+ return '{}.proto={}'.format(stripext(to_build_root(path, unit)), mod)
+
+
+def pb_cc_arg(suf, path, unit):
+ return '{}{suf}'.format(stripext(to_build_root(path, unit)), suf=suf)
+
+
+def ev_cc_arg(path, unit):
+ return '{}.ev.pb.cc'.format(stripext(to_build_root(path, unit)))
+
+
+def ev_arg(path, mod, unit):
+ return '{}__int___ev_pb2.py={}_ev_pb2'.format(stripext(to_build_root(path, unit)), mod)
+
+
+def mangle(name):
+ if '.' not in name:
+ return name
+ return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))
+
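+# For example: mangle("a.b.c") == "1a1b1c"; names without dots are returned unchanged.
+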
+
+def parse_pyx_includes(filename, path, source_root, seen=None):
+ normpath = lambda *x: os.path.normpath(os.path.join(*x))
+
+ abs_path = normpath(source_root, filename)
+ seen = seen or set()
+ if abs_path in seen:
+ return
+ seen.add(abs_path)
+
+ if not os.path.exists(abs_path):
+ # File might be missing, because it might be generated
+ return
+
+ with open(abs_path, 'rb') as f:
+ # Don't parse cimports etc. - that is irrelevant for cython, it's the linker's job
+ includes = ymake.parse_cython_includes(f.read())
+
+ abs_dirname = os.path.dirname(abs_path)
+ # All includes are relative to the file that includes them
+ path_dirname = os.path.dirname(path)
+ file_dirname = os.path.dirname(filename)
+
+ for incfile in includes:
+ abs_path = normpath(abs_dirname, incfile)
+ if os.path.exists(abs_path):
+ incname, incpath = normpath(file_dirname, incfile), normpath(path_dirname, incfile)
+ yield (incname, incpath)
+ # search for includes in the included files
+ for e in parse_pyx_includes(incname, incpath, source_root, seen):
+ yield e
+ else:
+ # The include might be relative to the arcadia root or to the cython includes dir.
+ # Don't treat such a file as missing, because there must be a PEERDIR on the py_library
+ # which contains it.
+ for path in [
+ source_root,
+ source_root + "/contrib/tools/cython/Cython/Includes",
+ ]:
+ if os.path.exists(normpath(path, incfile)):
+ break
+ else:
+ ymake.report_configure_error("'{}' includes missing file: {} ({})".format(path, incfile, abs_path))
+
+
+def has_pyx(args):
+ return any(arg.endswith('.pyx') for arg in args)
+
+
+def get_srcdir(path, unit):
+ return rootrel_arc_src(path, unit)[:-len(path)].rstrip('/')
+
+
+def add_python_lint_checks(unit, py_ver, files):
+ def get_resolved_files():
+ resolved_files = []
+ for path in files:
+ resolved = unit.resolve_arc_path([path])
+ if resolved.startswith('$S'): # path was resolved as source file.
+ resolved_files.append(resolved)
+ return resolved_files
+
+ if unit.get('_NO_LINT_VALUE') == "none":
+
+ no_lint_allowed_paths = (
+ "contrib/",
+ "devtools/",
+ "junk/",
+ # temporary allowed, TODO: remove
+ "taxi/uservices/",
+ "travel/",
+ "market/report/lite/", # MARKETOUT-38662, deadline: 2021-08-12
+ "passport/backend/oauth/", # PASSP-35982
+ "testenv/", # CI-3229
+ )
+
+ upath = unit.path()[3:]
+
+ if not upath.startswith(no_lint_allowed_paths):
+ ymake.report_configure_error("NO_LINT() is allowed only in " + ", ".join(no_lint_allowed_paths))
+
+ if files and unit.get('_NO_LINT_VALUE') not in ("none", "none_internal"):
+ resolved_files = get_resolved_files()
+ if resolved_files:
+ flake8_cfg = 'build/config/tests/flake8/flake8.conf'
+ unit.onadd_check(["flake8.py{}".format(py_ver), flake8_cfg] + resolved_files)
+
+ if files and unit.get('STYLE_PYTHON_VALUE') == 'yes' and is_py3(unit):
+ resolved_files = get_resolved_files()
+ if resolved_files:
+ black_cfg = unit.get('STYLE_PYTHON_PYPROJECT_VALUE') or 'devtools/ya/handlers/style/python_style_config.toml'
+ params = ['black', 'tools/black_linter/black_linter']
+ params += ['FILES'] + resolved_files
+ params += ['CONFIGS', black_cfg]
+ unit.on_add_linter_check(params)
+
+
+def is_py3(unit):
+ return unit.get("PYTHON3") == "yes"
+
+
+def on_py_program(unit, *args):
+ py_program(unit, is_py3(unit))
+
+
+def py_program(unit, py3):
+ """
+ Documentation: https://wiki.yandex-team.ru/devtools/commandsandvars/py_srcs/#modulpyprogramimakrospymain
+ """
+ if py3:
+ peers = ['library/python/runtime_py3/main']
+ if unit.get('PYTHON_SQLITE3') != 'no':
+ peers.append('contrib/tools/python3/src/Modules/_sqlite')
+ else:
+ peers = ['library/python/runtime/main']
+ if unit.get('PYTHON_SQLITE3') != 'no':
+ peers.append('contrib/tools/python/src/Modules/_sqlite')
+ unit.onpeerdir(peers)
+ if unit.get('MODULE_TYPE') == 'PROGRAM': # can not check DLL
+ unit.onadd_check_py_imports()
+
+
+def onpy_srcs(unit, *args):
+ """
+ @usage PY_SRCS({| CYTHON_C} { | TOP_LEVEL | NAMESPACE ns} Files...)
+
+ PY_SRCS() is the rule for building extended versions of Python interpreters that contain all application code in their executable file. It can be used to collect only executables, but not shared libraries, and, in particular, not to collect the modules that are imported using the import directive.
+ The main disadvantage is the lack of IDE support; there is also no readline yet.
+ The application can be built from any of the sources from which a C library can be built, plus, with the help of PY_SRCS, from .py, .pyx, .proto and .swg files.
+ At the same time, Python extensions in C generated from .pyx and .swg are registered in Python as built-in modules, and .py sources are stored as static data: when the interpreter starts, the initialization code adds a custom loader for these modules to sys.meta_path.
+ By default .pyx files are built as C++ extensions. To build them as C (similar to BUILDWITH_CYTHON_C, but with the ability to specify a namespace), specify the CYTHON_C directive.
+ Building from .pyx automatically registers the modules, there is no need to call PY_REGISTER for them.
+ An __init__.py is never required, but if present (and specified in PY_SRCS) it will be imported when you import modules of the package that contains this __init__.py.
+
+ Example of library declaration with PY_SRCS():
+ PY2_LIBRARY(mymodule)
+ PY_SRCS(a.py sub/dir/b.py e.proto sub/dir/f.proto c.pyx sub/dir/d.pyx g.swg sub/dir/h.swg)
+ END()
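+
+ An illustrative (hypothetical) example with an explicit namespace:
+ PY2_LIBRARY(mymodule)
+ PY_SRCS(NAMESPACE my.ns a.py b.py)
+ END()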
+
+ PY_REGISTER honors the differences between Python 2 and Python 3 and adjusts itself to the Python version of the current module.
+ Documentation: https://wiki.yandex-team.ru/arcadia/python/pysrcs/#modulipylibrarypy3libraryimakrospysrcs
+ """
+ # Each file arg must either be a path, or "${...}/buildpath=modname", where
+ # the "${...}/buildpath" part will be used as the file source in a later macro,
+ # and "modname" will be used as the module name.
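+ # For illustration only (hypothetical names): "${ARCADIA_BUILD_ROOT}/mylib/generated.py=mylib.generated"
+ # would register the generated file under the module name "mylib.generated".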
+
+ upath = unit.path()[3:]
+ py3 = is_py3(unit)
+ py_main_only = unit.get('PROCESS_PY_MAIN_ONLY')
+ with_py = not unit.get('PYBUILD_NO_PY')
+ with_pyc = not unit.get('PYBUILD_NO_PYC')
+ in_proto_library = unit.get('PY_PROTO') or unit.get('PY3_PROTO')
+ venv = unit.get(YA_IDE_VENV_VAR)
+ need_gazetteer_peerdir = False
+ trim = 0
+
+ if not upath.startswith('contrib/tools/python') and not upath.startswith('library/python/runtime') and unit.get('NO_PYTHON_INCLS') != 'yes':
+ unit.onpeerdir(['contrib/libs/python'])
+
+ unit_needs_main = unit.get('MODULE_TYPE') in ('PROGRAM', 'DLL')
+ if unit_needs_main:
+ py_program(unit, py3)
+
+ py_namespace_value = unit.get('PY_NAMESPACE_VALUE')
+ if py_namespace_value == ".":
+ ns = ""
+ else:
+ ns = (unit.get('PY_NAMESPACE_VALUE') or upath.replace('/', '.')) + '.'
+
+ cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes'
+ cythonize_py = False
+ optimize_proto = unit.get('OPTIMIZE_PY_PROTOS_FLAG') == 'yes'
+
+ cython_directives = []
+ if cython_coverage:
+ cython_directives += ['-X', 'linetrace=True']
+
+ pyxs_c = []
+ pyxs_c_h = []
+ pyxs_c_api_h = []
+ pyxs_cpp = []
+ pyxs_cpp_h = []
+ pyxs = pyxs_cpp
+ swigs_c = []
+ swigs_cpp = []
+ swigs = swigs_cpp
+ pys = []
+ protos = []
+ evs = []
+ fbss = []
+ py_namespaces = {}
+
+ dump_dir = unit.get('PYTHON_BUILD_DUMP_DIR')
+ dump_output = None
+ if dump_dir:
+ import thread
+ pid = os.getpid()
+ tid = thread.get_ident()
+ dump_name = '{}-{}.dump'.format(pid, tid)
+ dump_output = open(os.path.join(dump_dir, dump_name), 'a')
+
+ args = iter(args)
+ for arg in args:
+ # Namespace directives.
+ if arg == 'TOP_LEVEL':
+ ns = ''
+ elif arg == 'NAMESPACE':
+ ns = next(args) + '.'
+ # Cython directives.
+ elif arg == 'CYTHON_C':
+ pyxs = pyxs_c
+ elif arg == 'CYTHON_C_H':
+ pyxs = pyxs_c_h
+ elif arg == 'CYTHON_C_API_H':
+ pyxs = pyxs_c_api_h
+ elif arg == 'CYTHON_CPP':
+ pyxs = pyxs_cpp
+ elif arg == 'CYTHON_CPP_H':
+ pyxs = pyxs_cpp_h
+ elif arg == 'CYTHON_DIRECTIVE':
+ cython_directives += ['-X', next(args)]
+ elif arg == 'CYTHONIZE_PY':
+ cythonize_py = True
+ # SWIG.
+ elif arg == 'SWIG_C':
+ swigs = swigs_c
+ elif arg == 'SWIG_CPP':
+ swigs = swigs_cpp
+ # Unsupported but legal PROTO_LIBRARY arguments.
+ elif arg == 'GLOBAL' or not in_proto_library and arg.endswith('.gztproto'):
+ pass
+ elif arg == '_MR':
+ # GLOB support: convert arcadia-root-relative paths to module-relative
+ # srcs are assumed to start with ${ARCADIA_ROOT}
+ trim = len(unit.path()) + 14
+ # Sources.
+ else:
+ main_mod = arg == 'MAIN'
+ if main_mod:
+ arg = next(args)
+
+ if '=' in arg:
+ main_py = False
+ path, mod = arg.split('=', 1)
+ else:
+ if trim:
+ arg = arg[trim:]
+ if arg.endswith('.gztproto'):
+ need_gazetteer_peerdir = True
+ path = '{}.proto'.format(arg[:-9])
+ else:
+ path = arg
+ main_py = (path == '__main__.py' or path.endswith('/__main__.py'))
+ if not py3 and unit_needs_main and main_py:
+ mod = '__main__'
+ else:
+ if arg.startswith('../'):
+ ymake.report_configure_error('PY_SRCS item starts with "../": {!r}'.format(arg))
+ if arg.startswith('/'):
+ ymake.report_configure_error('PY_SRCS item starts with "/": {!r}'.format(arg))
+ continue
+ mod_name = stripext(arg).replace('/', '.')
+ if py3 and path.endswith('.py') and is_extended_source_search_enabled(path, unit):
+ # Dig out real path from the file path. Unit.path is not enough because of SRCDIR and ADDINCL
+ root_rel_path = rootrel_arc_src(path, unit)
+ mod_root_path = root_rel_path[:-(len(path) + 1)]
+ py_namespaces.setdefault(mod_root_path, set()).add(ns if ns else '.')
+ mod = ns + mod_name
+
+ if main_mod:
+ py_main(unit, mod + ":main")
+ elif py3 and unit_needs_main and main_py:
+ py_main(unit, mod)
+
+ if py_main_only:
+ continue
+
+ if py3 and mod == '__main__':
+ ymake.report_configure_error('TOP_LEVEL __main__.py is not allowed in PY3_PROGRAM')
+
+ pathmod = (path, mod)
+
+ if dump_output is not None:
+ dump_output.write('{path}\t{module}\t{py3}\n'.format(path=rootrel_arc_src(path, unit), module=mod, py3=1 if py3 else 0))
+
+ if path.endswith('.py'):
+ if cythonize_py:
+ pyxs.append(pathmod)
+ else:
+ pys.append(pathmod)
+ elif path.endswith('.pyx'):
+ pyxs.append(pathmod)
+ elif path.endswith('.proto'):
+ protos.append(pathmod)
+ elif path.endswith('.ev'):
+ evs.append(pathmod)
+ elif path.endswith('.swg'):
+ swigs.append(pathmod)
+ # Allow pyi files in PY_SRCS for autocomplete in IDEs, but skip them during the build
+ elif path.endswith('.pyi'):
+ pass
+ elif path.endswith('.fbs'):
+ fbss.append(pathmod)
+ else:
+ ymake.report_configure_error('in PY_SRCS: unrecognized arg {!r}'.format(path))
+
+ if dump_output is not None:
+ dump_output.close()
+
+ if pyxs:
+ py_files2res = set()
+ cpp_files2res = set()
+ # The include map stores the files that were included by the pyx file being processed,
+ # so that the source code of an included file can be found inside the file
+ # generated for that pyx file.
+ include_map = collections.defaultdict(set)
+
+ if cython_coverage:
+ def process_pyx(filename, path, out_suffix, with_ext):
+ # skip generated files
+ if not is_arc_src(path, unit):
+ return
+ # source file
+ py_files2res.add((filename, path))
+ # generated
+ if with_ext is None:
+ cpp_files2res.add((os.path.splitext(filename)[0] + out_suffix, os.path.splitext(path)[0] + out_suffix))
+ else:
+ cpp_files2res.add((filename + with_ext + out_suffix, path + with_ext + out_suffix))
+ # used includes
+ for entry in parse_pyx_includes(filename, path, unit.resolve('$S')):
+ py_files2res.add(entry)
+ include_arc_rel = entry[0]
+ include_map[filename].add(include_arc_rel)
+ else:
+ def process_pyx(filename, path, out_suffix, with_ext):
+ pass
+
+ obj_suff = unit.get('OBJ_SUF')
+ assert obj_suff is not None
+ for pyxs, cython, out_suffix, with_ext in [
+ (pyxs_c, unit.on_buildwith_cython_c_dep, ".c", obj_suff),
+ (pyxs_c_h, unit.on_buildwith_cython_c_h, ".c", None),
+ (pyxs_c_api_h, unit.on_buildwith_cython_c_api_h, ".c", None),
+ (pyxs_cpp, unit.on_buildwith_cython_cpp_dep, ".cpp", obj_suff),
+ (pyxs_cpp_h, unit.on_buildwith_cython_cpp_h, ".cpp", None),
+ ]:
+ for path, mod in pyxs:
+ filename = rootrel_arc_src(path, unit)
+ cython_args = [path]
+
+ dep = path
+ if path.endswith('.py'):
+ pxd = '/'.join(mod.split('.')) + '.pxd'
+ if unit.resolve_arc_path(pxd):
+ dep = pxd
+ cython_args.append(dep)
+
+ cython_args += [
+ '--module-name', mod,
+ '--init-suffix', mangle(mod),
+ '--source-root', '${ARCADIA_ROOT}',
+ # set arcadia root relative __file__ for generated modules
+ '-X', 'set_initial_path={}'.format(filename),
+ ] + cython_directives
+
+ cython(cython_args)
+ py_register(unit, mod, py3)
+ process_pyx(filename, path, out_suffix, with_ext)
+
+ if py_files2res:
+ # Compile original and generated sources into target for proper cython coverage calculation
+ for files2res in (py_files2res, cpp_files2res):
+ unit.onresource_files([x for name, path in files2res for x in ('DEST', name, path)])
+
+ if include_map:
+ data = []
+ prefix = 'resfs/cython/include'
+ for line in sorted('{}/{}={}'.format(prefix, filename, ':'.join(sorted(files))) for filename, files in include_map.iteritems()):
+ data += ['-', line]
+ unit.onresource(data)
+
+ for swigs, on_swig_python in [
+ (swigs_c, unit.on_swig_python_c),
+ (swigs_cpp, unit.on_swig_python_cpp),
+ ]:
+ for path, mod in swigs:
+ # Make output prefix basename match swig module name.
+ prefix = path[:path.rfind('/') + 1] + mod.rsplit('.', 1)[-1]
+ swg_py = '{}/{}/{}.py'.format('${ARCADIA_BUILD_ROOT}', upath, prefix)
+ on_swig_python([path, prefix])
+ onpy_register(unit, mod + '_swg')
+ onpy_srcs(unit, swg_py + '=' + mod)
+
+ if pys:
+ pys_seen = set()
+ pys_dups = {m for _, m in pys if (m in pys_seen or pys_seen.add(m))}
+ if pys_dups:
+ ymake.report_configure_error('Duplicate(s) found in the PY_SRCS macro: {}'.format(pys_dups))
+
+ res = []
+
+ if py3:
+ mod_list_md5 = md5()
+ for path, mod in pys:
+ mod_list_md5.update(mod)
+ if not (venv and is_extended_source_search_enabled(path, unit)):
+ dest = 'py/' + mod.replace('.', '/') + '.py'
+ if with_py:
+ res += ['DEST', dest, path]
+ if with_pyc:
+ root_rel_path = rootrel_arc_src(path, unit)
+ dst = path + uniq_suffix(path, unit)
+ unit.on_py3_compile_bytecode([root_rel_path + '-', path, dst])
+ res += ['DEST', dest + '.yapyc3', dst + '.yapyc3']
+
+ if py_namespaces:
+ # Note: Add md5 to the key to prevent key collisions if two or more PY_SRCS() are used in the same ya.make
+ ns_res = []
+ for path, ns in sorted(py_namespaces.items()):
+ key = '{}/{}/{}'.format(PY_NAMESPACE_PREFIX, mod_list_md5.hexdigest(), path)
+ namespaces = ':'.join(sorted(ns))
+ ns_res += ['-', '{}="{}"'.format(key, namespaces)]
+ unit.onresource(ns_res)
+
+ unit.onresource_files(res)
+ add_python_lint_checks(unit, 3, [path for path, mod in pys] + unit.get(['_PY_EXTRA_LINT_FILES_VALUE']).split())
+ else:
+ for path, mod in pys:
+ root_rel_path = rootrel_arc_src(path, unit)
+ if with_py:
+ key = '/py_modules/' + mod
+ res += [
+ path, key,
+ '-', 'resfs/src/{}={}'.format(key, root_rel_path),
+ ]
+ if with_pyc:
+ src = unit.resolve_arc_path(path) or path
+ dst = path + uniq_suffix(path, unit)
+ unit.on_py_compile_bytecode([root_rel_path + '-', src, dst])
+ res += [dst + '.yapyc', '/py_code/' + mod]
+
+ unit.onresource(res)
+ add_python_lint_checks(unit, 2, [path for path, mod in pys] + unit.get(['_PY_EXTRA_LINT_FILES_VALUE']).split())
+
+ use_vanilla_protoc = unit.get('USE_VANILLA_PROTOC') == 'yes'
+ if use_vanilla_protoc:
+ cpp_runtime_path = 'contrib/libs/protobuf_std'
+ py_runtime_path = 'contrib/python/protobuf_std'
+ builtin_proto_path = cpp_runtime_path + '/' + BUILTIN_PROTO
+ else:
+ cpp_runtime_path = 'contrib/libs/protobuf'
+ py_runtime_path = 'contrib/python/protobuf'
+ builtin_proto_path = cpp_runtime_path + '/' + BUILTIN_PROTO
+
+ if protos:
+ if not upath.startswith(py_runtime_path) and not upath.startswith(builtin_proto_path):
+ unit.onpeerdir(py_runtime_path)
+
+ unit.onpeerdir(unit.get("PY_PROTO_DEPS").split())
+
+ proto_paths = [path for path, mod in protos]
+ unit.on_generate_py_protos_internal(proto_paths)
+ unit.onpy_srcs([
+ pb2_arg(py_suf, path, mod, unit)
+ for path, mod in protos
+ for py_suf in unit.get("PY_PROTO_SUFFIXES").split()
+ ])
+
+ if optimize_proto and need_gazetteer_peerdir:
+ unit.onpeerdir(['kernel/gazetteer/proto'])
+
+ if evs:
+ unit.onpeerdir([cpp_runtime_path])
+ unit.on_generate_py_evs_internal([path for path, mod in evs])
+ unit.onpy_srcs([ev_arg(path, mod, unit) for path, mod in evs])
+
+ if fbss:
+ unit.onpeerdir(unit.get('_PY_FBS_DEPS').split())
+ pysrc_base_name = listid(fbss)
+ unit.onfbs_to_pysrc([pysrc_base_name] + [path for path, _ in fbss])
+ unit.onsrcs(['GLOBAL', '{}.fbs.pysrc'.format(pysrc_base_name)])
+
+
+def _check_test_srcs(*args):
+ used = set(args) & {"NAMESPACE", "TOP_LEVEL", "__main__.py"}
+ if used:
+ param = list(used)[0]
+ ymake.report_configure_error('in TEST_SRCS: you cannot use {} here - it would break the testing machinery'.format(param))
+
+
+def ontest_srcs(unit, *args):
+ _check_test_srcs(*args)
+ if unit.get('PY3TEST_BIN' if is_py3(unit) else 'PYTEST_BIN') != 'no':
+ unit.onpy_srcs(["NAMESPACE", "__tests__"] + list(args))
+
+
+def onpy_doctests(unit, *args):
+ """
+ @usage PY_DOCTESTS(Packages...)
+
+ Add doctests for the specified Python packages to the test.
+ The packages should be part of the test (listed as sources of the test or its PEERDIRs).
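+
+ Illustrative example (hypothetical packages): PY_DOCTESTS(my_package my_package.utils)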
+ """
+ if unit.get('PY3TEST_BIN' if is_py3(unit) else 'PYTEST_BIN') != 'no':
+ unit.onresource(['-', 'PY_DOCTEST_PACKAGES="{}"'.format(' '.join(args))])
+
+
+def py_register(unit, func, py3):
+ if py3:
+ unit.on_py3_register([func])
+ else:
+ unit.on_py_register([func])
+
+
+def onpy_register(unit, *args):
+ """
+ @usage: PY_REGISTER([package.]module_name)
+
+ Python knows which built-in modules can be imported because they are registered either at build time or at interpreter startup.
+ All modules built from the sources listed in PY_SRCS() are registered automatically.
+ To register modules built from sources listed in SRCS(), use PY_REGISTER().
+
+ PY_REGISTER(module_name) initializes the module globally via a call to initmodule_name()
+ PY_REGISTER(package.module_name) initializes the module in the specified package;
+ it renames the init function with CFLAGS(-Dinitmodule_name=init7package11module_name)
+ or CFLAGS(-DPyInit_module_name=PyInit_7package11module_name)
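+
+ Illustrative example (module names are hypothetical):
+ SRCS(my_module.c)
+ PY_REGISTER(my_package.my_module)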
+
+ Documentation: https://wiki.yandex-team.ru/arcadia/python/pysrcs/#makrospyregister
+ """
+
+ py3 = is_py3(unit)
+
+ for name in args:
+ assert '=' not in name, name
+ py_register(unit, name, py3)
+ if '.' in name:
+ shortname = name.rsplit('.', 1)[1]
+ if py3:
+ unit.oncflags(['-DPyInit_{}=PyInit_{}'.format(shortname, mangle(name))])
+ else:
+ unit.oncflags(['-Dinit{}=init{}'.format(shortname, mangle(name))])
+
+
+def py_main(unit, arg):
+ if unit.get('IGNORE_PY_MAIN'):
+ return
+ unit_needs_main = unit.get('MODULE_TYPE') in ('PROGRAM', 'DLL')
+ if unit_needs_main:
+ py_program(unit, is_py3(unit))
+ unit.onresource(['-', 'PY_MAIN={}'.format(arg)])
+
+
+def onpy_main(unit, arg):
+ """
+ @usage: PY_MAIN(package.module[:func])
+
+ Specifies the module or function from which to start executing a Python program.
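+
+ Illustrative example (hypothetical module): PY_MAIN(my_package.cli:run)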
+
+ Documentation: https://wiki.yandex-team.ru/arcadia/python/pysrcs/#modulipyprogrampy3programimakrospymain
+ """
+
+ arg = arg.replace('/', '.')
+
+ if ':' not in arg:
+ arg += ':main'
+
+ py_main(unit, arg)
+
+
+def onpy_constructor(unit, arg):
+ """
+ @usage: PY_CONSTRUCTOR(package.module[:func])
+
+ Specifies the module or function to be run before Python's main().
+ If no function is specified, init() is expected in the target module.
+ Can be thought of as __attribute__((constructor)) for Python.
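+
+ Illustrative example (hypothetical module): PY_CONSTRUCTOR(my_package.setup:configure)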
+ """
+ if ':' not in arg:
+ arg = arg + '=init'
+ else:
+ arg = arg.replace(':', '=', 1)  # str does not support item assignment; rebuild the value instead
+ unit.onresource(['-', 'py/constructors/{}'.format(arg)])
+
+
+def onpy_enums_serialization(unit, *args):
+ ns = ''
+ args = iter(args)
+ for arg in args:
+ # Namespace directives.
+ if arg == 'NAMESPACE':
+ ns = next(args)
+ else:
+ unit.on_py_enum_serialization_to_json(arg)
+ unit.on_py_enum_serialization_to_py(arg)
+ filename = arg.rsplit('.', 1)[0] + '.py'
+ if len(ns) != 0:
+ onpy_srcs(unit, 'NAMESPACE', ns, filename)
+ else:
+ onpy_srcs(unit, filename)
+
+
+def oncpp_enums_serialization(unit, *args):
+ args = iter(args)
+ for arg in args:
+ # Namespace directives.
+ if arg == 'NAMESPACE':
+ next(args)
+ else:
+ unit.ongenerate_enum_serialization_with_header(arg)
diff --git a/build/plugins/res.py b/build/plugins/res.py
new file mode 100644
index 0000000000..c3fecce4dd
--- /dev/null
+++ b/build/plugins/res.py
@@ -0,0 +1,133 @@
+import json
+import os
+import six
+from _common import iterpair, listid, pathid, rootrel_arc_src, tobuilddir, filter_out_by_keyword
+import ymake
+
+
+def split(lst, limit):
+ # paths are specified with a replaceable prefix;
+ # the real length is unknown at the moment, which is why we use root_length
+ # as a rough estimation
+ root_length = 200
+ filepath = None
+ length = 0
+ bucket = []
+
+ for item in lst:
+ if filepath:
+ length += root_length + len(filepath) + len(item)
+ if length > limit and bucket:
+ yield bucket
+ bucket = []
+ length = 0
+
+ bucket.append(filepath)
+ bucket.append(item)
+ filepath = None
+ else:
+ filepath = item
+
+ if bucket:
+ yield bucket
+
+
+def remove_prefix(text, prefix):
+ if text.startswith(prefix):
+ return text[len(prefix):]
+ return text
+
+
+def onresource_files(unit, *args):
+ """
+ @usage: RESOURCE_FILES([DONT_PARSE] [PREFIX {prefix}] [STRIP prefix_to_strip] {path})
+
+ This macro expands into
+ RESOURCE([DONT_PARSE] {path} resfs/file/{prefix}{path}
+ - resfs/src/resfs/file/{prefix}{remove_prefix(path, prefix_to_strip)}={rootrel_arc_src(path)}
+ )
+
+ resfs/src/{key} stores a source root (or build root) relative path of the
+ source of the value of the {key} resource.
+
+ resfs/file/{key} stores any value whose source was a file on a filesystem.
+ resfs/src/resfs/file/{key} must store its path.
+
+ DONT_PARSE disables parsing for source code files (determined by extension)
+ Please don't abuse it: use a separate DONT_PARSE macro call only for the files that are actually subject to parsing
+
+ This form is for use from other plugins:
+ RESOURCE_FILES([DEST {dest}] {path}) expands into RESOURCE({path} resfs/file/{dest})
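+
+ Illustrative example (hypothetical paths): RESOURCE_FILES(PREFIX data/ schema.json)
+ makes the file available under the resfs/file/data/schema.json key.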
+
+ @see: https://wiki.yandex-team.ru/devtools/commandsandvars/resourcefiles/
+ """
+ prefix = ''
+ prefix_to_strip = None
+ dest = None
+ res = []
+ first = 0
+
+ if args and not unit.enabled('_GO_MODULE'):
+ # GO_RESOURCE currently doesn't support DONT_PARSE
+ res.append('DONT_PARSE')
+
+ if args and args[0] == 'DONT_PARSE':
+ first = 1
+
+ args = iter(args[first:])
+ for arg in args:
+ if arg == 'DONT_PARSE':
+ # ignore explicit specification
+ continue
+ if arg == 'PREFIX':
+ prefix, dest = next(args), None
+ elif arg == 'DEST':
+ dest, prefix = next(args), None
+ elif arg == 'STRIP':
+ prefix_to_strip = next(args)
+ else:
+ path = arg
+ key = 'resfs/file/' + (dest or (prefix + (path if not prefix_to_strip else remove_prefix(path, prefix_to_strip))))
+ src = 'resfs/src/{}={}'.format(key, rootrel_arc_src(path, unit))
+ res += ['-', src, path, key]
+
+ if unit.enabled('_GO_MODULE'):
+ unit.on_go_resource(res)
+ else:
+ unit.onresource(res)
+
+def onall_resource_files(unit, *args):
+ # This is only validation, actual work is done in ymake.core.conf implementation
+ for arg in args:
+ if '*' in arg or '?' in arg:
+ ymake.report_configure_error('Wildcards in [[imp]]ALL_RESOURCE_FILES[[rst]] are not allowed')
+
+def on_ya_conf_json(unit, conf_file):
+ conf_abs_path = unit.resolve('$S/' + conf_file)
+ if not os.path.exists(conf_abs_path):
+ ymake.report_configure_error('File "{}" not found'.format(conf_abs_path))
+ return
+
+ # conf_file should be passed to the RESOURCE_FILES macro without path.
+ # To resolve it later by name only, we must add its path to SRCDIR().
+ conf_dir = os.path.dirname(conf_file)
+ if conf_dir:
+ unit.onsrcdir(conf_dir)
+ unit.onresource_files(os.path.basename(conf_file))
+
+ with open(conf_abs_path) as f:
+ conf = json.load(f)
+ formulas = set()
+ for bottle_name, bottle in conf['bottles'].items():
+ formula = bottle['formula']
+ if isinstance(formula, six.string_types):
+ if formula.startswith(conf_dir):
+ abs_path = unit.resolve('$S/' + formula)
+ if os.path.exists(abs_path):
+ formulas.add(formula)
+ else:
+ ymake.report_configure_error('File "{}" (referenced from bottle "{}" in "{}") is not found'.format(abs_path, bottle_name, conf_abs_path))
+ else:
+ ymake.report_configure_error('File "{}" (referenced from bottle "{}" in "{}") must be located in "{}" file tree'.format(formula, bottle_name, conf_file, conf_dir))
+ for formula in formulas:
+ unit.onresource_files(formula)
diff --git a/build/plugins/sandbox_registry.py b/build/plugins/sandbox_registry.py
new file mode 100644
index 0000000000..dc1be399b3
--- /dev/null
+++ b/build/plugins/sandbox_registry.py
@@ -0,0 +1,21 @@
+import os
+
+import ymake
+
+
+def onregister_sandbox_import(unit, *args):
+ args = iter(args)
+ for path in args:
+ path = os.path.normpath(path)
+ source = unit.resolve_arc_path(path)
+ abs_source = unit.resolve(source)
+ if not os.path.exists(abs_source):
+ ymake.report_configure_error('REGISTER_SANDBOX_IMPORT: File or directory {} does not exist'.format(path))
+ split_path = path.split(os.sep)
+ l, r = 0, len(split_path)
+ if split_path[-1] == "__init__.py":
+ r -= 1
+ if not split_path[0]:
+ l += 1
+ path = ".".join(split_path[l:r])
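+ # e.g. (illustrative path) "sandbox/projects/MyTask/__init__.py" becomes "sandbox.projects.MyTask"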
+ unit.onresource(["-", "{}.{}={}".format("SANDBOX_TASK_REGISTRY", path, path)])
diff --git a/build/plugins/scarab_cant_clash.py b/build/plugins/scarab_cant_clash.py
new file mode 100644
index 0000000000..77dc303183
--- /dev/null
+++ b/build/plugins/scarab_cant_clash.py
@@ -0,0 +1,66 @@
+import _common as common
+
+
+def onacceleo(unit, *args):
+ if unit.get("YMAKE_JAVA_MODULES") == "yes":
+ return
+ flat, kv = common.sort_by_keywords(
+ {'XSD': -1, 'MTL': -1, 'MTL_ROOT': 1, 'LANG': -1, 'OUT': -1, 'OUT_NOAUTO': -1, 'OUTPUT_INCLUDES': -1, 'DEBUG': 0},
+ args
+ )
+
+ try:
+ mtlroot = kv['MTL_ROOT'][0]
+ except Exception:
+ mtlroot = unit.path().replace('$S/', '')
+
+ classpath = ['$SCARAB', ] # XXX special word for ya make to replace following paths with real classpath
+ classpath.append('tools/acceleo')
+
+ depends = []
+ if not unit.get('IDE_MSVS_CALL'):
+ for jar in classpath[1:]:
+ depends.append(jar)
+
+ classpath = ':'.join(classpath)
+
+ # Generate java cmd
+ cmd = [
+ '-classpath',
+ classpath,
+ '-Dfile.encoding=UTF-8',
+ 'ru.yandex.se.logsng.tool.Cli',
+ ]
+
+ for xsd in kv.get('XSD', []):
+ cmd += ['--xsd', xsd]
+
+ for mtl in kv.get('MTL', []):
+ cmd += ['--mtl', mtl]
+
+ for lang in kv.get('LANG', []):
+ cmd += ['--lang', lang]
+
+ cmd += ['--output-dir', unit.path().replace('$S/', '${ARCADIA_BUILD_ROOT}/')]
+ cmd += ['--build-root', '${ARCADIA_BUILD_ROOT}']
+ cmd += ['--source-root', '${ARCADIA_ROOT}']
+ cmd += ['--mtl-root', mtlroot]
+
+ # Generate RUN_JAVA args
+ run_java = cmd
+
+ if 'DEBUG' not in kv:
+ run_java += ['HIDE_OUTPUT']
+
+ inputs = kv.get('XSD', []) + kv.get('MTL', []) + kv.get('LANG', [])
+ if inputs:
+ run_java += ['IN'] + inputs
+
+ for k in 'OUT', 'OUT_NOAUTO', 'OUTPUT_INCLUDES':
+ if kv.get(k):
+ run_java += [k] + kv[k]
+
+ if depends:
+ run_java += ['TOOL'] + depends
+
+ unit.on_run_java(run_java)
diff --git a/build/plugins/split_codegen.py b/build/plugins/split_codegen.py
new file mode 100644
index 0000000000..f1e60bc142
--- /dev/null
+++ b/build/plugins/split_codegen.py
@@ -0,0 +1,43 @@
+from _common import sort_by_keywords
+
+ # This is hard-coded many times in CppParts in various codegens
+_DEFAULT_CPP_PARTS = 20
+# See TCodegenParams::MethodStream usage in factor codegen
+_ADDITIONAL_STREAM_COUNT = 5
+
+
+def onsplit_codegen(unit, *args):
+ '''
+ @usage: SPLIT_CODEGEN(tool prefix opts... [OUT_NUM num] [OUTPUT_INCLUDES output_includes...])
+
+ Generates a given number of .cpp parts plus one .h header file from a .in file.
+
+ Supports keywords:
+ 1. OUT_NUM <number of generated Prefix.N.cpp files, default 25 (N varies from 0 to 24)>
+ 2. OUTPUT_INCLUDES <paths to files that will be included in the generated files>
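+
+ Illustrative example (hypothetical tool and prefix): SPLIT_CODEGEN(path/to/tool MyFactors OUT_NUM 30)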
+ '''
+ keywords = {"OUT_NUM": 1}
+ flat_args, spec_args = sort_by_keywords(keywords, args)
+
+ num_outputs = _DEFAULT_CPP_PARTS + _ADDITIONAL_STREAM_COUNT
+ if "OUT_NUM" in spec_args:
+ num_outputs = int(spec_args["OUT_NUM"][0])
+
+ tool = flat_args[0]
+ prefix = flat_args[1]
+
+ cmd = [tool, prefix, 'OUT']
+ for num in range(num_outputs):
+ cmd.append('{}.{}.cpp'.format(prefix, num))
+
+ cpp_parts = int(num_outputs) - _ADDITIONAL_STREAM_COUNT
+ cpp_parts_args = ['--cpp-parts', str(cpp_parts)]
+
+ if len(flat_args) > 2:
+ if flat_args[2] != 'OUTPUT_INCLUDES':
+ cmd.append('OPTS')
+ cmd += cpp_parts_args + flat_args[2:]
+ else:
+ cmd += ['OPTS'] + cpp_parts_args
+
+ unit.on_split_codegen_base(cmd)
diff --git a/build/plugins/ssqls.py b/build/plugins/ssqls.py
new file mode 100644
index 0000000000..8c3ea86829
--- /dev/null
+++ b/build/plugins/ssqls.py
@@ -0,0 +1,40 @@
+from os.path import splitext
+
+import ymake
+from _common import resolve_includes
+
+
+class SSQLSParser(object):
+ def __init__(self, path, unit):
+ s = unit.resolve_arc_path(path)
+ assert s.startswith('$S/') and s.endswith('.ssqls'), s
+ h = '$B/' + s[3:-6] + '.h'
+
+ import xml.etree.cElementTree as ET
+ try:
+ doc = ET.parse(path)
+ except ET.ParseError as e:
+ unit.message(['error', 'malformed XML {}: {}'.format(path, e)])
+ doc = ET.Element('DbObject')
+ xmls, headers = self.parse_doc(doc)
+ self._includes = resolve_includes(unit, s, xmls)
+ self._induced = {'cpp': [h], 'h': resolve_includes(unit, h, headers)}
+
+ @staticmethod
+ def parse_doc(doc):
+ paths = lambda nodes: filter(None, (e.get('path') for e in nodes))
+ includes = doc.findall('include')
+ ancestors = paths(doc.findall('ancestors/ancestor'))
+ headers = [e.text.strip('<>""') for e in includes]
+ headers += [splitext(s)[0] + '.h' for s in ancestors]
+ return paths(includes) + ancestors, headers
+
+ def includes(self):
+ return self._includes
+
+ def induced_deps(self):
+ return self._induced
+
+
+def init():
+ ymake.addparser('ssqls', SSQLSParser)
diff --git a/build/plugins/suppressions.py b/build/plugins/suppressions.py
new file mode 100644
index 0000000000..6f4a1b4f03
--- /dev/null
+++ b/build/plugins/suppressions.py
@@ -0,0 +1,19 @@
+def onsuppressions(unit, *args):
+ """
+ SUPPRESSIONS() - allows specifying files with suppression notation that will be used by
+ the address, leak or thread sanitizer runtime by default.
+ Use the asan.supp filename for address sanitizer suppressions, lsan.supp for leak sanitizer
+ and tsan.supp for thread sanitizer suppressions respectively.
+ See https://clang.llvm.org/docs/AddressSanitizer.html#suppressing-memory-leaks
+ for details.
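+
+ Illustrative example: SUPPRESSIONS(lsan.supp)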
+ """
+ import os
+
+ valid = ("asan.supp", "tsan.supp", "lsan.supp")
+
+ if unit.get("SANITIZER_TYPE") in ("leak", "address", "thread"):
+ for x in args:
+ if os.path.basename(x) not in valid:
+ unit.message(['error', "Invalid suppression filename: {} (any of the following is expected: {})".format(x, valid)])
+ return
+ unit.onsrcs(["GLOBAL"] + list(args))
diff --git a/build/plugins/tests/fake_ymake.py b/build/plugins/tests/fake_ymake.py
new file mode 100644
index 0000000000..a20d28525a
--- /dev/null
+++ b/build/plugins/tests/fake_ymake.py
@@ -0,0 +1,2 @@
+def addparser():
+ pass
diff --git a/build/plugins/tests/test_code_generator.py b/build/plugins/tests/test_code_generator.py
new file mode 100644
index 0000000000..771babc0c0
--- /dev/null
+++ b/build/plugins/tests/test_code_generator.py
@@ -0,0 +1,20 @@
+import sys
+from build.plugins.tests import fake_ymake
+sys.modules['ymake'] = fake_ymake
+
+from build.plugins import code_generator
+
+
+def test_include_parser():
+ template_file = """
+ @ from 'util/namespace.macro' import namespace, change_namespace, close_namespaces
+ @ import 'market/tools/code_generator/templates/serialization/json.macro' as json
+ @ import 'market/tools/code_generator/templates/serialization/request_parameters.macro' as rp
+ #include <sss/abcdefg.h>
+ #include<fff/asd>
+ #include "hhh/quququ.h"
+ """
+
+ includes, induced = code_generator.CodeGeneratorTemplateParser.parse_includes(template_file.split('\n'))
+ assert includes == ['util/namespace.macro', 'market/tools/code_generator/templates/serialization/json.macro', 'market/tools/code_generator/templates/serialization/request_parameters.macro']
+ assert induced == ['sss/abcdefg.h', 'fff/asd', 'hhh/quququ.h']
diff --git a/build/plugins/tests/test_common.py b/build/plugins/tests/test_common.py
new file mode 100644
index 0000000000..e1780354f8
--- /dev/null
+++ b/build/plugins/tests/test_common.py
@@ -0,0 +1,49 @@
+import pytest
+
+import build.plugins._common as pc
+
+
+def test_sort_by_keywords():
+ keywords = {'KEY1': 2, 'KEY2': 0, 'KEY3': 1}
+ args = 'aaaa bbbb KEY2 KEY1 kkk10 kkk11 ccc ddd KEY3 kkk3 eee'.split()
+ flat, spec = pc.sort_by_keywords(keywords, args)
+ assert flat == ['aaaa', 'bbbb', 'ccc', 'ddd', 'eee']
+ assert spec == {'KEY1': ['kkk10', 'kkk11'], 'KEY2': True, 'KEY3': ['kkk3']}
+
+ keywords = {'KEY1': 0, 'KEY2': 4}
+ args = 'aaaa KEY2 eee'.split()
+ flat, spec = pc.sort_by_keywords(keywords, args)
+ assert flat == ['aaaa']
+ assert spec == {'KEY2': ['eee']}
+
+ keywords = {'KEY1': 2, 'KEY2': 2}
+ args = 'KEY1 k10 KEY2 k20 KEY1 k11 KEY2 k21 KEY1 k13'.split()
+ flat, spec = pc.sort_by_keywords(keywords, args)
+ assert flat == []
+ assert spec == {'KEY1': ['k10', 'k11', 'k13'], 'KEY2': ['k20', 'k21']}
+
+
+def test_filter_out_by_keyword():
+ assert pc.filter_out_by_keyword([], 'A') == []
+ assert pc.filter_out_by_keyword(['x'], 'A') == ['x']
+ assert pc.filter_out_by_keyword(['x', 'A'], 'A') == ['x']
+ assert pc.filter_out_by_keyword(['x', 'A', 'B'], 'A') == ['x']
+ assert pc.filter_out_by_keyword(['x', 'A', 'B', 'y'], 'A') == ['x', 'y']
+ assert pc.filter_out_by_keyword(['x', 'A', 'A', 'y'], 'A') == ['x', 'y']
+ assert pc.filter_out_by_keyword(['x', 'A', 'A', 'A'], 'A') == ['x']
+ assert pc.filter_out_by_keyword(['x', 'A', 'A', 'A', 'B', 'y'], 'A') == ['x', 'y']
+ assert pc.filter_out_by_keyword(['x', 'A', 'A', 'A', 'B', 'y', 'A'], 'A') == ['x', 'y']
+ assert pc.filter_out_by_keyword(['x', 'A', 'A', 'A', 'B', 'y', 'A', 'F', 'z'], 'A') == ['x', 'y', 'z']
+
+
+test_data = [
+ [[1, 2, 3], 1, [[1], [2], [3]]],
+ [[1, 2, 3], 2, [[1, 2], [3]]],
+ [[1, 2, 3, 4], 2, [[1, 2], [3, 4]]],
+ [[1], 5, [[1]]],
+]
+
+
+@pytest.mark.parametrize('lst, chunk_size, expected', test_data, ids=[str(num + 1) for num in range(len(test_data))])
+def test_generate_chunks(lst, chunk_size, expected):
+ assert list(pc.generate_chunks(lst, chunk_size)) == expected
diff --git a/build/plugins/tests/test_requirements.py b/build/plugins/tests/test_requirements.py
new file mode 100644
index 0000000000..e571f0c704
--- /dev/null
+++ b/build/plugins/tests/test_requirements.py
@@ -0,0 +1,67 @@
+import pytest
+
+import build.plugins._requirements as requirements
+import build.plugins.lib.test_const as consts
+
+
+class TestRequirements(object):
+ @pytest.mark.parametrize('test_size', consts.TestSize.sizes())
+ def test_cpu(self, test_size):
+ max_cpu = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Cpu)
+ min_cpu = consts.TestRequirementsConstants.MinCpu
+ assert requirements.check_cpu(-1, test_size)
+ assert requirements.check_cpu(min_cpu - 1, test_size)
+ assert requirements.check_cpu("unknown", test_size)
+ assert not requirements.check_cpu(1, test_size)
+ assert not requirements.check_cpu(3, test_size)
+ assert requirements.check_cpu(1000, test_size)
+ if max_cpu != consts.TestRequirementsConstants.All:
+ assert requirements.check_cpu(max_cpu + 1, test_size)
+ assert requirements.check_cpu(max_cpu + 4, test_size)
+ assert requirements.check_cpu(consts.TestRequirementsConstants.All, test_size)
+ else:
+ assert not requirements.check_cpu(consts.TestRequirementsConstants.All, test_size)
+
+ @pytest.mark.parametrize('test_size', consts.TestSize.sizes())
+ def test_ram(self, test_size):
+ max_ram = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Ram)
+ min_ram = consts.TestRequirementsConstants.MinRam
+ assert requirements.check_ram(-1, test_size)
+ assert requirements.check_ram(min_ram - 1, test_size)
+ assert requirements.check_ram(max_ram + 1, test_size)
+ assert not requirements.check_ram(1, test_size)
+ assert not requirements.check_ram(4, test_size)
+ assert not requirements.check_ram(5, test_size)
+ assert not requirements.check_ram(32, consts.TestSize.Large)
+ assert requirements.check_ram(48, consts.TestSize.Large)
+
+ assert not requirements.check_ram(1, test_size, is_kvm=True)
+ assert not requirements.check_ram(4, test_size, is_kvm=True)
+ assert not requirements.check_ram(16, test_size, is_kvm=True)
+ assert requirements.check_ram(32, test_size, is_kvm=True)
+
+ @pytest.mark.parametrize('test_size', consts.TestSize.sizes())
+ def test_ram_disk(self, test_size):
+ max_ram_disk = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.RamDisk)
+ min_ram_disk = consts.TestRequirementsConstants.MinRamDisk
+ assert requirements.check_ram_disk(-1, test_size)
+ assert requirements.check_ram_disk(min_ram_disk - 1, test_size)
+ assert requirements.check_ram_disk(max_ram_disk + 1, test_size)
+ assert requirements.check_ram_disk(33, test_size)
+ assert not requirements.check_ram_disk(32, test_size)
+ assert not requirements.check_ram_disk(1, test_size)
+ assert not requirements.check_ram_disk(4, test_size)
+ assert not requirements.validate_ram_disk_requirement('ram_disk', '0', test_size, False, True, False, False, False, 1)
+ assert not requirements.validate_ram_disk_requirement('ram_disk', '1', test_size, False, True, False, False, False, 1)
+ assert not requirements.validate_ram_disk_requirement('ram_disk', '1', test_size, True, True, False, False, False, 0)
+ assert not requirements.validate_ram_disk_requirement('ram_disk', '1', test_size, False, False, False, False, False, 0)
+ if test_size != consts.TestSize.Large:
+ assert requirements.validate_ram_disk_requirement('ram_disk', '1', test_size, False, True, False, False, False, 0)
+ assert requirements.validate_ram_disk_requirement('ram_disk', '1', test_size, False, True, True, False, False, 0)
+ assert requirements.validate_ram_disk_requirement('ram_disk', '1', test_size, False, True, False, True, False, 0)
+ assert requirements.validate_ram_disk_requirement('ram_disk', '1', test_size, False, True, False, False, True, 0)
+ else:
+ assert not requirements.validate_ram_disk_requirement('ram_disk', '1', test_size, False, True, False, False, False, 0)
+ assert not requirements.validate_ram_disk_requirement('ram_disk', '1', test_size, False, True, True, False, False, 0)
+ assert not requirements.validate_ram_disk_requirement('ram_disk', '1', test_size, False, True, False, True, False, 0)
+ assert not requirements.validate_ram_disk_requirement('ram_disk', '1', test_size, False, True, False, False, True, 0)
diff --git a/build/plugins/tests/test_ssqls.py b/build/plugins/tests/test_ssqls.py
new file mode 100644
index 0000000000..08798b1591
--- /dev/null
+++ b/build/plugins/tests/test_ssqls.py
@@ -0,0 +1,27 @@
+import sys
+from build.plugins.tests import fake_ymake
+sys.modules['ymake'] = fake_ymake
+
+import xml.etree.cElementTree as ET
+
+from build.plugins import ssqls
+
+
+example = '''\
+<?xml version="1.0" encoding="utf-8"?>
+<DbObject>
+ <include path="A.ssqls">&lt;a.h&gt;</include>
+ <include>"b.h"</include>
+
+ <ancestors>
+ <ancestor path="C.ssqls"/>
+ </ancestors>
+</DbObject>
+'''
+
+
+def test_include_parser():
+ doc = ET.fromstring(example)
+ xmls, headers = ssqls.SSQLSParser.parse_doc(doc)
+ assert headers == ['a.h', 'b.h', 'C.h']
+ assert xmls == ['A.ssqls', 'C.ssqls']
diff --git a/build/plugins/tests/ya.make b/build/plugins/tests/ya.make
new file mode 100644
index 0000000000..0c54899cc9
--- /dev/null
+++ b/build/plugins/tests/ya.make
@@ -0,0 +1,25 @@
+PY2TEST()
+
+OWNER(g:yatool)
+
+PEERDIR(
+ build/plugins
+)
+
+PY_SRCS(
+ fake_ymake.py
+)
+
+TEST_SRCS(
+ test_code_generator.py
+ test_common.py
+ test_requirements.py
+ test_ssqls.py
+)
+
+NO_CHECK_IMPORTS(
+ build.plugins.code_generator
+ build.plugins.ssqls
+)
+
+END()
diff --git a/build/plugins/uservices.py b/build/plugins/uservices.py
new file mode 100644
index 0000000000..9a8deb18d4
--- /dev/null
+++ b/build/plugins/uservices.py
@@ -0,0 +1,26 @@
+import json
+import ymake
+
+def on_process_usrv_files(unit, *args):
+ if args[0] == 'NO_DEPS':
+ for f in args[1:]:
+ if f.endswith('.cpp'):
+ unit.on_move([f + '.usrv', 'OUT', f])
+ else:
+ unit.on_move([f + '.usrv', 'OUT_NOAUTO', f])
+ return
+
+ deps_file = unit.resolve(unit.resolve_arc_path(args[0]))
+ try:
+ with open(deps_file, 'r') as f:
+ all_deps = json.load(f)
+ except Exception as e:
+ ymake.report_configure_error('Malformed dependencies JSON `{}`: {}'.format(args[0], repr(e)))
+ return
+ for f in args[1:]:
+ try:
+ deps = all_deps[f]
+ except KeyError:
+ ymake.report_configure_error('Dependencies for {} not found in {}'.format(f, args[0]))
+ unit.on_usrv_mv_with_deps([f])
+ return
+ unit.on_move([f + '.usrv', 'OUT', f, 'CPP_DEPS'] + deps)
diff --git a/build/plugins/ya.make b/build/plugins/ya.make
new file mode 100644
index 0000000000..1bde4b81bf
--- /dev/null
+++ b/build/plugins/ya.make
@@ -0,0 +1,23 @@
+OWNER(g:ymake)
+
+PY2_LIBRARY()
+
+PY_SRCS(
+ code_generator.py
+ ssqls.py
+
+ _common.py
+ _requirements.py
+)
+
+PEERDIR(
+ build/plugins/lib
+ build/plugins/lib/test_const
+)
+
+END()
+
+RECURSE(
+ tests
+ lib/test_const
+)
diff --git a/build/plugins/yabs_generate_conf.py b/build/plugins/yabs_generate_conf.py
new file mode 100644
index 0000000000..49e0982fdb
--- /dev/null
+++ b/build/plugins/yabs_generate_conf.py
@@ -0,0 +1,61 @@
+from _common import sort_by_keywords
+
+
+def get_or_default(kv, name, default):
+ if name in kv:
+ return kv[name][0]
+ return default
+
+
+def onyabs_generate_conf(unit, *args):
+ flat, kv = sort_by_keywords(
+ {'MODE': 1, 'SCRIPT': 1, 'SRC': 1, 'TOOL': 1, 'CONF_DIR': 1, 'DEST': 1}, args
+ )
+ src = get_or_default(kv, 'SRC', 'yabs/server/phantom')
+ mode = get_or_default(kv, 'MODE', 'production')
+
+ script = src + "/" + get_or_default(kv, 'SCRIPT', 'mkconf.py')
+ conf = src + "/" + get_or_default(kv, 'CONF_DIR', 'conf-tmpl')
+ tool = src + "/" + get_or_default(kv, 'TOOL', 'yabs_conf')
+
+ for name in flat:
+ filename = "/".join([conf, name])
+ unit.onpython([
+ script,
+ "--cluster-conf-binary", tool,
+ "--mode", mode,
+ "--dest-dir", "${BINDIR}",
+ filename,
+ "IN", filename,
+ "OUT_NOAUTO", "${BINDIR}/%s" % name,
+ "TOOL", tool
+ ])
+
+
+def onyabs_generate_phantom_conf_patch(unit, *args):
+ flat, kv = sort_by_keywords(
+ {'SRC': 1, 'DST': 1}, args
+ )
+ src = '${ARCADIA_BUILD_ROOT}/' + get_or_default(kv, 'SRC', 'yabs/server/phantom/conf')
+ dst = '${ARCADIA_BUILD_ROOT}/' + get_or_default(kv, 'DST', 'yabs/server/phantom/conf-test')
+ for f in flat:
+ lhs = src + '/' + f
+ rhs = dst + '/' + f
+ unit.onpython([
+ 'mkdiff.py',
+ lhs, rhs,
+ 'IN', lhs,
+ 'IN', rhs,
+ 'STDOUT', f + ".patch"
+ ])
+
+
+def onyabs_generate_phantom_conf_test_check(unit, *args):
+ yabs_path = args[0]
+ for name in args[1:]:
+ unit.onpython("""
+ build/scripts/wrapper.py mkcheckconf.sh ${{ARCADIA_BUILD_ROOT}}/{yabs_path}/phantom/conf-test/yabs-{role}.conf yabs-check-{role}.conf
+ IN mkcheckconf.sh ${{ARCADIA_BUILD_ROOT}}/{yabs_path}/phantom/conf-test/yabs-{role}.conf
+ OUT yabs-check-{role}.conf
+""".format(yabs_path=yabs_path, role=name).split() # noqa
+ )
diff --git a/build/plugins/yql_python_udf.py b/build/plugins/yql_python_udf.py
new file mode 100644
index 0000000000..72584c3e62
--- /dev/null
+++ b/build/plugins/yql_python_udf.py
@@ -0,0 +1,55 @@
+from _common import sort_by_keywords
+
+
+def get_or_default(kv, name, default):
+ if name in kv:
+ return kv[name][0]
+ return default
+
+
+def onregister_yql_python_udf(unit, *args):
+ flat, kv = sort_by_keywords({'NAME': 1, 'RESOURCE_NAME': 1, 'ADD_LIBRA_MODULES': 1}, args)
+ assert len(flat) == 0
+ name = get_or_default(kv, 'NAME', 'CustomPython')
+ resource_name = get_or_default(kv, 'RESOURCE_NAME', name)
+ add_libra_modules = get_or_default(kv, 'ADD_LIBRA_MODULES', 'no') == 'yes'
+
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') == 'yes'
+ py3 = unit.get('PYTHON3') == 'yes'
+
+ unit.onyql_abi_version(['2', '14', '0'])
+ unit.onpeerdir(['yql/udfs/common/python/python_udf'])
+ unit.onpeerdir(['ydb/library/yql/public/udf'])
+
+ if add_libra_modules:
+ unit.onpeerdir(['quality/user_sessions/libra_arc/noyql'])
+ unit.onpeerdir(['yql/udfs/quality/libra/module'])
+
+ if use_arcadia_python:
+ flavor = 'Arcadia'
+ unit.onpeerdir([
+ 'library/python/runtime',
+ 'yql/udfs/common/python/main'
+ ] if not py3 else [
+ 'library/python/runtime_py3',
+ 'yql/udfs/common/python/main_py3'
+ ])
+ else:
+ flavor = 'System'
+
+ output_includes = [
+ 'yql/udfs/common/python/python_udf/python_udf.h',
+ 'ydb/library/yql/public/udf/udf_registrator.h',
+ ]
+ if add_libra_modules:
+ output_includes.append('yql/udfs/quality/libra/module/module.h')
+
+ path = name + '.yql_python_udf.cpp'
+ libra_flag = '1' if add_libra_modules else '0'
+ unit.onpython([
+ 'build/scripts/gen_yql_python_udf.py',
+ flavor, name, resource_name, path, libra_flag,
+ 'OUT', path,
+ 'OUTPUT_INCLUDES',
+ ] + output_includes
+ )
diff --git a/build/plugins/ytest.py b/build/plugins/ytest.py
new file mode 100644
index 0000000000..3b98011922
--- /dev/null
+++ b/build/plugins/ytest.py
@@ -0,0 +1,1203 @@
+import os
+import re
+import sys
+import json
+import copy
+import base64
+import shlex
+import _common
+import lib.test_const as consts
+import _requirements as reqs
+import StringIO
+import subprocess
+import collections
+
+import ymake
+
+
+MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
+MDS_SCHEME = 'mds'
+CANON_DATA_DIR_NAME = 'canondata'
+CANON_OUTPUT_STORAGE = 'canondata_storage'
+CANON_RESULT_FILE_NAME = 'result.json'
+CANON_MDS_RESOURCE_REGEX = re.compile(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)')
+CANON_SBR_RESOURCE_REGEX = re.compile(r'(sbr:/?/?(\d+))')
+
+BLOCK_SEPARATOR = '============================================================='
+SPLIT_FACTOR_MAX_VALUE = 1000
+SPLIT_FACTOR_TEST_FILES_MAX_VALUE = 4250
+PARTITION_MODS = ('SEQUENTIAL', 'MODULO')
+DEFAULT_TIDY_CONFIG = "build/config/tests/clang_tidy/config.yaml"
+DEFAULT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_default_map.json"
+PROJECT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_project_map.json"
+
+
+tidy_config_map = None
+
+def ontest_data(unit, *args):
+ ymake.report_configure_error("TEST_DATA is removed in favour of DATA")
+
+
+def prepare_recipes(data):
+ data = data.replace('"USE_RECIPE_DELIM"', "\n")
+ data = data.replace("$TEST_RECIPES_VALUE", "")
+ return base64.b64encode(data or "")
+
+
+def prepare_env(data):
+ data = data.replace("$TEST_ENV_VALUE", "")
+ return serialize_list(shlex.split(data))
+
+
+def is_yt_spec_contain_pool_info(filename): # XXX switch to yson in ymake + perf test for configure
+ pool_re = re.compile(r"""['"]*pool['"]*\s*?=""")
+ cypress_root_re = re.compile(r"""['"]*cypress_root['"]*\s*=""")
+ with open(filename, 'r') as afile:
+ yt_spec = afile.read()
+ return pool_re.search(yt_spec) and cypress_root_re.search(yt_spec)
+
+
+def validate_test(unit, kw):
+ def get_list(key):
+ return deserialize_list(kw.get(key, ""))
+
+ valid_kw = copy.deepcopy(kw)
+ errors = []
+ warnings = []
+
+ if valid_kw.get('SCRIPT-REL-PATH') == 'boost.test':
+ project_path = valid_kw.get('BUILD-FOLDER-PATH', "")
+ if not project_path.startswith(("contrib", "mail", "maps", "tools/idl", "metrika", "devtools", "mds", "yandex_io", "smart_devices")):
+ errors.append("BOOSTTEST is not allowed here")
+ elif valid_kw.get('SCRIPT-REL-PATH') == 'gtest':
+ project_path = valid_kw.get('BUILD-FOLDER-PATH', "")
+ if not project_path.startswith(("contrib", "devtools", "mail", "mds")):
+ errors.append("GTEST_UGLY is not allowed here, use GTEST instead")
+
+ size_timeout = collections.OrderedDict(sorted(consts.TestSize.DefaultTimeouts.items(), key=lambda t: t[1]))
+
+ size = valid_kw.get('SIZE', consts.TestSize.Small).lower()
+ tags = set(get_list("TAG"))
+ requirements_orig = get_list("REQUIREMENTS")
+ in_autocheck = consts.YaTestTags.NotAutocheck not in tags and consts.YaTestTags.Manual not in tags
+ is_fat = consts.YaTestTags.Fat in tags
+ is_force_sandbox = consts.YaTestTags.ForceDistbuild not in tags and is_fat
+ is_ytexec_run = consts.YaTestTags.YtRunner in tags
+ is_fuzzing = valid_kw.get("FUZZING", False)
+ is_kvm = 'kvm' in requirements_orig
+ requirements = {}
+ secret_requirements = ('sb_vault', 'yav')
+ list_requirements = secret_requirements
+ for req in requirements_orig:
+ if req in ('kvm', ):
+ requirements[req] = str(True)
+ continue
+
+ if ":" in req:
+ req_name, req_value = req.split(":", 1)
+ if req_name in list_requirements:
+ requirements[req_name] = ",".join(filter(None, [requirements.get(req_name), req_value]))
+ else:
+ if req_name in requirements:
+ if req_value in ["0"]:
+ warnings.append("Requirement [[imp]]{}[[rst]] is dropped [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format(req_name, requirements[req_name], req_value))
+ del requirements[req_name]
+ elif requirements[req_name] != req_value:
+ warnings.append("Requirement [[imp]]{}[[rst]] is redefined [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format(req_name, requirements[req_name], req_value))
+ requirements[req_name] = req_value
+ else:
+ requirements[req_name] = req_value
+ else:
+ errors.append("Invalid requirement syntax [[imp]]{}[[rst]]: expect <requirement>:<value>".format(req))
+
+ if not errors:
+ for req_name, req_value in requirements.items():
+ error_msg = reqs.validate_requirement(req_name, req_value, size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, requirements)
+ if error_msg:
+ errors += [error_msg]
+
+ invalid_requirements_for_distbuild = [requirement for requirement in requirements.keys() if requirement not in ('ram', 'ram_disk', 'cpu', 'network')]
+ sb_tags = [tag for tag in tags if tag.startswith('sb:')]
+
+ if is_fat:
+ if size != consts.TestSize.Large:
+ errors.append("Only LARGE test may have ya:fat tag")
+
+ if in_autocheck and not is_force_sandbox:
+ if invalid_requirements_for_distbuild:
+ errors.append("'{}' REQUIREMENTS options can be used only for FAT tests without ya:force_distbuild tag. Remove TAG(ya:force_distbuild) or an option.".format(invalid_requirements_for_distbuild))
+ if sb_tags:
+ errors.append("You can set sandbox tags '{}' only for FAT tests without ya:force_distbuild. Remove TAG(ya:force_sandbox) or sandbox tags.".format(sb_tags))
+ if consts.YaTestTags.SandboxCoverage in tags:
+ errors.append("You can set 'ya:sandbox_coverage' tag only for FAT tests without ya:force_distbuild.")
+ if is_ytexec_run:
+ errors.append("Running LARGE tests over YT (ya:yt) on Distbuild (ya:force_distbuild) is forbidden. Consider removing TAG(ya:force_distbuild).")
+ else:
+ if is_force_sandbox:
+ errors.append('ya:force_sandbox can be used with LARGE tests only')
+ if consts.YaTestTags.NoFuse in tags:
+ errors.append('ya:nofuse can be used with LARGE tests only')
+ if consts.YaTestTags.Privileged in tags:
+ errors.append("ya:privileged can be used with LARGE tests only")
+ if in_autocheck and size == consts.TestSize.Large:
+ errors.append("LARGE test must have ya:fat tag")
+
+ if consts.YaTestTags.Privileged in tags and 'container' not in requirements:
+ errors.append("Only tests with 'container' requirement can have 'ya:privileged' tag")
+
+ if size not in size_timeout:
+ errors.append("Unknown test size: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(size.upper(), ", ".join([sz.upper() for sz in size_timeout.keys()])))
+ else:
+ try:
+ timeout = int(valid_kw.get('TEST-TIMEOUT', size_timeout[size]) or size_timeout[size])
+ script_rel_path = valid_kw.get('SCRIPT-REL-PATH')
+ if timeout < 0:
+ raise Exception("Timeout must be > 0")
+ if size_timeout[size] < timeout and in_autocheck and script_rel_path != 'java.style':
+ suggested_size = None
+ for s, t in size_timeout.items():
+ if timeout <= t:
+ suggested_size = s
+ break
+
+ if suggested_size:
+ suggested_size = ", suggested size: [[imp]]{}[[rst]]".format(suggested_size.upper())
+ else:
+ suggested_size = ""
+ errors.append("Max allowed timeout for test size [[imp]]{}[[rst]] is [[imp]]{} sec[[rst]]{}".format(size.upper(), size_timeout[size], suggested_size))
+ except Exception as e:
+ errors.append("Error when parsing test timeout: [[bad]]{}[[rst]]".format(e))
+
+ requirements_list = []
+ for req_name, req_value in requirements.iteritems():
+ requirements_list.append(req_name + ":" + req_value)
+ valid_kw['REQUIREMENTS'] = serialize_list(requirements_list)
+
+ # Mark test with ya:external tag if it requests any secret from external storages
+ # Such a test is not stable and not reproducible by definition
+ for x in secret_requirements:
+ if x in requirements:
+ tags.add(consts.YaTestTags.External)
+
+ if valid_kw.get("FUZZ-OPTS"):
+ for option in get_list("FUZZ-OPTS"):
+ if not option.startswith("-"):
+ errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. All fuzzer options should start with '-'".format(option))
+ break
+ eqpos = option.find("=")
+ if eqpos == -1 or len(option) == eqpos + 1:
+ errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. All fuzzer options should obtain value specified after '='".format(option))
+ break
+ if option[eqpos - 1] == " " or option[eqpos + 1] == " ":
+ errors.append("Spaces are not allowed: '[[imp]]{}[[rst]]'".format(option))
+ break
+ if option[:eqpos] in ("-runs", "-dict", "-jobs", "-workers", "-artifact_prefix", "-print_final_stats"):
+ errors.append("You can't use '[[imp]]{}[[rst]]' - it will be automatically calculated or configured during run".format(option))
+ break
+
+ if valid_kw.get("YT-SPEC"):
+ if not is_ytexec_run:
+ errors.append("You can use the YT_SPEC macro only for tests marked with the ya:yt tag")
+ else:
+ for filename in get_list("YT-SPEC"):
+ filename = unit.resolve('$S/' + filename)
+ if not os.path.exists(filename):
+ errors.append("File '{}' specified in the YT_SPEC macro doesn't exist".format(filename))
+ continue
+ if not is_yt_spec_contain_pool_info(filename):
+ tags.add(consts.YaTestTags.External)
+ tags.add("ya:yt_research_pool")
+
+ if valid_kw.get("USE_ARCADIA_PYTHON") == "yes" and valid_kw.get("SCRIPT-REL-PATH") == "py.test":
+ errors.append("PYTEST_SCRIPT is deprecated")
+
+ partition = valid_kw.get('TEST_PARTITION', 'SEQUENTIAL')
+ if partition not in PARTITION_MODS:
+ raise ValueError('partition mode should be one of {}, detected: {}'.format(PARTITION_MODS, partition))
+
+ if valid_kw.get('SPLIT-FACTOR'):
+ if valid_kw.get('FORK-MODE') == 'none':
+ errors.append('SPLIT_FACTOR must be used with the FORK_TESTS() or FORK_SUBTESTS() macro')
+
+ value = 1
+ try:
+ value = int(valid_kw.get('SPLIT-FACTOR'))
+ if value <= 0:
+ raise ValueError("must be > 0")
+ if value > SPLIT_FACTOR_MAX_VALUE:
+ raise ValueError("the maximum allowed value is {}".format(SPLIT_FACTOR_MAX_VALUE))
+ except ValueError as e:
+ errors.append('Incorrect SPLIT_FACTOR value: {}'.format(e))
+
+ if valid_kw.get('FORK-TEST-FILES') and size != consts.TestSize.Large:
+ nfiles = count_entries(valid_kw.get('TEST-FILES'))
+ if nfiles * value > SPLIT_FACTOR_TEST_FILES_MAX_VALUE:
+ errors.append('Too many chunks generated: {} (limit: {}). Remove FORK_TEST_FILES() macro or reduce SPLIT_FACTOR({}).'.format(
+ nfiles * value, SPLIT_FACTOR_TEST_FILES_MAX_VALUE, value))
+
+ if tags:
+ valid_kw['TAG'] = serialize_list(tags)
+
+ unit_path = get_norm_unit_path(unit)
+ if not is_fat and consts.YaTestTags.Noretries in tags and not is_ytexec_run \
+ and not unit_path.startswith("devtools/dummy_arcadia/test/noretries"):
+ errors.append("Only LARGE tests can have 'ya:noretries' tag")
+
+ if errors:
+ return None, warnings, errors
+
+ return valid_kw, warnings, errors
+
+
+def get_norm_unit_path(unit, extra=None):
+ path = _common.strip_roots(unit.path())
+ if extra:
+ return '{}/{}'.format(path, extra)
+ return path
+
+
+def dump_test(unit, kw):
+ valid_kw, warnings, errors = validate_test(unit, kw)
+ for w in warnings:
+ unit.message(['warn', w])
+ for e in errors:
+ ymake.report_configure_error(e)
+ if valid_kw is None:
+ return None
+ string_handler = StringIO.StringIO()
+ for k, v in valid_kw.iteritems():
+ print >>string_handler, k + ': ' + v
+ print >>string_handler, BLOCK_SEPARATOR
+ data = string_handler.getvalue()
+ string_handler.close()
+ return data
+
+
+def serialize_list(lst):
+ lst = filter(None, lst)
+ return '\"' + ';'.join(lst) + '\"' if lst else ''
+
+
+def deserialize_list(val):
+ return filter(None, val.replace('"', "").split(";"))
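+ # Illustrative round trip: serialize_list(['a', 'b']) == '"a;b"'; deserialize_list('"a;b"') == ['a', 'b']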
+
+
+def count_entries(x):
+ # see (de)serialize_list
+ assert x is None or isinstance(x, str), type(x)
+ if not x:
+ return 0
+ return x.count(";") + 1
+
+
+def get_values_list(unit, key):
+ res = map(str.strip, (unit.get(key) or '').replace('$' + key, '').strip().split())
+ return [r for r in res if r and r not in ['""', "''"]]
+
+
+def get_norm_paths(unit, key):
+ # return paths without trailing (back)slash
+ return [x.rstrip('\\/').replace('${ARCADIA_ROOT}/', '') for x in get_values_list(unit, key)]
+
+
+def get_unit_list_variable(unit, name):
+ items = unit.get(name)
+ if items:
+ items = items.split(' ')
+ assert items[0] == "${}".format(name), (items, name)
+ return items[1:]
+ return []
+
+
+def implies(a, b):
+ return bool((not a) or b)
+
+
+def match_coverage_extractor_requirements(unit):
+ # we shouldn't add test if
+ return all([
+ # tests are not requested
+ unit.get("TESTS_REQUESTED") == "yes",
+ # build doesn't imply clang coverage, which supports segment extraction from the binaries
+ unit.get("CLANG_COVERAGE") == "yes",
+ # contrib wasn't requested
+ implies(get_norm_unit_path(unit).startswith("contrib/"), unit.get("ENABLE_CONTRIB_COVERAGE") == "yes"),
+ ])
+
+
+def get_tidy_config_map(unit, map_path):
+ config_map_path = unit.resolve(os.path.join("$S", map_path))
+ config_map = {}
+ try:
+ with open(config_map_path, 'r') as afile:
+ config_map = json.load(afile)
+ except ValueError:
+ ymake.report_configure_error("{} is invalid json".format(map_path))
+ except Exception as e:
+ ymake.report_configure_error(str(e))
+ return config_map
+
+
+def get_default_tidy_config(unit):
+ unit_path = get_norm_unit_path(unit)
+ tidy_default_config_map = get_tidy_config_map(unit, DEFAULT_TIDY_CONFIG_MAP_PATH)
+ for project_prefix, config_path in tidy_default_config_map.items():
+ if unit_path.startswith(project_prefix):
+ return config_path
+ return DEFAULT_TIDY_CONFIG
+
+
+ordered_tidy_map = None
+
+
+def get_project_tidy_config(unit):
+ global ordered_tidy_map
+ if ordered_tidy_map is None:
+ ordered_tidy_map = list(reversed(sorted(get_tidy_config_map(unit, PROJECT_TIDY_CONFIG_MAP_PATH).items())))
+ unit_path = get_norm_unit_path(unit)
+
+ for project_prefix, config_path in ordered_tidy_map:
+ if unit_path.startswith(project_prefix):
+ return config_path
+ else:
+ return get_default_tidy_config(unit)
+
+
+def get_coverage_filter_regexp(pattern, cache={}):
+ return cache[pattern] if pattern in cache else cache.setdefault(pattern, re.compile(pattern))
+
+
+def onset_cpp_coverage_flags(unit):
+ coverage_target_regexp = unit.get("COVERAGE_TARGET_REGEXP")
+ cov_re = get_coverage_filter_regexp(coverage_target_regexp)
+ unit_path = get_norm_unit_path(unit)
+ paths_to_exclude = ("contrib",)
+ included_path = not unit_path.startswith(paths_to_exclude)
+
+ if unit.get("CLANG_COVERAGE") != "no" and (included_path and re.match(cov_re, unit_path) is not None):
+ cflags = unit.get("CFLAGS")
+ ldflags = unit.get("LDFLAGS")
+ changed = False
+ for flag in consts.COVERAGE_CFLAGS:
+ if flag not in cflags:
+ cflags = cflags + ' ' + flag
+ changed = True
+ if changed:
+ unit.set(["CFLAGS", cflags])
+ changed = False
+ for flag in consts.COVERAGE_LDFLAGS:
+ if flag not in ldflags:
+ ldflags = ldflags + ' ' + flag
+ changed = True
+ if changed:
+ unit.set(["LDFLAGS", ldflags])
+
+
+
+def onadd_ytest(unit, *args):
+ keywords = {"DEPENDS": -1, "DATA": -1, "TIMEOUT": 1, "FORK_MODE": 1, "SPLIT_FACTOR": 1,
+ "FORK_SUBTESTS": 0, "FORK_TESTS": 0}
+ flat_args, spec_args = _common.sort_by_keywords(keywords, args)
+ if unit.get('ADD_SRCDIR_TO_TEST_DATA') == "yes":
+ unit.ondata_files(get_norm_unit_path(unit))
+
+ test_data = sorted(_common.filter_out_by_keyword(spec_args.get('DATA', []) + get_norm_paths(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED'))
+
+ if flat_args[1] == "fuzz.test":
+ unit.ondata("arcadia/fuzzing/{}/corpus.json".format(get_norm_unit_path(unit)))
+ elif flat_args[1] == "go.test":
+ data, _ = get_canonical_test_resources(unit)
+ test_data += data
+ elif flat_args[1] == "coverage.extractor" and not match_coverage_extractor_requirements(unit):
+ # XXX
+ # The current ymake implementation doesn't allow calling a macro inside the 'when' body,
+ # that's why we add ADD_YTEST(coverage.extractor) to every PROGRAM entry and check the requirements later
+ return
+ elif flat_args[1] == "clang_tidy" and unit.get("TIDY_ENABLED") != "yes":
+ # Graph is not prepared
+ return
+ elif unit.get("TIDY") == "yes" and unit.get("TIDY_ENABLED") != "yes":
+ # clang_tidy disabled for module
+ return
+ elif flat_args[1] == "no.test":
+ return
+ test_size = ''.join(spec_args.get('SIZE', [])) or unit.get('TEST_SIZE_NAME') or ''
+ test_tags = serialize_list(_get_test_tags(unit, spec_args))
+ test_timeout = ''.join(spec_args.get('TIMEOUT', [])) or unit.get('TEST_TIMEOUT') or ''
+ test_requirements = spec_args.get('REQUIREMENTS', []) + get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
+
+ if flat_args[1] != "clang_tidy" and unit.get("TIDY_ENABLED") == "yes":
+ # graph changed for clang_tidy tests
+ if flat_args[1] in ("unittest.py", "gunittest", "g_benchmark"):
+ flat_args[1] = "clang_tidy"
+ test_size = 'SMALL'
+ test_tags = ''
+ test_timeout = "60"
+ test_requirements = []
+ unit.set(["TEST_YT_SPEC_VALUE", ""])
+ else:
+ return
+
+ if flat_args[1] == "clang_tidy" and unit.get("TIDY_ENABLED") == "yes":
+ if unit.get("TIDY_CONFIG"):
+ default_config_path = unit.get("TIDY_CONFIG")
+ project_config_path = unit.get("TIDY_CONFIG")
+ else:
+ default_config_path = get_default_tidy_config(unit)
+ project_config_path = get_project_tidy_config(unit)
+
+ unit.set(["DEFAULT_TIDY_CONFIG", default_config_path])
+ unit.set(["PROJECT_TIDY_CONFIG", project_config_path])
+
+ fork_mode = []
+ if 'FORK_SUBTESTS' in spec_args:
+ fork_mode.append('subtests')
+ if 'FORK_TESTS' in spec_args:
+ fork_mode.append('tests')
+ fork_mode = fork_mode or spec_args.get('FORK_MODE', []) or unit.get('TEST_FORK_MODE').split()
+ fork_mode = ' '.join(fork_mode) if fork_mode else ''
+
+ unit_path = get_norm_unit_path(unit)
+
+ test_record = {
+ 'TEST-NAME': flat_args[0],
+ 'SCRIPT-REL-PATH': flat_args[1],
+ 'TESTED-PROJECT-NAME': unit.name(),
+ 'TESTED-PROJECT-FILENAME': unit.filename(),
+ 'SOURCE-FOLDER-PATH': unit_path,
+ # TODO get rid of BUILD-FOLDER-PATH
+ 'BUILD-FOLDER-PATH': unit_path,
+ 'BINARY-PATH': "{}/{}".format(unit_path, unit.filename()),
+ 'GLOBAL-LIBRARY-PATH': unit.global_filename(),
+ 'CUSTOM-DEPENDENCIES': ' '.join(spec_args.get('DEPENDS', []) + get_values_list(unit, 'TEST_DEPENDS_VALUE')),
+ 'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
+ 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
+ # 'TEST-PRESERVE-ENV': 'da',
+ 'TEST-DATA': serialize_list(test_data),
+ 'TEST-TIMEOUT': test_timeout,
+ 'FORK-MODE': fork_mode,
+ 'SPLIT-FACTOR': ''.join(spec_args.get('SPLIT_FACTOR', [])) or unit.get('TEST_SPLIT_FACTOR') or '',
+ 'SIZE': test_size,
+ 'TAG': test_tags,
+ 'REQUIREMENTS': serialize_list(test_requirements),
+ 'TEST-CWD': unit.get('TEST_CWD_VALUE') or '',
+ 'FUZZ-DICTS': serialize_list(spec_args.get('FUZZ_DICTS', []) + get_unit_list_variable(unit, 'FUZZ_DICTS_VALUE')),
+ 'FUZZ-OPTS': serialize_list(spec_args.get('FUZZ_OPTS', []) + get_unit_list_variable(unit, 'FUZZ_OPTS_VALUE')),
+ 'YT-SPEC': serialize_list(spec_args.get('YT_SPEC', []) + get_unit_list_variable(unit, 'TEST_YT_SPEC_VALUE')),
+ 'BLOB': unit.get('TEST_BLOB_DATA') or '',
+ 'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
+ 'TEST_IOS_DEVICE_TYPE': unit.get('TEST_IOS_DEVICE_TYPE_VALUE') or '',
+ 'TEST_IOS_RUNTIME_TYPE': unit.get('TEST_IOS_RUNTIME_TYPE_VALUE') or '',
+ 'ANDROID_APK_TEST_ACTIVITY': unit.get('ANDROID_APK_TEST_ACTIVITY_VALUE') or '',
+ 'TEST_PARTITION': unit.get("TEST_PARTITION") or 'SEQUENTIAL',
+ 'GO_BENCH_TIMEOUT': unit.get('GO_BENCH_TIMEOUT') or '',
+ }
+
+ if flat_args[1] == "go.bench":
+ if "ya:run_go_benchmark" not in test_record["TAG"]:
+ return
+ else:
+ test_record["TEST-NAME"] += "_bench"
+
+ if flat_args[1] == 'fuzz.test' and unit.get('FUZZING') == 'yes':
+ test_record['FUZZING'] = '1'
+ # use all cores if fuzzing requested
+ test_record['REQUIREMENTS'] = serialize_list(filter(None, deserialize_list(test_record['REQUIREMENTS']) + ["cpu:all", "ram:all"]))
+
+ data = dump_test(unit, test_record)
+ if data:
+ unit.set_property(["DART_DATA", data])
+
+
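+# Converts the source directories listed in the given variable into arcadia-rooted test data paths.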
+def java_srcdirs_to_data(unit, var):
+ extra_data = []
+ for srcdir in (unit.get(var) or '').replace('$' + var, '').split():
+ if srcdir == '.':
+ srcdir = unit.get('MODDIR')
+ if srcdir.startswith('${ARCADIA_ROOT}/') or srcdir.startswith('$ARCADIA_ROOT/'):
+ srcdir = srcdir.replace('${ARCADIA_ROOT}/', '$S/')
+ srcdir = srcdir.replace('$ARCADIA_ROOT/', '$S/')
+ if srcdir.startswith('${CURDIR}') or srcdir.startswith('$CURDIR'):
+ srcdir = srcdir.replace('${CURDIR}', os.path.join('$S', unit.get('MODDIR')))
+ srcdir = srcdir.replace('$CURDIR', os.path.join('$S', unit.get('MODDIR')))
+ srcdir = unit.resolve_arc_path(srcdir)
+ if not srcdir.startswith('$'):
+ srcdir = os.path.join('$S', unit.get('MODDIR'), srcdir)
+ if srcdir.startswith('$S'):
+ extra_data.append(srcdir.replace('$S', 'arcadia'))
+ return serialize_list(extra_data)
+
+
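+# Registers auxiliary checks (flake8, JAVA_STYLE, gofmt, check.data, etc.) as dart records.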
+def onadd_check(unit, *args):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+ flat_args, spec_args = _common.sort_by_keywords({"DEPENDS": -1, "TIMEOUT": 1, "DATA": -1, "TAG": -1,
+ "REQUIREMENTS": -1, "FORK_MODE": 1, "SPLIT_FACTOR": 1,
+ "FORK_SUBTESTS": 0, "FORK_TESTS": 0, "SIZE": 1}, args)
+ check_type = flat_args[0]
+
+ if check_type in ("check.data", "check.resource") and unit.get('VALIDATE_DATA') == "no":
+ return
+
+ test_dir = get_norm_unit_path(unit)
+
+ test_timeout = ''
+ fork_mode = ''
+ extra_test_data = ''
+ extra_test_dart_data = {}
+ ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes'
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
+ uid_ext = ''
+ script_rel_path = check_type
+ test_files = flat_args[1:]
+
+ supported_no_lint_values = ('none', 'none_internal', 'ktlint')
+ no_lint_value = unit.get('_NO_LINT_VALUE')
+ if no_lint_value and no_lint_value not in supported_no_lint_values:
+ ymake.report_configure_error('Unsupported value for NO_LINT macro: {}'.format(no_lint_value))
+
+ if check_type in ["check.data", "check.resource"]:
+ uid_ext = unit.get("SBR_UID_EXT").split(" ", 1)[-1] # strip variable name
+
+ if check_type in ["flake8.py2", "flake8.py3", "black"]:
+ fork_mode = unit.get('TEST_FORK_MODE') or ''
+ elif check_type == "JAVA_STYLE":
+ if ymake_java_test and not unit.get('ALL_SRCDIRS'):
+ return
+ if len(flat_args) < 2:
+ raise Exception("Not enough arguments for JAVA_STYLE check")
+ check_level = flat_args[1]
+ allowed_levels = {
+ 'base': '/yandex_checks.xml',
+ 'strict': '/yandex_checks_strict.xml',
+ 'extended': '/yandex_checks_extended.xml',
+ 'library': '/yandex_checks_library.xml',
+ }
+ if check_level not in allowed_levels:
+ raise Exception("'{}' is not allowed in LINT(), use one of {}".format(check_level, allowed_levels.keys()))
+ test_files[0] = allowed_levels[check_level] # replace check_level with path to config file
+ script_rel_path = "java.style"
+ test_timeout = '240'
+ fork_mode = unit.get('TEST_FORK_MODE') or ''
+ if ymake_java_test:
+ extra_test_data = java_srcdirs_to_data(unit, 'ALL_SRCDIRS')
+
+ # jstyle should use the latest jdk
+ unit.onpeerdir([unit.get('JDK_LATEST_PEERDIR')])
+ extra_test_dart_data['JDK_LATEST_VERSION'] = unit.get('JDK_LATEST_VERSION')
+ # TODO remove when ya-bin is released (https://st.yandex-team.ru/DEVTOOLS-9611)
+ extra_test_dart_data['JDK_RESOURCE'] = 'JDK' + (unit.get('JDK_VERSION') or unit.get('JDK_REAL_VERSION') or '_DEFAULT')
+ elif check_type == "gofmt":
+ if test_files:
+ test_dir = os.path.dirname(test_files[0]).lstrip("$S/")
+ elif check_type == "check.data":
+ data_re = re.compile(r"sbr:/?/?(\d+)=?.*")
+ data = flat_args[1:]
+ resources = []
+ for f in data:
+ matched = re.match(data_re, f)
+ if matched:
+ resources.append(matched.group(1))
+ if resources:
+ test_files = resources
+ else:
+ return
+
+ serialized_test_files = serialize_list(test_files)
+
+ test_record = {
+ 'TEST-NAME': check_type.lower(),
+ 'TEST-TIMEOUT': test_timeout,
+ 'SCRIPT-REL-PATH': script_rel_path,
+ 'TESTED-PROJECT-NAME': os.path.basename(test_dir),
+ 'SOURCE-FOLDER-PATH': test_dir,
+ 'CUSTOM-DEPENDENCIES': " ".join(spec_args.get('DEPENDS', [])),
+ 'TEST-DATA': extra_test_data,
+ 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
+ 'SBR-UID-EXT': uid_ext,
+ 'SPLIT-FACTOR': '',
+ 'TEST_PARTITION': 'SEQUENTIAL',
+ 'FORK-MODE': fork_mode,
+ 'FORK-TEST-FILES': '',
+ 'SIZE': 'SMALL',
+ 'TAG': '',
+ 'REQUIREMENTS': " ".join(spec_args.get('REQUIREMENTS', [])),
+ 'USE_ARCADIA_PYTHON': use_arcadia_python or '',
+ 'OLD_PYTEST': 'no',
+ 'PYTHON-PATHS': '',
+ # TODO remove FILES, see DEVTOOLS-7052
+ 'FILES': serialized_test_files,
+ 'TEST-FILES': serialized_test_files,
+ 'NO_JBUILD': 'yes' if ymake_java_test else 'no',
+ }
+ test_record.update(extra_test_dart_data)
+
+ data = dump_test(unit, test_record)
+ if data:
+ unit.set_property(["DART_DATA", data])
+
+
+def on_register_no_check_imports(unit):
+ s = unit.get('NO_CHECK_IMPORTS_FOR_VALUE')
+ if s not in ('', 'None'):
+ unit.onresource(['-', 'py/no_check_imports/{}="{}"'.format(_common.pathid(s), s)])
+
+
+def onadd_check_py_imports(unit, *args):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+ if unit.get('NO_CHECK_IMPORTS_FOR_VALUE').strip() == "":
+ return
+ unit.onpeerdir(['library/python/testing/import_test'])
+ check_type = "py.imports"
+ test_dir = get_norm_unit_path(unit)
+
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
+ test_files = serialize_list([get_norm_unit_path(unit, unit.filename())])
+ test_record = {
+ 'TEST-NAME': "pyimports",
+ 'TEST-TIMEOUT': '',
+ 'SCRIPT-REL-PATH': check_type,
+ 'TESTED-PROJECT-NAME': os.path.basename(test_dir),
+ 'SOURCE-FOLDER-PATH': test_dir,
+ 'CUSTOM-DEPENDENCIES': '',
+ 'TEST-DATA': '',
+ 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
+ 'SPLIT-FACTOR': '',
+ 'TEST_PARTITION': 'SEQUENTIAL',
+ 'FORK-MODE': '',
+ 'FORK-TEST-FILES': '',
+ 'SIZE': 'SMALL',
+ 'TAG': '',
+ 'USE_ARCADIA_PYTHON': use_arcadia_python or '',
+ 'OLD_PYTEST': 'no',
+ 'PYTHON-PATHS': '',
+ # TODO remove FILES, see DEVTOOLS-7052
+ 'FILES': test_files,
+ 'TEST-FILES': test_files,
+ }
+ if unit.get('NO_CHECK_IMPORTS_FOR_VALUE') != "None":
+ test_record["NO-CHECK"] = serialize_list(get_values_list(unit, 'NO_CHECK_IMPORTS_FOR_VALUE') or ["*"])
+ else:
+ test_record["NO-CHECK"] = ''
+ data = dump_test(unit, test_record)
+ if data:
+ unit.set_property(["DART_DATA", data])
+
+
+def onadd_pytest_script(unit, *args):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+ unit.set(["PYTEST_BIN", "no"])
+ custom_deps = get_values_list(unit, 'TEST_DEPENDS_VALUE')
+ timeout = filter(None, [unit.get(["TEST_TIMEOUT"])])
+ if unit.get('ADD_SRCDIR_TO_TEST_DATA') == "yes":
+ unit.ondata_files(get_norm_unit_path(unit))
+
+ if timeout:
+ timeout = timeout[0]
+ else:
+ timeout = '0'
+ test_type = args[0]
+ fork_mode = unit.get('TEST_FORK_MODE').split() or ''
+ split_factor = unit.get('TEST_SPLIT_FACTOR') or ''
+ test_size = unit.get('TEST_SIZE_NAME') or ''
+
+ test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
+ tags = _get_test_tags(unit)
+ requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
+ test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
+ data, data_files = get_canonical_test_resources(unit)
+ test_data += data
+ python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
+ binary_path = os.path.join(get_norm_unit_path(unit), unit.filename())
+ test_cwd = unit.get('TEST_CWD_VALUE') or ''
+ _dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, data_files=data_files)
+
+
+def onadd_pytest_bin(unit, *args):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+ flat, kws = _common.sort_by_keywords({'RUNNER_BIN': 1}, args)
+ if flat:
+ ymake.report_configure_error(
+ 'Unknown arguments found while processing add_pytest_bin macro: {!r}'
+ .format(flat)
+ )
+
+ runner_bin = kws.get('RUNNER_BIN', [None])[0]
+ test_type = 'py3test.bin' if (unit.get("PYTHON3") == 'yes') else "pytest.bin"
+
+ add_test_to_dart(unit, test_type, runner_bin=runner_bin)
+
+
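+# Common helper for pytest/exectest-like tests: gathers metadata from unit variables and forwards it to _dump_test.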
+def add_test_to_dart(unit, test_type, binary_path=None, runner_bin=None):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+ if unit.get('ADD_SRCDIR_TO_TEST_DATA') == "yes":
+ unit.ondata_files(get_norm_unit_path(unit))
+ custom_deps = get_values_list(unit, 'TEST_DEPENDS_VALUE')
+ timeout = filter(None, [unit.get(["TEST_TIMEOUT"])])
+ if timeout:
+ timeout = timeout[0]
+ else:
+ timeout = '0'
+ fork_mode = unit.get('TEST_FORK_MODE').split() or ''
+ split_factor = unit.get('TEST_SPLIT_FACTOR') or ''
+ test_size = unit.get('TEST_SIZE_NAME') or ''
+ test_cwd = unit.get('TEST_CWD_VALUE') or ''
+
+ unit_path = unit.path()
+ test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
+ tags = _get_test_tags(unit)
+ requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
+ test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
+ data, data_files = get_canonical_test_resources(unit)
+ test_data += data
+ python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
+ yt_spec = get_values_list(unit, 'TEST_YT_SPEC_VALUE')
+ if not binary_path:
+ binary_path = os.path.join(unit_path, unit.filename())
+ _dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, runner_bin=runner_bin, yt_spec=yt_spec, data_files=data_files)
+
+
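+# Parses SYSTEM_PROPERTIES arguments into a list of property dicts;
+# returns (props, error_message), where error_message is None on success.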
+def extract_java_system_properties(unit, args):
+ if len(args) % 2:
+ return [], 'Wrong use of SYSTEM_PROPERTIES in {}: odd number of arguments'.format(unit.path())
+
+ props = []
+ for x, y in zip(args[::2], args[1::2]):
+ if x == 'FILE':
+ if y.startswith('${BINDIR}') or y.startswith('${ARCADIA_BUILD_ROOT}') or y.startswith('/'):
+ return [], 'Wrong use of SYSTEM_PROPERTIES in {}: absolute/build file path {}'.format(unit.path(), y)
+
+ y = _common.rootrel_arc_src(y, unit)
+ if not os.path.exists(unit.resolve('$S/' + y)):
+ return [], 'Wrong use of SYSTEM_PROPERTIES in {}: can\'t resolve {}'.format(unit.path(), y)
+
+ y = '${ARCADIA_ROOT}/' + y
+ props.append({'type': 'file', 'path': y})
+ else:
+ props.append({'type': 'inline', 'key': x, 'value': y})
+
+ return props, None
+
+
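+# Handler for java tests (JTEST/JTEST_FOR/JUNIT5 modules): builds the junit dart record,
+# including classpath configuration and JDK resources.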
+def onjava_test(unit, *args):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+
+ assert unit.get('MODULE_TYPE') is not None
+
+ if unit.get('MODULE_TYPE') == 'JTEST_FOR':
+ if not unit.get('UNITTEST_DIR'):
+ ymake.report_configure_error('skip JTEST_FOR in {}: no args provided'.format(unit.path()))
+ return
+
+ java_cp_arg_type = unit.get('JAVA_CLASSPATH_CMD_TYPE_VALUE') or 'MANIFEST'
+ if java_cp_arg_type not in ('MANIFEST', 'COMMAND_FILE', 'LIST'):
+ ymake.report_configure_error('{}: TEST_JAVA_CLASSPATH_CMD_TYPE({}) is invalid. Choose one of MANIFEST, COMMAND_FILE or LIST'.format(unit.path(), java_cp_arg_type))
+ return
+
+ unit_path = unit.path()
+ path = _common.strip_roots(unit_path)
+ if unit.get('ADD_SRCDIR_TO_TEST_DATA') == "yes":
+ unit.ondata_files(get_norm_unit_path(unit))
+
+ test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
+ test_data.append('arcadia/build/scripts/run_junit.py')
+ test_data.append('arcadia/build/scripts/unpacking_jtest_runner.py')
+
+ data, data_files = get_canonical_test_resources(unit)
+ test_data += data
+
+ props, error_msg = extract_java_system_properties(unit, get_values_list(unit, 'SYSTEM_PROPERTIES_VALUE'))
+ if error_msg:
+ ymake.report_configure_error(error_msg)
+ return
+ for prop in props:
+ if prop['type'] == 'file':
+ test_data.append(prop['path'].replace('${ARCADIA_ROOT}', 'arcadia'))
+
+ props = base64.b64encode(json.dumps(props, encoding='utf-8'))
+
+ test_cwd = unit.get('TEST_CWD_VALUE') or '' # TODO: validate test_cwd value
+
+ if unit.get('MODULE_TYPE') == 'JUNIT5':
+ script_rel_path = 'junit5.test'
+ else:
+ script_rel_path = 'junit.test'
+
+ ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes'
+ test_record = {
+ 'SOURCE-FOLDER-PATH': path,
+ 'TEST-NAME': '-'.join([os.path.basename(os.path.dirname(path)), os.path.basename(path)]),
+ 'SCRIPT-REL-PATH': script_rel_path,
+ 'TEST-TIMEOUT': unit.get('TEST_TIMEOUT') or '',
+ 'TESTED-PROJECT-NAME': path,
+ 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
+ # 'TEST-PRESERVE-ENV': 'da',
+ 'TEST-DATA': serialize_list(sorted(_common.filter_out_by_keyword(test_data, 'AUTOUPDATED'))),
+ 'FORK-MODE': unit.get('TEST_FORK_MODE') or '',
+ 'SPLIT-FACTOR': unit.get('TEST_SPLIT_FACTOR') or '',
+ 'CUSTOM-DEPENDENCIES': ' '.join(get_values_list(unit, 'TEST_DEPENDS_VALUE')),
+ 'TAG': serialize_list(_get_test_tags(unit)),
+ 'SIZE': unit.get('TEST_SIZE_NAME') or '',
+ 'REQUIREMENTS': serialize_list(get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')),
+ 'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
+
+ # JTEST/JTEST_FOR only
+ 'MODULE_TYPE': unit.get('MODULE_TYPE'),
+ 'UNITTEST_DIR': unit.get('UNITTEST_DIR') or '',
+ 'JVM_ARGS': serialize_list(get_values_list(unit, 'JVM_ARGS_VALUE')),
+ 'SYSTEM_PROPERTIES': props,
+ 'TEST-CWD': test_cwd,
+ 'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
+ 'JAVA_CLASSPATH_CMD_TYPE': java_cp_arg_type,
+ 'NO_JBUILD': 'yes' if ymake_java_test else 'no',
+ 'JDK_RESOURCE': 'JDK' + (unit.get('JDK_VERSION') or unit.get('JDK_REAL_VERSION') or '_DEFAULT'),
+ 'JDK_FOR_TESTS': 'JDK' + (unit.get('JDK_VERSION') or unit.get('JDK_REAL_VERSION') or '_DEFAULT') + '_FOR_TESTS',
+ 'YT-SPEC': serialize_list(get_unit_list_variable(unit, 'TEST_YT_SPEC_VALUE')),
+ }
+ test_classpath_origins = unit.get('TEST_CLASSPATH_VALUE')
+ if test_classpath_origins:
+ test_record['TEST_CLASSPATH_ORIGINS'] = test_classpath_origins
+ test_record['TEST_CLASSPATH'] = '${TEST_CLASSPATH_MANAGED}'
+ elif ymake_java_test:
+ test_record['TEST_CLASSPATH'] = '${DART_CLASSPATH}'
+ test_record['TEST_CLASSPATH_DEPS'] = '${DART_CLASSPATH_DEPS}'
+ if unit.get('UNITTEST_DIR'):
+ test_record['TEST_JAR'] = '${UNITTEST_MOD}'
+ else:
+ test_record['TEST_JAR'] = '{}/{}.jar'.format(unit.get('MODDIR'), unit.get('REALPRJNAME'))
+
+ data = dump_test(unit, test_record)
+ if data:
+ unit.set_property(['DART_DATA', data])
+
+
+def onjava_test_deps(unit, *args):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+
+ assert unit.get('MODULE_TYPE') is not None
+ assert len(args) == 1
+ mode = args[0]
+
+ path = get_norm_unit_path(unit)
+ ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes'
+
+ test_record = {
+ 'SOURCE-FOLDER-PATH': path,
+ 'TEST-NAME': '-'.join([os.path.basename(os.path.dirname(path)), os.path.basename(path), 'dependencies']).strip('-'),
+ 'SCRIPT-REL-PATH': 'java.dependency.test',
+ 'TEST-TIMEOUT': '',
+ 'TESTED-PROJECT-NAME': path,
+ 'TEST-DATA': '',
+ 'TEST_PARTITION': 'SEQUENTIAL',
+ 'FORK-MODE': '',
+ 'SPLIT-FACTOR': '',
+ 'CUSTOM-DEPENDENCIES': ' '.join(get_values_list(unit, 'TEST_DEPENDS_VALUE')),
+ 'TAG': '',
+ 'SIZE': 'SMALL',
+ 'IGNORE_CLASSPATH_CLASH': ' '.join(get_values_list(unit, 'JAVA_IGNORE_CLASSPATH_CLASH_VALUE')),
+ 'NO_JBUILD': 'yes' if ymake_java_test else 'no',
+
+ # JTEST/JTEST_FOR only
+ 'MODULE_TYPE': unit.get('MODULE_TYPE'),
+ 'UNITTEST_DIR': '',
+ 'SYSTEM_PROPERTIES': '',
+ 'TEST-CWD': '',
+ }
+ if mode == 'strict':
+ test_record['STRICT_CLASSPATH_CLASH'] = 'yes'
+
+ if ymake_java_test:
+ test_record['CLASSPATH'] = '$B/{}/{}.jar ${{DART_CLASSPATH}}'.format(unit.get('MODDIR'), unit.get('REALPRJNAME'))
+
+ data = dump_test(unit, test_record)
+ unit.set_property(['DART_DATA', data])
+
+
+def _get_test_tags(unit, spec_args=None):
+ if spec_args is None:
+ spec_args = {}
+ tags = spec_args.get('TAG', []) + get_values_list(unit, 'TEST_TAGS_VALUE')
+ tags = set(tags)
+ # DEVTOOLS-7571
+ if unit.get('SKIP_TEST_VALUE') and consts.YaTestTags.Fat in tags:
+ tags.add(consts.YaTestTags.NotAutocheck)
+
+ return tags
+
+
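+# Serializes a python-style test record (pytest, exectest, etc.) into DART_DATA.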
+def _dump_test(
+ unit,
+ test_type,
+ test_files,
+ timeout,
+ test_dir,
+ custom_deps,
+ test_data,
+ python_paths,
+ split_factor,
+ fork_mode,
+ test_size,
+ tags,
+ requirements,
+ binary_path='',
+ old_pytest=False,
+ test_cwd=None,
+ runner_bin=None,
+ yt_spec=None,
+ data_files=None
+):
+
+ if test_type == "PY_TEST":
+ script_rel_path = "py.test"
+ else:
+ script_rel_path = test_type
+
+ unit_path = unit.path()
+ fork_test_files = unit.get('FORK_TEST_FILES_MODE')
+ fork_mode = ' '.join(fork_mode) if fork_mode else ''
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
+ if test_cwd:
+ test_cwd = test_cwd.replace("$TEST_CWD_VALUE", "").replace('"MACRO_CALLS_DELIM"', "").strip()
+ test_name = os.path.basename(binary_path)
+ test_record = {
+ 'TEST-NAME': os.path.splitext(test_name)[0],
+ 'TEST-TIMEOUT': timeout,
+ 'SCRIPT-REL-PATH': script_rel_path,
+ 'TESTED-PROJECT-NAME': test_name,
+ 'SOURCE-FOLDER-PATH': test_dir,
+ 'CUSTOM-DEPENDENCIES': " ".join(custom_deps),
+ 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
+ # 'TEST-PRESERVE-ENV': 'da',
+ 'TEST-DATA': serialize_list(sorted(_common.filter_out_by_keyword(test_data, 'AUTOUPDATED'))),
+ 'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
+ 'SPLIT-FACTOR': split_factor,
+ 'TEST_PARTITION': unit.get('TEST_PARTITION') or 'SEQUENTIAL',
+ 'FORK-MODE': fork_mode,
+ 'FORK-TEST-FILES': fork_test_files,
+ 'TEST-FILES': serialize_list(test_files),
+ 'SIZE': test_size,
+ 'TAG': serialize_list(tags),
+ 'REQUIREMENTS': serialize_list(requirements),
+ 'USE_ARCADIA_PYTHON': use_arcadia_python or '',
+ 'OLD_PYTEST': 'yes' if old_pytest else 'no',
+ 'PYTHON-PATHS': serialize_list(python_paths),
+ 'TEST-CWD': test_cwd or '',
+ 'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
+ 'BUILD-FOLDER-PATH': _common.strip_roots(unit_path),
+ 'BLOB': unit.get('TEST_BLOB_DATA') or '',
+ 'CANONIZE_SUB_PATH': unit.get('CANONIZE_SUB_PATH') or '',
+ }
+ if binary_path:
+ test_record['BINARY-PATH'] = _common.strip_roots(binary_path)
+ if runner_bin:
+ test_record['TEST-RUNNER-BIN'] = runner_bin
+ if yt_spec:
+ test_record['YT-SPEC'] = serialize_list(yt_spec)
+ data = dump_test(unit, test_record)
+ if data:
+ unit.set_property(["DART_DATA", data])
+
+
+def onsetup_pytest_bin(unit, *args):
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') == "yes"
+ if use_arcadia_python:
+ unit.onresource(['-', 'PY_MAIN={}'.format("library.python.pytest.main:main")]) # XXX
+ unit.onadd_pytest_bin(list(args))
+ else:
+ unit.onno_platform()
+ unit.onadd_pytest_script(["PY_TEST"])
+
+
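+# Each RUN macro invocation appends its command line to EXECTEST_COMMAND_VALUE;
+# onsetup_exectest later packs the accumulated commands into an exectest dart record.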
+def onrun(unit, *args):
+ exectest_cmd = unit.get(["EXECTEST_COMMAND_VALUE"]) or ''
+ exectest_cmd += "\n" + subprocess.list2cmdline(args)
+ unit.set(["EXECTEST_COMMAND_VALUE", exectest_cmd])
+
+
+def onsetup_exectest(unit, *args):
+ command = unit.get(["EXECTEST_COMMAND_VALUE"])
+ if command is None:
+ ymake.report_configure_error("EXECTEST must have at least one RUN macro")
+ return
+ command = command.replace("$EXECTEST_COMMAND_VALUE", "")
+ if "PYTHON_BIN" in command:
+ unit.ondepends('contrib/tools/python')
+ unit.set(["TEST_BLOB_DATA", base64.b64encode(command)])
+ add_test_to_dart(unit, "exectest", binary_path=os.path.join(unit.path(), unit.filename()).replace(".pkg", ""))
+
+
+def onsetup_run_python(unit):
+ if unit.get("USE_ARCADIA_PYTHON") == "yes":
+ unit.ondepends('contrib/tools/python')
+
+
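+# Looks for the canonical results file in the unit's canonical data directory (honoring CANONIZE_SUB_PATH)
+# and extracts external resource references (MDS/Sandbox) from it.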
+def get_canonical_test_resources(unit):
+ unit_path = unit.path()
+ canon_data_dir = os.path.join(unit.resolve(unit_path), CANON_DATA_DIR_NAME, unit.get('CANONIZE_SUB_PATH') or '')
+
+ try:
+ _, dirs, files = next(os.walk(canon_data_dir))
+ except StopIteration:
+ # path doesn't exist
+ return [], []
+
+ if CANON_RESULT_FILE_NAME in files:
+ return _get_canonical_data_resources_v2(os.path.join(canon_data_dir, CANON_RESULT_FILE_NAME), unit_path)
+ return [], []
+
+
+def _load_canonical_file(filename, unit_path):
+ try:
+ with open(filename) as results_file:
+ return json.load(results_file)
+ except Exception as e:
+ print>>sys.stderr, "malformed canonical data in {}: {} ({})".format(unit_path, e, filename)
+ return {}
+
+
+def _get_resource_from_uri(uri):
+ m = CANON_MDS_RESOURCE_REGEX.match(uri)
+ if m:
+ res_id = m.group(1)
+ return "{}:{}".format(MDS_SCHEME, res_id)
+
+ m = CANON_SBR_RESOURCE_REGEX.match(uri)
+ if m:
+ # There might be conflicts between resources, because all resources in sandbox have the name 'resource.tar.gz'.
+ # That's why the '=' notation is used to specify an explicit path for the resource.
+ uri = m.group(1)
+ res_id = m.group(2)
+ return "{}={}".format(uri, '/'.join([CANON_OUTPUT_STORAGE, res_id]))
+
+
+def _get_external_resources_from_canon_data(data):
+ # The method should work with all canonization formats:
+ # result.json: {'uri': X, 'checksum': Y}
+ # result.json: {'testname': {'uri': X, 'checksum': Y}}
+ # result.json: {'testname': [{'uri': X, 'checksum': Y}]}
+ # There is also a pitfall: if a user returns {'uri': 1} from a test, the machinery will fail.
+ # That's why we check for the presence of both 'uri' and 'checksum' fields
+ # (it is still fragile, since a user may return {'uri': X, 'checksum': Y}; the canonization format needs to be unified).
+ res = set()
+
+ if isinstance(data, dict):
+ if 'uri' in data and 'checksum' in data:
+ resource = _get_resource_from_uri(data['uri'])
+ if resource:
+ res.add(resource)
+ else:
+ for k, v in data.iteritems():
+ res.update(_get_external_resources_from_canon_data(v))
+ elif isinstance(data, list):
+ for e in data:
+ res.update(_get_external_resources_from_canon_data(e))
+
+ return res
+
+
+def _get_canonical_data_resources_v2(filename, unit_path):
+ return (_get_external_resources_from_canon_data(_load_canonical_file(filename, unit_path)), [filename])
+
+
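+# Handler for the _ADD_LINTER_CHECK macro: validates linter configs, FILES and EXTRA_PARAMS,
+# registers dependencies and emits a custom_lint dart record.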
+def on_add_linter_check(unit, *args):
+ if unit.get("TIDY") == "yes":
+ return
+ source_root_from_prefix = '${ARCADIA_ROOT}/'
+ source_root_to_prefix = '$S/'
+ unlimited = -1
+
+ if unit.get('_NO_LINT_VALUE') in ("none", "none_internal"):
+ return
+
+ keywords = {
+ "DEPENDS": unlimited,
+ "FILES": unlimited,
+ "CONFIGS": unlimited,
+ "GLOBAL_RESOURCES": unlimited,
+ "FILE_PROCESSING_TIME": 1,
+ "EXTRA_PARAMS": unlimited
+ }
+ flat_args, spec_args = _common.sort_by_keywords(keywords, args)
+ if len(flat_args) != 2:
+ unit.message(['ERROR', '_ADD_LINTER_CHECK params: expected 2 free parameters'])
+ return
+
+ configs = []
+ for cfg in spec_args.get('CONFIGS', []):
+ filename = unit.resolve(source_root_to_prefix + cfg)
+ if not os.path.exists(filename):
+ unit.message(['ERROR', 'Configuration file {} is not found'.format(filename)])
+ return
+ configs.append(cfg)
+ deps = []
+
+ lint_name, linter = flat_args
+ deps.append(os.path.dirname(linter))
+
+ test_files = []
+ for path in spec_args.get('FILES', []):
+ if path.startswith(source_root_from_prefix):
+ test_files.append(path.replace(source_root_from_prefix, source_root_to_prefix, 1))
+ elif path.startswith(source_root_to_prefix):
+ test_files.append(path)
+ if not test_files:
+ unit.message(['WARN', 'No files to lint for {}'.format(lint_name)])
+ return
+ for arg in spec_args.get('EXTRA_PARAMS', []):
+ if '=' not in arg:
+ unit.message(['WARN', 'Wrong EXTRA_PARAMS value: "{}". Values must have format "name=value".'.format(arg)])
+ return
+
+ deps += spec_args.get('DEPENDS', [])
+
+ for dep in deps:
+ unit.ondepends(dep)
+
+ for resource in spec_args.get('GLOBAL_RESOURCES', []):
+ unit.onpeerdir(resource)
+
+ test_record = {
+ 'TEST-NAME': lint_name,
+ 'SCRIPT-REL-PATH': 'custom_lint',
+ 'TESTED-PROJECT-NAME': unit.name(),
+ 'SOURCE-FOLDER-PATH': get_norm_unit_path(unit),
+ 'CUSTOM-DEPENDENCIES': " ".join(deps),
+ 'TEST-DATA': '',
+ 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
+ 'TEST-TIMEOUT': '',
+ 'SPLIT-FACTOR': '',
+ 'TEST_PARTITION': 'SEQUENTIAL',
+ 'FORK-MODE': '',
+ 'FORK-TEST-FILES': '',
+ 'SIZE': 'SMALL',
+ 'TAG': '',
+ 'USE_ARCADIA_PYTHON': unit.get('USE_ARCADIA_PYTHON') or '',
+ 'OLD_PYTEST': 'no',
+ 'PYTHON-PATHS': '',
+ # TODO remove FILES, see DEVTOOLS-7052
+ 'FILES': serialize_list(test_files),
+ 'TEST-FILES': serialize_list(test_files),
+ # Linter specific parameters
+ # TODO Add configs to DATA. See YMAKE-427
+ 'LINT-CONFIGS': serialize_list(configs),
+ 'LINT-NAME': lint_name,
+ 'LINT-FILE-PROCESSING-TIME': spec_args.get('FILE_PROCESSING_TIME', [''])[0],
+ 'LINT-EXTRA-PARAMS': serialize_list(spec_args.get('EXTRA_PARAMS', [])),
+ 'LINTER': linter,
+ }
+ data = dump_test(unit, test_record)
+ if data:
+ unit.set_property(["DART_DATA", data])
diff --git a/build/plugins/ytest2.py b/build/plugins/ytest2.py
new file mode 100644
index 0000000000..0a34263c35
--- /dev/null
+++ b/build/plugins/ytest2.py
@@ -0,0 +1,54 @@
+import os
+import _common
+
+
+def dir_stmts(unit, dir):
+ unit.onpeerdir(dir)
+ unit.onsrcdir(os.sep.join([dir, 'tests']))
+
+
+def pytest_base(unit, args):
+ related_prj_dir = args[0]
+ related_prj_name = args[1]
+ dir_stmts(unit, related_prj_dir)
+ ytest_base(unit, related_prj_dir, related_prj_name, args[2:])
+ unit.set(['ADDITIONAL_PATH', '--test-related-path ${ARCADIA_ROOT}/test'])
+
+
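+# Stores test metadata directly in unit variables (TEST-NAME, SCRIPT-REL-PATH, etc.) rather than in a serialized dart record.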
+def ytest_base(unit, related_prj_dir, related_prj_name, args):
+ keywords = {"DEPENDS": -1, "DATA": -1}
+ flat_args, spec_args = _common.sort_by_keywords(keywords, args)
+ unit.set(['TEST-NAME', os.path.basename(flat_args[0])])
+ unit.set(['SCRIPT-REL-PATH', flat_args[1]])
+ unit.set(['SOURCE-FOLDER-PATH', related_prj_dir])
+ unit.set(['BUILD-FOLDER-PATH', os.path.join('$B', related_prj_dir)])
+ unit.set(['TESTED-BINARY-PATH', flat_args[0]])
+
+ custom_deps = ' '.join(spec_args["DEPENDS"]) if "DEPENDS" in spec_args else ''
+ unit.set(['CUSTOM-DEPENDENCIES', custom_deps])
+ data_lst = spec_args.get('DATA', []) + (unit.get(['__test_data']) or '').split(' ')
+ data_lst.sort()
+ data = '\"' + ';'.join(data_lst) + '\"' if data_lst else ''
+ unit.set(['TEST-DATA', data])
+
+ related_dirs_list = ['{ARCADIA_ROOT}/devtools/${YA_ROOT}', '${ARCADIA_ROOT}/devtools/${YA_ROOT}', '$RELATED_TARGET_SRCDIR']
+ related_dirs_value = []
+ for rel in related_dirs_list:
+ related_dirs_value.extend(['--test-related-path', rel])
+ unit.set(['RELATED_DIRS', ' '.join(related_dirs_value)])
+ unit.set(['TEST_KV', '${{kv;hide:"test_related_dirs {}"}}'.format(' '.join(related_dirs_list))])
+
+
+def on_unittest(unit, *args):
+ related_prj_name = args[0]
+ related_prj_dir = args[1][3:]
+ unit.set(['TEST_TYPE', '${kv;hide:"test-type unittest"}'])
+ ytest_base(unit, related_prj_dir, related_prj_name, args)
+
+
+def on_ytest(unit, *args):
+ pytest_base(unit, args)
+
+
+def on_py_test(unit, *args):
+ pytest_base(unit, args)