| field | value |
|---|---|
| author | alexv-smirnov <alex@ydb.tech>, 2023-06-13 11:05:01 +0300 |
| committer | alexv-smirnov <alex@ydb.tech>, 2023-06-13 11:05:01 +0300 |
| commit | bf0f13dd39ee3e65092ba3572bb5b1fcd125dcd0 (patch) |
| tree | 1d1df72c0541a59a81439842f46d95396d3e7189 /build/plugins |
| parent | 8bfdfa9a9bd19bddbc58d888e180fbd1218681be (diff) |
| download | ydb-bf0f13dd39ee3e65092ba3572bb5b1fcd125dcd0.tar.gz |
add ymake export to ydb
Diffstat (limited to 'build/plugins')
87 files changed, 9600 insertions, 0 deletions
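The bulk of this commit is the ymake Python plugin layer shown in the diff below. The entry points follow one convention: a module-level `on<MACRO>`/`on_<macro>` function receives the configuring `unit` object plus the raw macro arguments, and shared helpers from `build/plugins/_common.py` (also added here) handle the argument parsing. As a rough illustration of how one such helper behaves, here is a minimal sketch of `sort_by_keywords`, which `cp.py` below uses in its `oncopy` handler. The sketch is not part of the commit: the sample argument values are invented, and it assumes `build/plugins` is importable as plain Python.

```python
# Minimal sketch (not part of the commit): exercising sort_by_keywords from
# build/plugins/_common.py. Assumes build/plugins is on sys.path; the argument
# values below are invented for illustration.
from _common import sort_by_keywords

# Keyword spec as used by cp.py's oncopy handler: the value is how many
# arguments the keyword consumes (0 makes it a boolean flag, -1 would consume
# values until the next keyword).
keywords = {'RESULT': 1, 'KEEP_DIR_STRUCT': 0, 'DESTINATION': 1, 'FROM': 1}
args = ['a.txt', 'b.txt', 'DESTINATION', 'out', 'KEEP_DIR_STRUCT']

flat, spec = sort_by_keywords(keywords, args)
assert flat == ['a.txt', 'b.txt']
assert spec == {'DESTINATION': ['out'], 'KEEP_DIR_STRUCT': True}
```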
diff --git a/build/plugins/_common.py b/build/plugins/_common.py new file mode 100644 index 0000000000..7fc1593df0 --- /dev/null +++ b/build/plugins/_common.py @@ -0,0 +1,218 @@ +import sys +import hashlib +import base64 + + +class Result(object): + pass + + +def lazy(func): + result = Result() + + def wrapper(): + try: + return result._result + except AttributeError: + result._result = func() + + return result._result + + return wrapper + + +def pathid(path): + return base64.b32encode(hashlib.md5(path).digest()).lower().strip('=') + + +def listid(l): + return pathid(str(sorted(l))) + + +def unpair(lst): + for x, y in lst: + yield x + yield y + + +def iterpair(lst): + y = None + + for x in lst: + if y: + yield (y, x) + + y = None + else: + y = x + + +def stripext(fname): + return fname[: fname.rfind('.')] + + +def tobuilddir(fname): + if not fname: + return '$B' + if fname.startswith('$S'): + return fname.replace('$S', '$B', 1) + else: + return fname + + +def before(s, ss): + p = s.find(ss) + + if p == -1: + return s + + return s[:p] + + +def sort_by_keywords(keywords, args): + flat = [] + res = {} + + cur_key = None + limit = -1 + for arg in args: + if arg in keywords: + limit = keywords[arg] + if limit == 0: + res[arg] = True + cur_key = None + limit = -1 + else: + cur_key = arg + continue + if limit == 0: + cur_key = None + limit = -1 + if cur_key: + if cur_key in res: + res[cur_key].append(arg) + else: + res[cur_key] = [arg] + limit -= 1 + else: + flat.append(arg) + return (flat, res) + + +def get_norm_unit_path(unit, extra=None): + path = strip_roots(unit.path()) + if extra: + return '{}/{}'.format(path, extra) + return path + + +def resolve_common_const(path): + if path.startswith('${ARCADIA_ROOT}'): + return path.replace('${ARCADIA_ROOT}', '$S', 1) + if path.startswith('${ARCADIA_BUILD_ROOT}'): + return path.replace('${ARCADIA_BUILD_ROOT}', '$B', 1) + return path + + +def resolve_to_abs_path(path, source_root, build_root): + if path.startswith('$S') and source_root is not None: + return path.replace('$S', source_root, 1) + if path.startswith('$B') and build_root is not None: + return path.replace('$B', build_root, 1) + return path + + +def resolve_to_ymake_path(path): + return resolve_to_abs_path(path, '${ARCADIA_ROOT}', '${ARCADIA_BUILD_ROOT}') + + +def join_intl_paths(*args): + return '/'.join(args) + + +def get(fun, num): + return fun()[num][0] + + +def make_tuples(arg_list): + def tpl(): + for x in arg_list: + yield (x, []) + + return list(tpl()) + + +def resolve_includes(unit, src, paths): + return unit.resolve_include([src] + paths) if paths else [] + + +def rootrel_arc_src(src, unit): + if src.startswith('${ARCADIA_ROOT}/'): + return src[16:] + + if src.startswith('${ARCADIA_BUILD_ROOT}/'): + return src[22:] + + elif src.startswith('${CURDIR}/'): + return unit.path()[3:] + '/' + src[10:] + + else: + resolved = unit.resolve_arc_path(src) + + if resolved.startswith('$S/'): + return resolved[3:] + + return src # leave as is + + +def skip_build_root(x): + if x.startswith('${ARCADIA_BUILD_ROOT}'): + return x[len('${ARCADIA_BUILD_ROOT}') :].lstrip('/') + + return x + + +def get_interpreter_path(): + interpreter_path = [sys.executable] + if 'ymake' in interpreter_path[0]: + interpreter_path.append('--python') + return interpreter_path + + +def filter_out_by_keyword(test_data, keyword): + def _iterate(): + i = 0 + while i < len(test_data): + if test_data[i] == keyword: + i += 2 + else: + yield test_data[i] + i += 1 + + return list(_iterate()) + + +def generate_chunks(lst, 
chunk_size): + for i in xrange(0, len(lst), chunk_size): + yield lst[i : (i + chunk_size)] + + +def strip_roots(path): + for prefix in ["$B/", "$S/"]: + if path.startswith(prefix): + return path[len(prefix) :] + return path + + +def to_yesno(x): + return "yes" if x else "no" + + +def get_no_lint_value(unit): + import ymake + + supported_no_lint_values = ('none', 'none_internal', 'ktlint') + no_lint_value = unit.get('_NO_LINT_VALUE') + if no_lint_value and no_lint_value not in supported_no_lint_values: + ymake.report_configure_error('Unsupported value for NO_LINT macro: {}'.format(no_lint_value)) + return no_lint_value diff --git a/build/plugins/_requirements.py b/build/plugins/_requirements.py new file mode 100644 index 0000000000..40c50f8791 --- /dev/null +++ b/build/plugins/_requirements.py @@ -0,0 +1,176 @@ +import lib.test_const as consts +import re +import lib._metric_resolvers as mr + +CANON_SB_VAULT_REGEX = re.compile(r"\w+=(value|file):[-\w]+:\w+") +CANON_YAV_REGEX = re.compile(r"\w+=(value|file):sec-[a-z0-9]+:\w+") +VALID_DNS_REQUIREMENTS = ("default", "local", "dns64") +VALID_NETWORK_REQUIREMENTS = ("full", "restricted") + + +def check_cpu(suite_cpu_requirements, test_size, is_kvm=False): + min_cpu_requirements = consts.TestRequirementsConstants.MinCpu + max_cpu_requirements = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Cpu) + if isinstance(suite_cpu_requirements, str): + if all( + consts.TestRequirementsConstants.is_all_cpu(req) for req in (max_cpu_requirements, suite_cpu_requirements) + ): + return None + return "Wrong 'cpu' requirements: {}, should be in [{}..{}] for {}-size tests".format( + suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size + ) + + if not isinstance(suite_cpu_requirements, int): + return "Wrong 'cpu' requirements: {}, should be integer".format(suite_cpu_requirements) + + if ( + suite_cpu_requirements < min_cpu_requirements + or suite_cpu_requirements > consts.TestRequirementsConstants.get_cpu_value(max_cpu_requirements) + ): + return "Wrong 'cpu' requirement: {}, should be in [{}..{}] for {}-size tests".format( + suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size + ) + + return None + + +# TODO: Remove is_kvm param when there will be guarantees on RAM +def check_ram(suite_ram_requirements, test_size, is_kvm=False): + if not isinstance(suite_ram_requirements, int): + return "Wrong 'ram' requirements: {}, should be integer".format(suite_ram_requirements) + min_ram_requirements = consts.TestRequirementsConstants.MinRam + max_ram_requirements = ( + consts.MAX_RAM_REQUIREMENTS_FOR_KVM + if is_kvm + else consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Ram) + ) + if suite_ram_requirements < min_ram_requirements or suite_ram_requirements > max_ram_requirements: + err_msg = "Wrong 'ram' requirements: {}, should be in [{}..{}] for {}-size tests".format( + suite_ram_requirements, min_ram_requirements, max_ram_requirements, test_size + ) + if is_kvm: + err_msg += ' with kvm requirements' + return err_msg + return None + + +def check_ram_disk(suite_ram_disk, test_size, is_kvm=False): + min_ram_disk = consts.TestRequirementsConstants.MinRamDisk + max_ram_disk = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.RamDisk) + if isinstance(suite_ram_disk, str): + if all(consts.TestRequirementsConstants.is_all_ram_disk(req) for req in (max_ram_disk, suite_ram_disk)): + return None + return "Wrong 'ram_disk' requirements: {}, should be in 
[{}..{}] for {}-size tests".format( + suite_ram_disk, 0, max_ram_disk, test_size + ) + + if not isinstance(suite_ram_disk, int): + return "Wrong 'ram_disk' requirements: {}, should be integer".format(suite_ram_disk) + + if suite_ram_disk < min_ram_disk or suite_ram_disk > consts.TestRequirementsConstants.get_ram_disk_value( + max_ram_disk + ): + return "Wrong 'ram_disk' requirement: {}, should be in [{}..{}] for {}-size tests".format( + suite_ram_disk, min_ram_disk, max_ram_disk, test_size + ) + + return None + + +def validate_sb_vault(name, value): + if not CANON_SB_VAULT_REGEX.match(value): + return "sb_vault value '{}' should follow pattern <ENV_NAME>=<value|file>:<owner>:<vault key>".format(value) + + +def validate_yav_vault(name, value): + if not CANON_YAV_REGEX.match(value): + return "yav value '{}' should follow pattern <ENV_NAME>=<value|file>:<sec-id>:<key>".format(value) + + +def validate_numerical_requirement(name, value): + if mr.resolve_value(value) is None: + return "Cannot convert [[imp]]{}[[rst]] to the proper [[imp]]{}[[rst]] requirement value".format(value, name) + + +def validate_choice_requirement(name, val, valid): + if val not in valid: + return "Unknown [[imp]]{}[[rst]] requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format( + name, val, ", ".join(valid) + ) + + +def validate_force_sandbox_requirement( + name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, check_func +): + if is_force_sandbox or not in_autocheck or is_fuzzing or is_ytexec_run: + if value == 'all': + return + return validate_numerical_requirement(name, value) + error_msg = validate_numerical_requirement(name, value) + if error_msg: + return error_msg + return check_func(mr.resolve_value(value), test_size, is_kvm) + + +def validate_ram_disk_requirement( + name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, ram +): + error_msg = validate_force_sandbox_requirement( + name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, check_ram_disk + ) + if error_msg: + return error_msg + if is_force_sandbox or not in_autocheck or test_size == consts.TestSize.Large: + return + if int(value) > int(ram): + return "Wrong 'ram_disk' value, 'ram_disk':{} should be no more than 'ram':{}".format(value, ram) + return None + + +# TODO: Remove is_kvm param when there will be guarantees on RAM +def validate_requirement( + req_name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, requirements +): + req_checks = { + 'container': validate_numerical_requirement, + 'cpu': lambda n, v: validate_force_sandbox_requirement( + n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, check_cpu + ), + 'disk_usage': validate_numerical_requirement, + 'dns': lambda n, v: validate_choice_requirement(n, v, VALID_DNS_REQUIREMENTS), + 'kvm': None, + 'network': lambda n, v: validate_choice_requirement(n, v, VALID_NETWORK_REQUIREMENTS), + 'ram': lambda n, v: validate_force_sandbox_requirement( + n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, check_ram + ), + 'ram_disk': lambda n, v: validate_ram_disk_requirement( + n, + v, + test_size, + is_force_sandbox, + in_autocheck, + is_fuzzing, + is_kvm, + is_ytexec_run, + requirements.get( + 'ram', consts.TestSize.get_default_requirements(test_size).get(consts.TestRequirements.Ram) + ), + ), + 'sb': None, + 'sb_vault': validate_sb_vault, + 'yav': validate_yav_vault, + } + + if req_name 
not in req_checks: + return "Unknown requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format( + req_name, ", ".join(sorted(req_checks)) + ) + + if req_name in ('container', 'disk') and not is_force_sandbox: + return "Only [[imp]]LARGE[[rst]] tests without [[imp]]ya:force_distbuild[[rst]] tag can have [[imp]]{}[[rst]] requirement".format( + req_name + ) + + check_func = req_checks[req_name] + if check_func: + return check_func(req_name, value) diff --git a/build/plugins/_xsyn_includes.py b/build/plugins/_xsyn_includes.py new file mode 100644 index 0000000000..d458679da0 --- /dev/null +++ b/build/plugins/_xsyn_includes.py @@ -0,0 +1,62 @@ +def get_include_callback(): + """ + .. function: get_include_callback returns function that processes each DOM element to get xsyn include from it, and it's aware of directory with all the xsyns. + + :param xsyn_dir directory with xsyns. + """ + + def get_include(element): + """ + .. function: get_include returns list of includes from this DOM element. + + :param element DOM element. + """ + res = [] + if element.nodeType == element.ELEMENT_NODE and element.nodeName == "parse:include": + attrs = element.attributes + for i in xrange(attrs.length): + attr = attrs.item(i) + if attr.nodeName == "path": + include_filename = attr.nodeValue + res.append(include_filename) + return res + + return get_include + + +def traverse_xsyn(element, on_element): + """ + .. function: traverse_xsyn traverses element and returns concatenated lists of calling on_element of each element. + + :param element element in DOM. + :param on_element callback on element that returns list of values. + """ + res = on_element(element) + for child in element.childNodes: + child_results = traverse_xsyn(child, on_element) + res += child_results + return res + + +def process_xsyn(filepath, on_element): + """ + .. function: process_xsyn processes xsyn file and return concatenated list of calling on_element on each DOM element. + + :param filepath path to xsyn file + :param on_element callback called on each element in xsyn that returns list of values. 
+ + """ + + # keep a stack of filepathes if on_element calls process_xsyn recursively + with open(filepath) as xsyn_file: + from xml.dom.minidom import parse + + tree = parse(xsyn_file) + tree.normalize() + res = traverse_xsyn(tree, on_element) + return res + + +def get_all_includes(filepath): + callback = get_include_callback() + return process_xsyn(filepath, callback) diff --git a/build/plugins/build_mn_files.py b/build/plugins/build_mn_files.py new file mode 100644 index 0000000000..4da76f1852 --- /dev/null +++ b/build/plugins/build_mn_files.py @@ -0,0 +1,29 @@ +from os.path import basename, splitext + + +def on_build_mns_files(unit, *args): + files = [] + name = '' + ranking_suffix = '' + check = '' + index = 0 + fml_unused_tool = '' + while index < len(args): + if args[index] == 'NAME': + index += 1 + name = args[index] + elif args[index] == 'RANKING_SUFFIX': + index += 1 + ranking_suffix = args[index] + elif args[index] == 'CHECK': + check = 'CHECK' + fml_unused_tool = unit.get('FML_UNUSED_TOOL') or '$FML_UNUSED_TOOL' + else: + files.append(args[index]) + index += 1 + + for filename in files: + file_basename, _ = splitext(basename(filename)) + asmdataname = "staticMn{0}{1}Ptr".format(ranking_suffix, file_basename) + output_name = 'mn.staticMn{0}{1}Ptr.cpp'.format(ranking_suffix, file_basename) + unit.on_build_mns_file([filename, name, output_name, ranking_suffix, check, fml_unused_tool, asmdataname]) diff --git a/build/plugins/bundle.py b/build/plugins/bundle.py new file mode 100644 index 0000000000..69e3e01681 --- /dev/null +++ b/build/plugins/bundle.py @@ -0,0 +1,22 @@ +import os + + +def onbundle(unit, *args): + """ + @usage BUNDLE(<Dir [NAME Name]>...) + + Brings build artefact from module Dir under optional Name to the current module (e.g. UNION) + If NAME is not specified, the name of the Dir's build artefact will be preserved + It makes little sense to specify BUNDLE on non-final targets and so this may stop working without prior notice. + Bundle on multimodule will select final target among multimodule variants and will fail if there are none or more than one. + """ + i = 0 + while i < len(args): + if i + 2 < len(args) and args[i + 1] == "NAME": + target, name = args[i], args[i + 2] + i += 3 + else: + target, name = args[i], os.path.basename(args[i]) + i += 1 + + unit.on_bundle_target([target, name]) diff --git a/build/plugins/code_generator.py b/build/plugins/code_generator.py new file mode 100644 index 0000000000..396c2706d8 --- /dev/null +++ b/build/plugins/code_generator.py @@ -0,0 +1,45 @@ +import re +import os + +import ymake + +pattern = re.compile( + r"#include\s*[<\"](?P<INDUCED>[^>\"]+)[>\"]|(?:@|{@)\s*(?:import|include|from)\s*[\"'](?P<INCLUDE>[^\"']+)[\"']" +) + + +class CodeGeneratorTemplateParser(object): + def __init__(self, path, unit): + self._path = path + retargeted = os.path.join(unit.path(), os.path.relpath(path, unit.resolve(unit.path()))) + with open(path, 'rb') as f: + includes, induced = CodeGeneratorTemplateParser.parse_includes(f.readlines()) + self._includes = unit.resolve_include([retargeted] + includes) if includes else [] + self._induced = unit.resolve_include([retargeted] + induced) if induced else [] + + @staticmethod + def parse_includes(lines): + includes = [] + induced = [] + + for line in lines: + for match in pattern.finditer(line): + type = match.lastgroup + if type == 'INCLUDE': + includes.append(match.group(type)) + elif type == 'INDUCED': + induced.append(match.group(type)) + else: + raise Exception("Unexpected match! 
Perhaps it is a result of an error in pattern.") + return (includes, induced) + + def includes(self): + return self._includes + + def induced_deps(self): + return {'h+cpp': self._induced} + + +def init(): + ymake.addparser('markettemplate', CodeGeneratorTemplateParser) + ymake.addparser('macro', CodeGeneratorTemplateParser) diff --git a/build/plugins/container_layers.py b/build/plugins/container_layers.py new file mode 100644 index 0000000000..5fa1530474 --- /dev/null +++ b/build/plugins/container_layers.py @@ -0,0 +1,9 @@ +from _common import rootrel_arc_src + + +def oncheck_allowed_path(unit, *args): + module_path = rootrel_arc_src(unit.path(), unit) + if not (module_path.startswith("junk") or module_path.startswith("base_layers")): + unit.message( + ["error", "Cannot create container layer in this directory. See https://st.yandex-team.ru/DTCC-1123"] + ) diff --git a/build/plugins/coverage.py b/build/plugins/coverage.py new file mode 100644 index 0000000000..64e3bda912 --- /dev/null +++ b/build/plugins/coverage.py @@ -0,0 +1,67 @@ +import re + +import _common +import lib.test_const as consts + + +def get_coverage_filter_regexp(pattern, cache={}): + return cache[pattern] if pattern in cache else cache.setdefault(pattern, re.compile(pattern)) + + +def should_be_covered(unit, filters): + if unit.get("FORCE_COVERAGE_DISABLED") == "yes": + return False + if unit.get("FORCE_COVERAGE_ENABLED") == "yes": + return True + unit_path = _common.get_norm_unit_path(unit) + return not any(pred(unit_path) for pred in filters) + + +def get_cpp_coverage_filters(unit, filters=[]): + # don`t calculate filters if it already was calculated + if filters: + return filters + + coverage_target_regexp = unit.get("COVERAGE_TARGET_REGEXP") or None + coverage_exclude_regexp = unit.get("COVERAGE_EXCLUDE_REGEXP") or None + if coverage_target_regexp: + cov_re = get_coverage_filter_regexp(coverage_target_regexp) + filters.append(lambda x: re.match(cov_re, x) is None) + if coverage_exclude_regexp: + cov_exclude_re = get_coverage_filter_regexp(coverage_exclude_regexp) + filters.append(lambda x: re.match(cov_exclude_re, x) is not None) + if unit.get("ENABLE_CONTRIB_COVERAGE") != "yes": + paths_to_exclude = ("contrib",) + filters.append(lambda x: x.startswith(paths_to_exclude)) + return filters + + +def add_cpp_coverage_ldflags(unit): + ldflags = unit.get("LDFLAGS") + changed = False + for flag in consts.COVERAGE_LDFLAGS: + if flag not in ldflags: + ldflags = ldflags + ' ' + flag + changed = True + if changed: + unit.set(["LDFLAGS", ldflags]) + + +def add_cpp_coverage_cflags(unit): + cflags = unit.get("CFLAGS") + changed = False + for flag in consts.COVERAGE_CFLAGS: + if flag not in cflags: + cflags = cflags + ' ' + flag + changed = True + if changed: + unit.set(["CFLAGS", cflags]) + + +def onset_cpp_coverage_flags(unit): + if unit.get("CLANG_COVERAGE") == "no": + return + filters = get_cpp_coverage_filters(unit) + if should_be_covered(unit, filters): + add_cpp_coverage_cflags(unit) + add_cpp_coverage_ldflags(unit) diff --git a/build/plugins/cp.py b/build/plugins/cp.py new file mode 100644 index 0000000000..5c663a3bdd --- /dev/null +++ b/build/plugins/cp.py @@ -0,0 +1,30 @@ +import os + +from _common import sort_by_keywords + + +def oncopy(unit, *args): + keywords = {'RESULT': 1, 'KEEP_DIR_STRUCT': 0, 'DESTINATION': 1, 'FROM': 1} + + flat_args, spec_args = sort_by_keywords(keywords, args) + + dest_dir = spec_args['DESTINATION'][0] if 'DESTINATION' in spec_args else '' + from_dir = spec_args['FROM'][0] if 'FROM' in spec_args 
else '' + keep_struct = 'KEEP_DIR_STRUCT' in spec_args + save_in_var = 'RESULT' in spec_args + targets = [] + + for source in flat_args: + rel_path = '' + path_list = source.split(os.sep) + filename = path_list[-1] + if keep_struct: + if path_list[:-1]: + rel_path = os.path.join(*path_list[:-1]) + source_path = os.path.join(from_dir, rel_path, filename) + target_path = os.path.join(dest_dir, rel_path, filename) + if save_in_var: + targets.append(target_path) + unit.oncopy_file([source_path, target_path]) + if save_in_var: + unit.set([spec_args["RESULT"][0], " ".join(targets)]) diff --git a/build/plugins/cpp_style.py b/build/plugins/cpp_style.py new file mode 100644 index 0000000000..6d59a1f0d4 --- /dev/null +++ b/build/plugins/cpp_style.py @@ -0,0 +1,19 @@ +import os + +from _common import sort_by_keywords + + +def on_style(unit, *args): + def it(): + yield 'DONT_PARSE' + + for f in args: + f = f[len('${ARCADIA_ROOT}') + 1 :] + + if '/generated/' in f: + continue + + yield f + yield '/cpp_style/files/' + f + + unit.onresource(list(it())) diff --git a/build/plugins/create_init_py.py b/build/plugins/create_init_py.py new file mode 100644 index 0000000000..c5fc119fc9 --- /dev/null +++ b/build/plugins/create_init_py.py @@ -0,0 +1,14 @@ +import os + +from _common import sort_by_keywords + + +def oncreate_init_py_structure(unit, *args): + if unit.get('DISTBUILD') or unit.get('AUTOCHECK'): + return + target_dir = unit.get('PY_PROTOS_FOR_DIR') + path_list = target_dir.split(os.path.sep)[1:] + inits = [os.path.join("${ARCADIA_BUILD_ROOT}", '__init__.py')] + for i in range(1, len(path_list) + 1): + inits.append(os.path.join("${ARCADIA_BUILD_ROOT}", os.path.join(*path_list[0:i]), '__init__.py')) + unit.ontouch(inits) diff --git a/build/plugins/credits.py b/build/plugins/credits.py new file mode 100644 index 0000000000..4b31518697 --- /dev/null +++ b/build/plugins/credits.py @@ -0,0 +1,23 @@ +from _common import rootrel_arc_src + + +def oncredits_disclaimer(unit, *args): + if unit.get('WITH_CREDITS'): + unit.message(["warn", "CREDITS WARNING: {}".format(' '.join(args))]) + + +def oncheck_contrib_credits(unit, *args): + module_path = rootrel_arc_src(unit.path(), unit) + excepts = set() + if 'EXCEPT' in args: + args = list(args) + except_pos = args.index('EXCEPT') + excepts = set(args[except_pos + 1 :]) + args = args[:except_pos] + for arg in args: + if module_path.startswith(arg) and not unit.get('CREDITS_TEXTS_FILE') and not unit.get('NO_CREDITS_TEXTS_FILE'): + for ex in excepts: + if module_path.startswith(ex): + break + else: + unit.message(["error", "License texts not found. 
See https://st.yandex-team.ru/DTCC-324"]) diff --git a/build/plugins/docs.py b/build/plugins/docs.py new file mode 100644 index 0000000000..92371a4c4f --- /dev/null +++ b/build/plugins/docs.py @@ -0,0 +1,53 @@ +import json + + +def extract_macro_calls(unit, macro_value_name): + if not unit.get(macro_value_name): + return [] + + return filter(None, unit.get(macro_value_name).replace('$' + macro_value_name, '').split()) + + +def macro_calls_to_dict(unit, calls): + def split_args(arg): + if arg is None: + return None + + kv = filter(None, arg.split('=')) + if len(kv) != 2: + unit.message( + [ + 'error', + 'Invalid variables specification "{}": value expected to be in form %name%=%value% (with no spaces)'.format( + arg + ), + ] + ) + return None + + return kv + + return dict(filter(None, map(split_args, calls))) + + +def get_variables(unit): + orig_variables = macro_calls_to_dict(unit, extract_macro_calls(unit, '_DOCS_VARS_VALUE')) + return {k: unit.get(k) or v for k, v in orig_variables.items()} + + +def onprocess_docs(unit, *args): + if unit.enabled('_DOCS_USE_PLANTUML'): + unit.on_docs_yfm_use_plantuml([]) + + if unit.get('_DOCS_DIR_VALUE') == '': + unit.on_yfm_docs_dir([unit.get('_YFM_DOCS_DIR_DEFAULT_VALUE')]) + + variables = get_variables(unit) + if variables: + unit.set(['_DOCS_VARS_FLAG', '--vars {}'.format(json.dumps(json.dumps(variables, sort_keys=True)))]) + + +def onprocess_mkdocs(unit, *args): + variables = get_variables(unit) + if variables: + unit.set(['_DOCS_VARS_FLAG', ' '.join(['--var {}={}'.format(k, v) for k, v in variables.items()])]) diff --git a/build/plugins/files.py b/build/plugins/files.py new file mode 100644 index 0000000000..78a6fe6169 --- /dev/null +++ b/build/plugins/files.py @@ -0,0 +1,5 @@ +def onfiles(unit, *args): + args = list(args) + for arg in args: + if not arg.startswith('${ARCADIA_BUILD_ROOT}'): + unit.oncopy_file([arg, arg]) diff --git a/build/plugins/gobuild.py b/build/plugins/gobuild.py new file mode 100644 index 0000000000..39c4b03242 --- /dev/null +++ b/build/plugins/gobuild.py @@ -0,0 +1,320 @@ +import base64 +import itertools +import md5 +import os +from _common import rootrel_arc_src, tobuilddir +import ymake + + +runtime_cgo_path = os.path.join('runtime', 'cgo') +runtime_msan_path = os.path.join('runtime', 'msan') +runtime_race_path = os.path.join('runtime', 'race') +arc_project_prefix = 'a.yandex-team.ru/' +import_runtime_cgo_false = { + 'norace': (runtime_cgo_path, runtime_msan_path, runtime_race_path), + 'race': (runtime_cgo_path, runtime_msan_path), +} +import_syscall_false = { + 'norace': (runtime_cgo_path), + 'race': (runtime_cgo_path, runtime_race_path), +} + + +def get_import_path(unit): + # std_lib_prefix = unit.get('GO_STD_LIB_PREFIX') + # unit.get() doesn't evalutate the value of variable, so the line above doesn't really work + std_lib_prefix = unit.get('GOSTD') + '/' + arc_project_prefix = unit.get('GO_ARCADIA_PROJECT_PREFIX') + vendor_prefix = unit.get('GO_CONTRIB_PROJECT_PREFIX') + + module_path = rootrel_arc_src(unit.path(), unit) + assert len(module_path) > 0 + import_path = module_path.replace('\\', '/') + if import_path.startswith(std_lib_prefix): + import_path = import_path[len(std_lib_prefix) :] + elif import_path.startswith(vendor_prefix): + import_path = import_path[len(vendor_prefix) :] + else: + import_path = arc_project_prefix + import_path + assert len(import_path) > 0 + return import_path + + +def get_appended_values(unit, key): + value = [] + raw_value = unit.get(key) + if raw_value: + value = filter(lambda x: 
len(x) > 0, raw_value.split(' ')) + assert len(value) == 0 or value[0] == '$' + key + return value[1:] if len(value) > 0 else value + + +def compare_versions(version1, version2): + def last_index(version): + index = version.find('beta') + return len(version) if index < 0 else index + + v1 = tuple(x.zfill(8) for x in version1[: last_index(version1)].split('.')) + v2 = tuple(x.zfill(8) for x in version2[: last_index(version2)].split('.')) + if v1 == v2: + return 0 + return 1 if v1 < v2 else -1 + + +def need_compiling_runtime(import_path, gostd_version): + return ( + import_path in ('runtime', 'reflect', 'syscall') + or import_path.startswith('runtime/internal/') + or compare_versions('1.17', gostd_version) >= 0 + and import_path == 'internal/bytealg' + ) + + +def go_package_name(unit): + name = unit.get('GO_PACKAGE_VALUE') + if not name: + name = unit.get('GO_TEST_IMPORT_PATH') + if name: + name = os.path.basename(os.path.normpath(name)) + elif unit.get('MODULE_TYPE') == 'PROGRAM': + name = 'main' + else: + name = unit.get('REALPRJNAME') + return name + + +def need_lint(path): + return not path.startswith('$S/vendor/') and not path.startswith('$S/contrib/') + + +def on_go_process_srcs(unit): + """ + _GO_PROCESS_SRCS() macro processes only 'CGO' files. All remaining *.go files + and other input files are currently processed by a link command of the + GO module (GO_LIBRARY, GO_PROGRAM) + """ + + srcs_files = get_appended_values(unit, '_GO_SRCS_VALUE') + + asm_files = [] + c_files = [] + cxx_files = [] + ev_files = [] + fbs_files = [] + go_files = [] + in_files = [] + proto_files = [] + s_files = [] + syso_files = [] + + classifed_files = { + '.c': c_files, + '.cc': cxx_files, + '.cpp': cxx_files, + '.cxx': cxx_files, + '.ev': ev_files, + '.fbs': fbs_files, + '.go': go_files, + '.in': in_files, + '.proto': proto_files, + '.s': asm_files, + '.syso': syso_files, + '.C': cxx_files, + '.S': s_files, + } + + # Classify files specifed in _GO_SRCS() macro by extension and process CGO_EXPORT keyword + # which can preceed C/C++ files only + is_cgo_export = False + for f in srcs_files: + _, ext = os.path.splitext(f) + ext_files = classifed_files.get(ext) + if ext_files is not None: + if is_cgo_export: + is_cgo_export = False + if ext in ('.c', '.cc', '.cpp', '.cxx', '.C'): + unit.oncopy_file_with_context([f, f, 'OUTPUT_INCLUDES', '${BINDIR}/_cgo_export.h']) + f = '${BINDIR}/' + f + else: + ymake.report_configure_error('Unmatched CGO_EXPORT keyword in SRCS() macro') + ext_files.append(f) + elif f == 'CGO_EXPORT': + is_cgo_export = True + else: + # FIXME(snermolaev): We can report an unsupported files for _GO_SRCS here + pass + if is_cgo_export: + ymake.report_configure_error('Unmatched CGO_EXPORT keyword in SRCS() macro') + + for f in go_files: + if f.endswith('_test.go'): + ymake.report_configure_error('file {} must be listed in GO_TEST_SRCS() or GO_XTEST_SRCS() macros'.format(f)) + go_test_files = get_appended_values(unit, '_GO_TEST_SRCS_VALUE') + go_xtest_files = get_appended_values(unit, '_GO_XTEST_SRCS_VALUE') + for f in go_test_files + go_xtest_files: + if not f.endswith('_test.go'): + ymake.report_configure_error( + 'file {} should not be listed in GO_TEST_SRCS() or GO_XTEST_SRCS() macros'.format(f) + ) + + is_test_module = unit.enabled('GO_TEST_MODULE') + + # Add gofmt style checks + if unit.enabled('_GO_FMT_ADD_CHECK'): + resolved_go_files = [] + go_source_files = [] if is_test_module and unit.get(['GO_TEST_FOR_DIR']) else go_files + for path in itertools.chain(go_source_files, go_test_files, 
go_xtest_files): + if path.endswith('.go'): + resolved = unit.resolve_arc_path([path]) + if resolved != path and need_lint(resolved): + resolved_go_files.append(resolved) + if resolved_go_files: + basedirs = {} + for f in resolved_go_files: + basedir = os.path.dirname(f) + if basedir not in basedirs: + basedirs[basedir] = [] + basedirs[basedir].append(f) + for basedir in basedirs: + unit.onadd_check(['gofmt'] + basedirs[basedir]) + + unit_path = unit.path() + + # Go coverage instrumentation (NOTE! go_files list is modified here) + if is_test_module and unit.enabled('GO_TEST_COVER'): + cover_info = [] + + for f in go_files: + if f.endswith('_test.go'): + continue + cover_var = 'GoCover' + base64.b32encode(f).rstrip('=') + cover_file = unit.resolve_arc_path(f) + cover_file_output = '{}/{}'.format(unit_path, os.path.basename(f)) + unit.on_go_gen_cover_go([cover_file, cover_file_output, cover_var]) + if cover_file.startswith('$S/'): + cover_file = arc_project_prefix + cover_file[3:] + cover_info.append('{}:{}'.format(cover_var, cover_file)) + + # go_files should be empty now since the initial list shouldn't contain + # any non-go or go test file. The value of go_files list will be used later + # to update the value of _GO_SRCS_VALUE + go_files = [] + unit.set(['GO_COVER_INFO_VALUE', ' '.join(cover_info)]) + + # We have cleaned up the list of files from _GO_SRCS_VALUE var and we have to update + # the value since it is used in module command line + unit.set(['_GO_SRCS_VALUE', ' '.join(itertools.chain(go_files, asm_files, syso_files))]) + + # Add go vet check + if unit.enabled('_GO_VET_ADD_CHECK') and need_lint(unit_path): + vet_report_file_name = os.path.join(unit_path, '{}{}'.format(unit.filename(), unit.get('GO_VET_REPORT_EXT'))) + unit.onadd_check(["govet", '$(BUILD_ROOT)/' + tobuilddir(vet_report_file_name)[3:]]) + + for f in ev_files: + ev_proto_file = '{}.proto'.format(f) + unit.oncopy_file_with_context([f, ev_proto_file]) + proto_files.append(ev_proto_file) + + # Process .proto files + for f in proto_files: + unit.on_go_proto_cmd(f) + + # Process .fbs files + for f in fbs_files: + unit.on_go_flatc_cmd([f, go_package_name(unit)]) + + # Process .in files + for f in in_files: + unit.onsrc(f) + + # Generate .symabis for .s files (starting from 1.12 version) + if len(asm_files) > 0: + symabis_flags = [] + gostd_version = unit.get('GOSTD_VERSION') + if compare_versions('1.16', gostd_version) >= 0: + import_path = get_import_path(unit) + symabis_flags.extend(['FLAGS', '-p', import_path]) + if need_compiling_runtime(import_path, gostd_version): + symabis_flags.append('-compiling-runtime') + unit.on_go_compile_symabis(asm_files + symabis_flags) + + # Process cgo files + cgo_files = get_appended_values(unit, '_CGO_SRCS_VALUE') + + cgo_cflags = [] + if len(c_files) + len(cxx_files) + len(s_files) + len(cgo_files) > 0: + if is_test_module: + go_test_for_dir = unit.get('GO_TEST_FOR_DIR') + if go_test_for_dir and go_test_for_dir.startswith('$S/'): + unit.onaddincl(['FOR', 'c', go_test_for_dir[3:]]) + unit.onaddincl(['FOR', 'c', unit.get('MODDIR')]) + cgo_cflags = get_appended_values(unit, 'CGO_CFLAGS_VALUE') + + for f in itertools.chain(c_files, cxx_files, s_files): + unit.onsrc([f] + cgo_cflags) + + if len(cgo_files) > 0: + if not unit.enabled('CGO_ENABLED'): + ymake.report_configure_error('trying to build with CGO (CGO_SRCS is non-empty) when CGO is disabled') + import_path = get_import_path(unit) + if import_path != runtime_cgo_path: + go_std_root = unit.get('GOSTD') + 
unit.onpeerdir(os.path.join(go_std_root, runtime_cgo_path)) + race_mode = 'race' if unit.enabled('RACE') else 'norace' + import_runtime_cgo = 'false' if import_path in import_runtime_cgo_false[race_mode] else 'true' + import_syscall = 'false' if import_path in import_syscall_false[race_mode] else 'true' + args = ( + [import_path] + + cgo_files + + ['FLAGS', '-import_runtime_cgo=' + import_runtime_cgo, '-import_syscall=' + import_syscall] + ) + unit.on_go_compile_cgo1(args) + cgo2_cflags = get_appended_values(unit, 'CGO2_CFLAGS_VALUE') + for f in cgo_files: + if f.endswith('.go'): + unit.onsrc([f[:-2] + 'cgo2.c'] + cgo_cflags + cgo2_cflags) + else: + ymake.report_configure_error('file {} should not be listed in CGO_SRCS() macros'.format(f)) + args = [go_package_name(unit)] + cgo_files + if len(c_files) > 0: + args += ['C_FILES'] + c_files + if len(s_files) > 0: + args += ['S_FILES'] + s_files + if len(syso_files) > 0: + args += ['OBJ_FILES'] + syso_files + unit.on_go_compile_cgo2(args) + + +def on_go_resource(unit, *args): + args = list(args) + files = args[::2] + keys = args[1::2] + suffix_md5 = md5.new('@'.join(args)).hexdigest() + resource_go = os.path.join("resource.{}.res.go".format(suffix_md5)) + + unit.onpeerdir(["library/go/core/resource"]) + + if len(files) != len(keys): + ymake.report_configure_error("last file {} is missing resource key".format(files[-1])) + + for i, (key, filename) in enumerate(zip(keys, files)): + if not key: + ymake.report_configure_error("file key must be non empty") + return + + if filename == "-" and "=" not in key: + ymake.report_configure_error("key \"{}\" must contain = sign".format(key)) + return + + # quote key, to avoid automatic substitution of filename by absolute + # path in RUN_PROGRAM + args[2 * i + 1] = "notafile" + args[2 * i + 1] + + files = [file for file in files if file != "-"] + unit.onrun_program( + ["library/go/core/resource/cc", "-package", go_package_name(unit), "-o", resource_go] + + list(args) + + ["IN"] + + files + + ["OUT", resource_go] + ) diff --git a/build/plugins/ios_app_settings.py b/build/plugins/ios_app_settings.py new file mode 100644 index 0000000000..0d5b233eea --- /dev/null +++ b/build/plugins/ios_app_settings.py @@ -0,0 +1,17 @@ +import _common as common +import ymake +import os + + +def onios_app_settings(unit, *args): + tail, kv = common.sort_by_keywords({'OS_VERSION': 1, 'DEVICES': -1}, args) + if tail: + ymake.report_configure_error('Bad IOS_COMMON_SETTINGS usage - unknown data: ' + str(tail)) + if kv.get('OS_VERSION', []): + unit.onios_app_common_flags(['--minimum-deployment-target', kv.get('OS_VERSION', [])[0]]) + unit.onios_app_assets_flags(['--filter-for-device-os-version', kv.get('OS_VERSION', [])[0]]) + devices_flags = [] + for device in kv.get('DEVICES', []): + devices_flags += ['--target-device', device] + if devices_flags: + unit.onios_app_common_flags(devices_flags) diff --git a/build/plugins/ios_assets.py b/build/plugins/ios_assets.py new file mode 100644 index 0000000000..16f58bda44 --- /dev/null +++ b/build/plugins/ios_assets.py @@ -0,0 +1,27 @@ +import _common as common +import ymake +import os + + +def onios_assets(unit, *args): + _, kv = common.sort_by_keywords({'ROOT': 1, 'CONTENTS': -1, 'FLAGS': -1}, args) + if not kv.get('ROOT', []) and kv.get('CONTENTS', []): + ymake.report_configure_error('Please specify ROOT directory for assets') + origin_root = kv.get('ROOT')[0] + destination_root = os.path.normpath(os.path.join('$BINDIR', os.path.basename(origin_root))) + rel_list = [] + for cont in 
kv.get('CONTENTS', []): + rel = os.path.relpath(cont, origin_root) + if rel.startswith('..'): + ymake.report_configure_error('{} is not subpath of {}'.format(cont, origin_root)) + rel_list.append(rel) + if not rel_list: + return + results_list = [os.path.join('$B', unit.path()[3:], os.path.basename(origin_root), i) for i in rel_list] + if len(kv.get('CONTENTS', [])) != len(results_list): + ymake.report_configure_error('IOS_ASSETTS content length is not equals results') + for s, d in zip(kv.get('CONTENTS', []), results_list): + unit.oncopy_file([s, d]) + if kv.get('FLAGS', []): + unit.onios_app_assets_flags(kv.get('FLAGS', [])) + unit.on_ios_assets([destination_root] + results_list) diff --git a/build/plugins/java.py b/build/plugins/java.py new file mode 100644 index 0000000000..32c083b596 --- /dev/null +++ b/build/plugins/java.py @@ -0,0 +1,480 @@ +import _common as common +import ymake +import json +import os +import base64 + + +DELIM = '================================' +CONTRIB_JAVA_PREFIX = 'contrib/java/' + + +def split_args(s): # TODO quotes, escapes + return filter(None, s.split()) + + +def extract_macro_calls(unit, macro_value_name, macro_calls_delim): + if not unit.get(macro_value_name): + return [] + + return filter( + None, map(split_args, unit.get(macro_value_name).replace('$' + macro_value_name, '').split(macro_calls_delim)) + ) + + +def extract_macro_calls2(unit, macro_value_name): + if not unit.get(macro_value_name): + return [] + + calls = [] + for call_encoded_args in unit.get(macro_value_name).strip().split(): + call_args = json.loads(base64.b64decode(call_encoded_args), encoding='utf-8') + calls.append(call_args) + + return calls + + +def on_run_jbuild_program(unit, *args): + args = list(args) + """ + Custom code generation + @link: https://wiki.yandex-team.ru/yatool/java/#kodogeneracijarunjavaprogram + """ + + flat, kv = common.sort_by_keywords( + { + 'IN': -1, + 'IN_DIR': -1, + 'OUT': -1, + 'OUT_DIR': -1, + 'CWD': 1, + 'CLASSPATH': -1, + 'CP_USE_COMMAND_FILE': 1, + 'ADD_SRCS_TO_CLASSPATH': 0, + }, + args, + ) + depends = kv.get('CLASSPATH', []) + kv.get('JAR', []) + fake_out = None + if depends: + # XXX: hack to force ymake to build dependencies + fake_out = "fake.out.{}".format(hash(tuple(args))) + unit.on_run_java(['TOOL'] + depends + ["OUT", fake_out]) + + if not kv.get('CP_USE_COMMAND_FILE'): + args += ['CP_USE_COMMAND_FILE', unit.get(['JAVA_PROGRAM_CP_USE_COMMAND_FILE']) or 'yes'] + + if fake_out is not None: + args += ['FAKE_OUT', fake_out] + + prev = unit.get(['RUN_JAVA_PROGRAM_VALUE']) or '' + new_val = (prev + ' ' + base64.b64encode(json.dumps(list(args), encoding='utf-8'))).strip() + unit.set(['RUN_JAVA_PROGRAM_VALUE', new_val]) + + +def ongenerate_script(unit, *args): + """ + heretic@ promised to make tutorial here + Don't forget + Feel free to remind + """ + flat, kv = common.sort_by_keywords({'OUT': -1, 'TEMPLATE': -1, 'CUSTOM_PROPERTY': -1}, args) + if len(kv.get('TEMPLATE', [])) > len(kv.get('OUT', [])): + ymake.report_configure_error('To many arguments for TEMPLATE parameter') + prev = unit.get(['GENERATE_SCRIPT_VALUE']) or '' + new_val = (prev + ' ' + base64.b64encode(json.dumps(list(args), encoding='utf-8'))).strip() + unit.set(['GENERATE_SCRIPT_VALUE', new_val]) + + +def onjava_module(unit, *args): + args_delim = unit.get('ARGS_DELIM') + if unit.get('ADD_SRCDIR_TO_TEST_DATA') == "yes": + unit.ondata_files(common.strip_roots(unit.path())) + + if unit.get('YA_IDE_IDEA') != 'yes': + return + + data = { + 'BUNDLE_NAME': unit.name(), + 'PATH': 
unit.path(), + 'MODULE_TYPE': unit.get('MODULE_TYPE'), + 'MODULE_ARGS': unit.get('MODULE_ARGS'), + 'MANAGED_PEERS': '${MANAGED_PEERS}', + 'MANAGED_PEERS_CLOSURE': '${MANAGED_PEERS_CLOSURE}', + 'NON_NAMAGEABLE_PEERS': '${NON_NAMAGEABLE_PEERS}', + 'TEST_CLASSPATH_MANAGED': '${TEST_CLASSPATH_MANAGED}', + 'EXCLUDE': extract_macro_calls(unit, 'EXCLUDE_VALUE', args_delim), + 'JAVA_SRCS': extract_macro_calls(unit, 'JAVA_SRCS_VALUE', args_delim), + 'JAVAC_FLAGS': extract_macro_calls(unit, 'JAVAC_FLAGS_VALUE', args_delim), + 'ANNOTATION_PROCESSOR': extract_macro_calls(unit, 'ANNOTATION_PROCESSOR_VALUE', args_delim), + 'EXTERNAL_JAR': extract_macro_calls(unit, 'EXTERNAL_JAR_VALUE', args_delim), + 'RUN_JAVA_PROGRAM': extract_macro_calls2(unit, 'RUN_JAVA_PROGRAM_VALUE'), + 'RUN_JAVA_PROGRAM_MANAGED': '${RUN_JAVA_PROGRAM_MANAGED}', + 'MAVEN_GROUP_ID': extract_macro_calls(unit, 'MAVEN_GROUP_ID_VALUE', args_delim), + 'JAR_INCLUDE_FILTER': extract_macro_calls(unit, 'JAR_INCLUDE_FILTER_VALUE', args_delim), + 'JAR_EXCLUDE_FILTER': extract_macro_calls(unit, 'JAR_EXCLUDE_FILTER_VALUE', args_delim), + # TODO remove when java test dart is in prod + 'UNITTEST_DIR': unit.get('UNITTEST_DIR'), + 'SYSTEM_PROPERTIES': extract_macro_calls(unit, 'SYSTEM_PROPERTIES_VALUE', args_delim), + 'JVM_ARGS': extract_macro_calls(unit, 'JVM_ARGS_VALUE', args_delim), + 'TEST_CWD': extract_macro_calls(unit, 'TEST_CWD_VALUE', args_delim), + 'TEST_FORK_MODE': extract_macro_calls(unit, 'TEST_FORK_MODE', args_delim), + 'SPLIT_FACTOR': extract_macro_calls(unit, 'TEST_SPLIT_FACTOR', args_delim), + 'TIMEOUT': extract_macro_calls(unit, 'TEST_TIMEOUT', args_delim), + 'TAG': extract_macro_calls(unit, 'TEST_TAGS_VALUE', args_delim), + 'SIZE': extract_macro_calls(unit, 'TEST_SIZE_NAME', args_delim), + 'DEPENDS': extract_macro_calls(unit, 'TEST_DEPENDS_VALUE', args_delim), + 'IDEA_EXCLUDE': extract_macro_calls(unit, 'IDEA_EXCLUDE_DIRS_VALUE', args_delim), + 'IDEA_RESOURCE': extract_macro_calls(unit, 'IDEA_RESOURCE_DIRS_VALUE', args_delim), + 'IDEA_MODULE_NAME': extract_macro_calls(unit, 'IDEA_MODULE_NAME_VALUE', args_delim), + 'GENERATE_SCRIPT': extract_macro_calls2(unit, 'GENERATE_SCRIPT_VALUE'), + 'FAKEID': extract_macro_calls(unit, 'FAKEID', args_delim), + 'TEST_DATA': extract_macro_calls(unit, 'TEST_DATA_VALUE', args_delim), + 'JAVA_FORBIDDEN_LIBRARIES': extract_macro_calls(unit, 'JAVA_FORBIDDEN_LIBRARIES_VALUE', args_delim), + 'JDK_RESOURCE': 'JDK' + (unit.get('JDK_VERSION') or unit.get('JDK_REAL_VERSION') or '_DEFAULT'), + } + if unit.get('ENABLE_PREVIEW_VALUE') == 'yes' and (unit.get('JDK_VERSION') or unit.get('JDK_REAL_VERSION')) in ( + '15', + '16', + '17', + '18', + '19', + '20' + ): + data['ENABLE_PREVIEW'] = extract_macro_calls(unit, 'ENABLE_PREVIEW_VALUE', args_delim) + + if unit.get('SAVE_JAVAC_GENERATED_SRCS_DIR') and unit.get('SAVE_JAVAC_GENERATED_SRCS_TAR'): + data['SAVE_JAVAC_GENERATED_SRCS_DIR'] = extract_macro_calls(unit, 'SAVE_JAVAC_GENERATED_SRCS_DIR', args_delim) + data['SAVE_JAVAC_GENERATED_SRCS_TAR'] = extract_macro_calls(unit, 'SAVE_JAVAC_GENERATED_SRCS_TAR', args_delim) + + if unit.get('JAVA_ADD_DLLS_VALUE') == 'yes': + data['ADD_DLLS_FROM_DEPENDS'] = extract_macro_calls(unit, 'JAVA_ADD_DLLS_VALUE', args_delim) + + if unit.get('ERROR_PRONE_VALUE') == 'yes': + data['ERROR_PRONE'] = extract_macro_calls(unit, 'ERROR_PRONE_VALUE', args_delim) + + if unit.get('WITH_KOTLIN_VALUE') == 'yes': + data['WITH_KOTLIN'] = extract_macro_calls(unit, 'WITH_KOTLIN_VALUE', args_delim) + if unit.get('KOTLIN_JVM_TARGET'): + 
data['KOTLIN_JVM_TARGET'] = extract_macro_calls(unit, 'KOTLIN_JVM_TARGET', args_delim) + if unit.get('KOTLINC_FLAGS_VALUE'): + data['KOTLINC_FLAGS'] = extract_macro_calls(unit, 'KOTLINC_FLAGS_VALUE', args_delim) + if unit.get('KOTLINC_OPTS_VALUE'): + data['KOTLINC_OPTS'] = extract_macro_calls(unit, 'KOTLINC_OPTS_VALUE', args_delim) + + if unit.get('DIRECT_DEPS_ONLY_VALUE') == 'yes': + data['DIRECT_DEPS_ONLY'] = extract_macro_calls(unit, 'DIRECT_DEPS_ONLY_VALUE', args_delim) + + if unit.get('JAVA_EXTERNAL_DEPENDENCIES_VALUE'): + valid = [] + for dep in sum(extract_macro_calls(unit, 'JAVA_EXTERNAL_DEPENDENCIES_VALUE', args_delim), []): + if os.path.normpath(dep).startswith('..'): + ymake.report_configure_error( + '{}: {} - relative paths in JAVA_EXTERNAL_DEPENDENCIES is not allowed'.format(unit.path(), dep) + ) + elif os.path.isabs(dep): + ymake.report_configure_error( + '{}: {} absolute paths in JAVA_EXTERNAL_DEPENDENCIES is not allowed'.format(unit.path(), dep) + ) + else: + valid.append(dep) + if valid: + data['EXTERNAL_DEPENDENCIES'] = [valid] + + if unit.get('MAKE_UBERJAR_VALUE') == 'yes': + if unit.get('MODULE_TYPE') != 'JAVA_PROGRAM': + ymake.report_configure_error('{}: UBERJAR supported only for JAVA_PROGRAM module type'.format(unit.path())) + data['UBERJAR'] = extract_macro_calls(unit, 'MAKE_UBERJAR_VALUE', args_delim) + data['UBERJAR_PREFIX'] = extract_macro_calls(unit, 'UBERJAR_PREFIX_VALUE', args_delim) + data['UBERJAR_HIDE_EXCLUDE'] = extract_macro_calls(unit, 'UBERJAR_HIDE_EXCLUDE_VALUE', args_delim) + data['UBERJAR_PATH_EXCLUDE'] = extract_macro_calls(unit, 'UBERJAR_PATH_EXCLUDE_VALUE', args_delim) + data['UBERJAR_MANIFEST_TRANSFORMER_MAIN'] = extract_macro_calls( + unit, 'UBERJAR_MANIFEST_TRANSFORMER_MAIN_VALUE', args_delim + ) + data['UBERJAR_MANIFEST_TRANSFORMER_ATTRIBUTE'] = extract_macro_calls( + unit, 'UBERJAR_MANIFEST_TRANSFORMER_ATTRIBUTE_VALUE', args_delim + ) + data['UBERJAR_APPENDING_TRANSFORMER'] = extract_macro_calls( + unit, 'UBERJAR_APPENDING_TRANSFORMER_VALUE', args_delim + ) + data['UBERJAR_SERVICES_RESOURCE_TRANSFORMER'] = extract_macro_calls( + unit, 'UBERJAR_SERVICES_RESOURCE_TRANSFORMER_VALUE', args_delim + ) + + if unit.get('WITH_JDK_VALUE') == 'yes': + if unit.get('MODULE_TYPE') != 'JAVA_PROGRAM': + ymake.report_configure_error( + '{}: JDK export supported only for JAVA_PROGRAM module type'.format(unit.path()) + ) + data['WITH_JDK'] = extract_macro_calls(unit, 'WITH_JDK_VALUE', args_delim) + + if not data['EXTERNAL_JAR']: + has_processor = extract_macro_calls(unit, 'GENERATE_VCS_JAVA_INFO_NODEP', args_delim) + # IMPORTANT before switching vcs_info.py to python3 the value was always evaluated to $YMAKE_PYTHON but no + # code in java dart parser extracts its value only checks this key for existance. 
+ data['EMBED_VCS'] = [['yes']] + # FORCE_VCS_INFO_UPDATE is responsible for setting special value of VCS_INFO_DISABLE_CACHE__NO_UID__ + macro_val = extract_macro_calls(unit, 'FORCE_VCS_INFO_UPDATE', args_delim) + macro_str = macro_val[0][0] if macro_val and macro_val[0] and macro_val[0][0] else '' + if macro_str and macro_str == 'yes': + data['VCS_INFO_DISABLE_CACHE__NO_UID__'] = macro_val + + for java_srcs_args in data['JAVA_SRCS']: + external = None + + for i in xrange(len(java_srcs_args)): + arg = java_srcs_args[i] + + if arg == 'EXTERNAL': + if not i + 1 < len(java_srcs_args): + continue # TODO configure error + + ex = java_srcs_args[i + 1] + + if ex in ('EXTERNAL', 'SRCDIR', 'PACKAGE_PREFIX', 'EXCLUDE'): + continue # TODO configure error + + if external is not None: + continue # TODO configure error + + external = ex + + if external: + unit.onpeerdir(external) + + for k, v in data.items(): + if not v: + data.pop(k) + + dart = 'JAVA_DART: ' + base64.b64encode(json.dumps(data)) + '\n' + DELIM + '\n' + unit.set_property(['JAVA_DART_DATA', dart]) + + +def on_add_java_style_checks(unit, *args): + if unit.get('LINT_LEVEL_VALUE') != "none" and common.get_no_lint_value(unit) != 'none': + unit.onadd_check(['JAVA_STYLE', unit.get('LINT_LEVEL_VALUE')] + list(args)) + + +def on_add_kotlin_style_checks(unit, *args): + """ + ktlint can be disabled using NO_LINT() and NO_LINT(ktlint) + """ + if unit.get('WITH_KOTLIN_VALUE') == 'yes': + if common.get_no_lint_value(unit) == '': + unit.onadd_check(['ktlint'] + list(args)) + + +def on_add_classpath_clash_check(unit, *args): + jdeps_val = (unit.get('CHECK_JAVA_DEPS_VALUE') or '').lower() + if jdeps_val and jdeps_val not in ('yes', 'no', 'strict'): + ymake.report_configure_error('CHECK_JAVA_DEPS: "yes", "no" or "strict" required') + if jdeps_val and jdeps_val != 'no': + unit.onjava_test_deps(jdeps_val) + + +# Ymake java modules related macroses + + +def onexternal_jar(unit, *args): + args = list(args) + flat, kv = common.sort_by_keywords({'SOURCES': 1}, args) + if not flat: + ymake.report_configure_error('EXTERNAL_JAR requires exactly one resource URL of compiled jar library') + res = flat[0] + resid = res[4:] if res.startswith('sbr:') else res + unit.set(['JAR_LIB_RESOURCE', resid]) + unit.set(['JAR_LIB_RESOURCE_URL', res]) + + +def on_check_java_srcdir(unit, *args): + args = list(args) + for arg in args: + if not '$' in arg: + arc_srcdir = os.path.join(unit.get('MODDIR'), arg) + abs_srcdir = unit.resolve(os.path.join("$S/", arc_srcdir)) + if not os.path.exists(abs_srcdir) or not os.path.isdir(abs_srcdir): + ymake.report_configure_error( + 'Trying to set a [[alt1]]JAVA_SRCS[[rst]] for a missing directory: [[imp]]$S/{}[[rst]]', + missing_dir=arc_srcdir, + ) + return + srcdir = unit.resolve_arc_path(arg) + if srcdir and not srcdir.startswith('$S'): + continue + abs_srcdir = unit.resolve(srcdir) if srcdir else unit.resolve(arg) + if not os.path.exists(abs_srcdir) or not os.path.isdir(abs_srcdir): + ymake.report_configure_error( + 'Trying to set a [[alt1]]JAVA_SRCS[[rst]] for a missing directory: [[imp]]{}[[rst]]', missing_dir=srcdir + ) + + +def on_fill_jar_copy_resources_cmd(unit, *args): + if len(args) == 4: + varname, srcdir, base_classes_dir, reslist = tuple(args) + package = '' + else: + varname, srcdir, base_classes_dir, package, reslist = tuple(args) + dest_dir = os.path.join(base_classes_dir, *package.split('.')) if package else base_classes_dir + var = unit.get(varname) + var += ' && $FS_TOOLS copy_files {} {} {}'.format( + srcdir if 
srcdir.startswith('"$') else '${CURDIR}/' + srcdir, dest_dir, reslist + ) + unit.set([varname, var]) + + +def on_fill_jar_gen_srcs(unit, *args): + varname, jar_type, srcdir, base_classes_dir, java_list, kt_list, groovy_list, res_list = tuple(args[0:8]) + resolved_srcdir = unit.resolve_arc_path(srcdir) + if not resolved_srcdir.startswith('$') or resolved_srcdir.startswith('$S'): + return + + exclude_pos = args.index('EXCLUDE') + globs = args[7:exclude_pos] + excludes = args[exclude_pos + 1 :] + var = unit.get(varname) + var += ' && ${{cwd:BINDIR}} $YMAKE_PYTHON ${{input:"build/scripts/resolve_java_srcs.py"}} --append -d {} -s {} -k {} -g {} -r {} --include-patterns {}'.format( + srcdir, java_list, kt_list, groovy_list, res_list, ' '.join(globs) + ) + if jar_type == 'SRC_JAR': + var += ' --all-resources' + if len(excludes) > 0: + var += ' --exclude-patterns {}'.format(' '.join(excludes)) + if unit.get('WITH_KOTLIN_VALUE') == 'yes': + var += ' --resolve-kotlin' + unit.set([varname, var]) + + +def on_check_run_java_prog_classpath(unit, *args): + if len(args) != 1: + ymake.report_configure_error( + 'multiple CLASSPATH elements in RUN_JAVA_PROGRAM invocation no more supported. Use JAVA_RUNTIME_PEERDIR on the JAVA_PROGRAM module instead' + ) + + +def extract_words(words, keys): + kv = {} + k = None + + for w in words: + if w in keys: + k = w + else: + if not k in kv: + kv[k] = [] + kv[k].append(w) + + return kv + + +def parse_words(words): + kv = extract_words(words, {'OUT', 'TEMPLATE'}) + if not 'TEMPLATE' in kv: + kv['TEMPLATE'] = ['template.tmpl'] + ws = [] + for item in ('OUT', 'TEMPLATE'): + for i, word in list(enumerate(kv[item])): + if word == 'CUSTOM_PROPERTY': + ws += kv[item][i:] + kv[item] = kv[item][:i] + templates = kv['TEMPLATE'] + outputs = kv['OUT'] + if len(outputs) < len(templates): + ymake.report_configure_error('To many arguments for TEMPLATE parameter') + return + if ws and ws[0] != 'CUSTOM_PROPERTY': + ymake.report_configure_error('''Can't parse {}'''.format(ws)) + custom_props = [] + for item in ws: + if item == 'CUSTOM_PROPERTY': + custom_props.append([]) + else: + custom_props[-1].append(item) + props = [] + for p in custom_props: + if not p: + ymake.report_configure_error('Empty CUSTOM_PROPERTY') + continue + props.append('-B') + if len(p) > 1: + props.append(base64.b64encode("{}={}".format(p[0], ' '.join(p[1:])))) + else: + ymake.report_configure_error('CUSTOM_PROPERTY "{}" value is not specified'.format(p[0])) + for i, o in enumerate(outputs): + yield o, templates[min(i, len(templates) - 1)], props + + +def on_ymake_generate_script(unit, *args): + for out, tmpl, props in parse_words(list(args)): + unit.on_add_gen_java_script([out, tmpl] + list(props)) + + +def on_jdk_version_macro_check(unit, *args): + if len(args) != 1: + unit.message(["error", "Invalid syntax. Single argument required."]) + jdk_version = args[0] + available_versions = ( + '10', + '11', + '15', + '16', + '17', + '18', + '19', + '20', + ) + if jdk_version not in available_versions: + ymake.report_configure_error( + "Invalid jdk version: {}. {} are available".format(jdk_version, available_versions) + ) + if int(jdk_version) >= 19 and unit.get('WITH_JDK_VALUE') != 'yes' and unit.get('MODULE_TAG') == 'JAR_RUNNABLE': + msg = ( + "Missing WITH_JDK() macro for JDK version >= 19" + # temporary link with additional explanation + ". 
For more info see https://clubs.at.yandex-team.ru/arcadia/28543" + ) + ymake.report_configure_error(msg) + + +def _maven_coords_for_project(unit, project_dir): + parts = project_dir.split('/') + + g = '.'.join(parts[2:-2]) + a = parts[-2] + v = parts[-1] + c = '' + + pom_path = unit.resolve(os.path.join('$S', project_dir, 'pom.xml')) + if os.path.exists(pom_path): + import xml.etree.ElementTree as et + + with open(pom_path) as f: + root = et.fromstring(f.read()) + for xpath in ('./{http://maven.apache.org/POM/4.0.0}artifactId', './artifactId'): + artifact = root.find(xpath) + if artifact is not None: + artifact = artifact.text + if a != artifact and a.startswith(artifact): + c = a[len(artifact) :].lstrip('-_') + a = artifact + break + + return '{}:{}:{}:{}'.format(g, a, v, c) + + +def on_setup_maven_export_coords_if_need(unit, *args): + if not unit.enabled('MAVEN_EXPORT'): + return + + unit.set(['MAVEN_EXPORT_COORDS_GLOBAL', _maven_coords_for_project(unit, args[0])]) + + +def on_setup_project_coords_if_needed(unit, *args): + if not unit.enabled('EXPORT_GRADLE'): + return + + project_dir = args[0] + if project_dir.startswith(CONTRIB_JAVA_PREFIX): + value = '\\"{}\\"'.format(_maven_coords_for_project(unit, project_dir).rstrip(':')) + else: + value = 'project(\\":{}\\")'.format(project_dir.replace('/', ':')) + unit.set(['_EXPORT_GRADLE_PROJECT_COORDS', value]) diff --git a/build/plugins/large_files.py b/build/plugins/large_files.py new file mode 100644 index 0000000000..568e294253 --- /dev/null +++ b/build/plugins/large_files.py @@ -0,0 +1,40 @@ +import os +import ymake +from _common import strip_roots + +PLACEHOLDER_EXT = "external" + + +def onlarge_files(unit, *args): + """ + @usage LARGE_FILES([AUTOUPDATED] Files...) + + Use large file ether from working copy or from remote storage via placeholder <File>.external + If <File> is present locally (and not a symlink!) it will be copied to build directory. + Otherwise macro will try to locate <File>.external, parse it retrieve ot during build phase. + """ + args = list(args) + + if args and args[0] == 'AUTOUPDATED': + args = args[1:] + + for arg in args: + if arg == 'AUTOUPDATED': + unit.message(["warn", "Please set AUTOUPDATED argument before other file names"]) + continue + + src = unit.resolve_arc_path(arg) + if src.startswith("$S"): + msg = "Used local large file {}. 
Don't forget to run 'ya upload --update-external' and commit {}.{}".format( + src, src, PLACEHOLDER_EXT + ) + unit.message(["warn", msg]) + unit.oncopy_file([arg, arg]) + else: + out_file = strip_roots(os.path.join(unit.path(), arg)) + external = "{}.{}".format(arg, PLACEHOLDER_EXT) + from_external_cmd = [external, out_file, 'OUT_NOAUTO', arg] + if os.path.dirname(arg): + from_external_cmd.extend(("RENAME", os.path.basename(arg))) + unit.on_from_external(from_external_cmd) + unit.onadd_check(['check.external', external]) diff --git a/build/plugins/lib/__init__.py b/build/plugins/lib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/build/plugins/lib/__init__.py diff --git a/build/plugins/lib/_metric_resolvers.py b/build/plugins/lib/_metric_resolvers.py new file mode 100644 index 0000000000..270eb78345 --- /dev/null +++ b/build/plugins/lib/_metric_resolvers.py @@ -0,0 +1,11 @@ +import re + +VALUE_PATTERN = re.compile(r"^\s*(?P<value>\d+)\s*$") + + +def resolve_value(val): + match = VALUE_PATTERN.match(val) + if not match: + return None + val = match.group('value') + return int(val) diff --git a/build/plugins/lib/nots/__init__.py b/build/plugins/lib/nots/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/build/plugins/lib/nots/__init__.py diff --git a/build/plugins/lib/nots/erm_json_lite.py b/build/plugins/lib/nots/erm_json_lite.py new file mode 100644 index 0000000000..dee76302a0 --- /dev/null +++ b/build/plugins/lib/nots/erm_json_lite.py @@ -0,0 +1,102 @@ +import json +from functools import cmp_to_key + +from lib.nots.semver import Version, VersionRange + + +class ErmJsonLite(object): + """ + Basic implementation to read `erm-packages.json`. + + It doesn't use any models, works with only raw JSON types: lists, dicts, strings + """ + + class ResourceType(object): + NPM_PACKAGE = "NPM_PACKAGE" + NODE_JS = "NODE_JS" + + data = None + + @staticmethod + def get_versions_of(er_resource): + # type: (dict) -> list[Version] + """ + Return all versions of the resource in ASC order (from older to latest) + """ + unsorted = er_resource.get("versions").keys() + # We have to sort because in python 2 the order of keys in a dict is not guaranteed + versions = sorted(unsorted, key=cmp_to_key(Version.cmp)) + + return [Version.from_str(v) for v in versions] + + @classmethod + def load(cls, path): + # type: (str) -> ErmJsonLite + erm_json = cls() + + with open(path) as f: + erm_json.data = dict() + for k, v in json.load(f).items(): + # Ignore comments (when key starts with `_`), used for banner + if not k.startswith("_"): + erm_json.data[k] = v + + return erm_json + + def get_resource(self, resource_name): + # type: (str) -> dict + """ + Return resource by his name + """ + er_resource = self.data.get(resource_name) + if not er_resource: + raise Exception("Requested resource {} is not a toolchain item".format(resource_name)) + + return er_resource + + def get_sb_resources(self, resource_name, version): + # type: (str, Version) -> list[dict] + """ + Return a list of SB resources for ER version + """ + er_resource = self.get_resource(resource_name) + + return er_resource.get("versions").get(str(version)).get("resources") + + def is_resource_multiplatform(self, resource_name): + # type: (str) -> bool + """ + Return True if resource is multiplatform, False otherwise + """ + er_resource = self.get_resource(resource_name) + + return er_resource.get("multiplatform", False) + + def list_npm_packages(self): + # type: () -> list[str] + """ + Returns a list 
of the names of the npm tools used in the toolchain + """ + result = [] + for resource_name, resource in self.data.items(): + if resource.get("type") == self.ResourceType.NPM_PACKAGE: + result.append(resource_name) + + return result + + def select_version_of(self, resource_name, range_str=None): + # type: (str, str|None) -> Version|None + er_resource = self.get_resource(resource_name) + + if range_str is None: + return Version.from_str(er_resource.get("default")) + + version_range = VersionRange.from_str(range_str) + + # assuming the version list is sorted from the lowest to the highest version, + # we stop the loop as early as possible and hence return the lowest compatible version + for version in self.get_versions_of(er_resource): + if version_range.is_satisfied_by(version): + return version + + return None diff --git a/build/plugins/lib/nots/package_manager/__init__.py b/build/plugins/lib/nots/package_manager/__init__.py new file mode 100644 index 0000000000..570231e1e9 --- /dev/null +++ b/build/plugins/lib/nots/package_manager/__init__.py @@ -0,0 +1,7 @@ +from .pnpm import PnpmPackageManager +from .base import PackageJson, constants, utils, bundle_node_modules, extract_node_modules + + +manager = PnpmPackageManager + +__all__ = ["PackageJson", "constants", "utils", "bundle_node_modules", "extract_node_modules", "manager"] diff --git a/build/plugins/lib/nots/package_manager/base/__init__.py b/build/plugins/lib/nots/package_manager/base/__init__.py new file mode 100644 index 0000000000..022d4a960e --- /dev/null +++ b/build/plugins/lib/nots/package_manager/base/__init__.py @@ -0,0 +1,20 @@ +from . import constants, utils +from .lockfile import BaseLockfile, LockfilePackageMeta, LockfilePackageMetaInvalidError +from .package_json import PackageJson +from .package_manager import BasePackageManager, PackageManagerError, PackageManagerCommandError +from .node_modules_bundler import bundle_node_modules, extract_node_modules + + +__all__ = [ + "constants", + "utils", + "BaseLockfile", + "LockfilePackageMeta", + "LockfilePackageMetaInvalidError", + "BasePackageManager", + "PackageManagerError", + "PackageManagerCommandError", + "PackageJson", + "bundle_node_modules", + "extract_node_modules", +] diff --git a/build/plugins/lib/nots/package_manager/base/constants.py b/build/plugins/lib/nots/package_manager/base/constants.py new file mode 100644 index 0000000000..d03df2a570 --- /dev/null +++ b/build/plugins/lib/nots/package_manager/base/constants.py @@ -0,0 +1,7 @@ +BUILD_DIRNAME = "build" +BUNDLE_DIRNAME = "bundle" +NODE_MODULES_BUNDLE_FILENAME = "node_modules.tar" +NODE_MODULES_DIRNAME = "node_modules" +NODE_MODULES_WORKSPACE_BUNDLE_FILENAME = "workspace_node_modules.tar" +NPM_REGISTRY_URL = "http://npm.yandex-team.ru" +PACKAGE_JSON_FILENAME = "package.json" diff --git a/build/plugins/lib/nots/package_manager/base/lockfile.py b/build/plugins/lib/nots/package_manager/base/lockfile.py new file mode 100644 index 0000000000..1d7cc6ad3e --- /dev/null +++ b/build/plugins/lib/nots/package_manager/base/lockfile.py @@ -0,0 +1,69 @@ +import os + +from abc import ABCMeta, abstractmethod +from six import add_metaclass + + +class LockfilePackageMeta(object): + """ + Basic struct representing package meta from lockfile. 
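+    Serialized form (see from_str / to_str below): a single space-separated line
+    "<name> <version> <sky_id> <integrity> <integrity_algorithm>".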
+ """ + + __slots__ = ("name", "version", "sky_id", "integrity", "integrity_algorithm", "tarball_path") + + @staticmethod + def from_str(s): + return LockfilePackageMeta(*s.strip().split(" ")) + + def __init__(self, name, version, sky_id, integrity, integrity_algorithm): + self.name = name + self.version = version + self.sky_id = sky_id + self.integrity = integrity + self.integrity_algorithm = integrity_algorithm + self.tarball_path = "{}-{}.tgz".format(name, version) + + def to_str(self): + return " ".join([self.name, self.version, self.sky_id, self.integrity, self.integrity_algorithm]) + + +class LockfilePackageMetaInvalidError(RuntimeError): + pass + + +@add_metaclass(ABCMeta) +class BaseLockfile(object): + @classmethod + def load(cls, path): + """ + :param path: lockfile path + :type path: str + :rtype: BaseLockfile + """ + pj = cls(path) + pj.read() + + return pj + + def __init__(self, path): + if not os.path.isabs(path): + raise TypeError("Absolute path required, given: {}".format(path)) + + self.path = path + self.data = None + + @abstractmethod + def read(self): + pass + + @abstractmethod + def write(self, path=None): + pass + + @abstractmethod + def get_packages_meta(self): + pass + + @abstractmethod + def update_tarball_resolutions(self, fn): + pass diff --git a/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py b/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py new file mode 100644 index 0000000000..c835c4d7ca --- /dev/null +++ b/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py @@ -0,0 +1,66 @@ +import os +import tarfile + +from io import BytesIO + +from .utils import build_nm_path + + +PEERS_DIR = ".peers" +PEERS_INDEX = "index" + + +def bundle_node_modules(build_root, peers, node_modules_path, bundle_path): + """ + Creates node_modules bundle. + Bundle contains node_modules directory, peers' node_modules directories, + and index file with the list of added peers (\\n delimited). + :param build_root: arcadia build root + :type build_root: str + :param peers: list of peers (arcadia root related) + :type peers: list of str + :param node_modules_path: node_modules path + :type node_modules_path: str + :param bundle_path: tarball path + :type bundle_path: str + """ + with tarfile.open(bundle_path, "w") as tf: + tf.add(node_modules_path, arcname=".") + + # Peers' node_modules. + added_peers = [] + for p in peers: + peer_nm_path = build_nm_path(os.path.join(build_root, p)) + peer_bundled_nm_path = build_nm_path(os.path.join(PEERS_DIR, p)) + if not os.path.isdir(peer_nm_path): + continue + tf.add(peer_nm_path, arcname=peer_bundled_nm_path) + added_peers.append(p) + + # Peers index. + peers_index = "\n".join(added_peers) + ti = tarfile.TarInfo(name=os.path.join(PEERS_DIR, PEERS_INDEX)) + ti.size = len(peers_index) + tf.addfile(ti, BytesIO(peers_index.encode())) + + +def extract_node_modules(build_root, node_modules_path, bundle_path): + """ + Extracts node_modules bundle. 
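+    Peer node_modules directories stored under the ".peers" prefix are moved back
+    to their build-root locations according to the bundled peers index.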
+ :param build_root: arcadia build root + :type build_root: str + :param node_modules_path: node_modules path + :type node_modules_path: str + :param bundle_path: tarball path + :type bundle_path: str + """ + with tarfile.open(bundle_path) as tf: + tf.extractall(node_modules_path) + + peers = open(os.path.join(node_modules_path, PEERS_DIR, PEERS_INDEX)).read().split("\n") + for p in peers: + if not p: + continue + bundled_nm_path = build_nm_path(os.path.join(node_modules_path, PEERS_DIR, p)) + nm_path = build_nm_path(os.path.join(build_root, p)) + os.rename(bundled_nm_path, nm_path) diff --git a/build/plugins/lib/nots/package_manager/base/package_json.py b/build/plugins/lib/nots/package_manager/base/package_json.py new file mode 100644 index 0000000000..d99b1e8254 --- /dev/null +++ b/build/plugins/lib/nots/package_manager/base/package_json.py @@ -0,0 +1,198 @@ +import json +import logging +import os + +from six import iteritems + +from .utils import build_pj_path + +logger = logging.getLogger(__name__) + + +class PackageJsonWorkspaceError(RuntimeError): + pass + + +class PackageJson(object): + DEP_KEY = "dependencies" + DEV_DEP_KEY = "devDependencies" + PEER_DEP_KEY = "peerDependencies" + OPT_DEP_KEY = "optionalDependencies" + DEP_KEYS = (DEP_KEY, DEV_DEP_KEY, PEER_DEP_KEY, OPT_DEP_KEY) + + WORKSPACE_SCHEMA = "workspace:" + + @classmethod + def load(cls, path): + """ + :param path: package.json path + :type path: str + :rtype: PackageJson + """ + pj = cls(path) + pj.read() + + return pj + + def __init__(self, path): + if not os.path.isabs(path): + raise TypeError("Absolute path required, given: {}".format(path)) + + self.path = path + self.data = None + + def read(self): + with open(self.path) as f: + self.data = json.load(f) + + def write(self, path=None): + """ + :param path: path to store package.json, defaults to original path + :type path: str + """ + if path is None: + path = self.path + + directory = os.path.dirname(path) + if not os.path.exists(directory): + os.mkdir(directory) + + with open(path, "w") as f: + json.dump(self.data, f, indent=2, ensure_ascii=False) + f.write('\n') # it's better for diff algorithm in arc + logger.debug("Written {}".format(path)) + + def get_name(self): + name = self.data.get("name") + + if not name: + name = self.path.replace("/", "-") + + return name + + def get_version(self): + return self.data["version"] + + def get_description(self): + return self.data.get("description") + + def get_nodejs_version(self): + return self.data.get("engines", {}).get("node") + + def get_dep_specifier(self, dep_name): + for name, spec in self.dependencies_iter(): + if dep_name == name: + return spec + return None + + def dependencies_iter(self): + for key in self.DEP_KEYS: + deps = self.data.get(key) + if not deps: + continue + + for name, spec in iteritems(deps): + yield (name, spec) + + def has_dependencies(self): + first_dep = next(self.dependencies_iter(), None) + return first_dep is not None + + def bins_iter(self): + bins = self.data.get("bin") + if isinstance(bins, str): + yield bins + elif isinstance(bins, dict): + for bin in bins.values(): + yield bin + + def get_workspace_dep_spec_paths(self): + """ + Returns names and paths from specifiers of the defined workspace dependencies. 
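+        E.g. a dependency {"@a/bar": "workspace:../bar"} yields ("@a/bar", "../bar");
+        non-relative specifiers such as "workspace:*" raise PackageJsonWorkspaceError.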
+ :rtype: list of (str, str) + """ + spec_paths = [] + schema = self.WORKSPACE_SCHEMA + schema_len = len(schema) + + for name, spec in self.dependencies_iter(): + if not spec.startswith(schema): + continue + + spec_path = spec[schema_len:] + if not (spec_path.startswith(".") or spec_path.startswith("..")): + raise PackageJsonWorkspaceError( + "Expected relative path specifier for workspace dependency, but got '{}' for {} in {}".format( + spec, name, self.path + ) + ) + + spec_paths.append((name, spec_path)) + + return spec_paths + + def get_workspace_dep_paths(self, base_path=None): + """ + Returns paths of the defined workspace dependencies. + :param base_path: base path to resolve relative dep paths + :type base_path: str + :rtype: list of str + """ + if base_path is None: + base_path = os.path.dirname(self.path) + + return [os.path.normpath(os.path.join(base_path, p)) for _, p in self.get_workspace_dep_spec_paths()] + + def get_workspace_deps(self): + """ + :rtype: list of PackageJson + """ + ws_deps = [] + pj_dir = os.path.dirname(self.path) + + for name, rel_path in self.get_workspace_dep_spec_paths(): + dep_path = os.path.normpath(os.path.join(pj_dir, rel_path)) + dep_pj = PackageJson.load(build_pj_path(dep_path)) + + if name != dep_pj.get_name(): + raise PackageJsonWorkspaceError( + "Workspace dependency name mismatch, found '{}' instead of '{}' in {}".format( + name, dep_pj.get_name(), self.path + ) + ) + + ws_deps.append(dep_pj) + + return ws_deps + + def get_workspace_map(self, ignore_self=False): + """ + Returns absolute paths of the workspace dependencies (including transitive) mapped to package.json and depth. + :param ignore_self: whether path of the current module will be excluded + :type ignore_self: bool + :rtype: dict of (PackageJson, int) + """ + ws_deps = {} + # list of (pj, depth) + pj_queue = [(self, 0)] + + while len(pj_queue): + (pj, depth) = pj_queue.pop() + pj_dir = os.path.dirname(pj.path) + if pj_dir in ws_deps: + continue + + if not ignore_self or pj != self: + ws_deps[pj_dir] = (pj, depth) + + for dep_pj in pj.get_workspace_deps(): + pj_queue.append((dep_pj, depth + 1)) + + return ws_deps + + def get_dep_paths_by_names(self): + """ + Returns dict of {dependency_name: dependency_path} + """ + ws_map = self.get_workspace_map() + return {pj.get_name(): path for path, (pj, _) in ws_map.items()} diff --git a/build/plugins/lib/nots/package_manager/base/package_manager.py b/build/plugins/lib/nots/package_manager/base/package_manager.py new file mode 100644 index 0000000000..d594d4ea92 --- /dev/null +++ b/build/plugins/lib/nots/package_manager/base/package_manager.py @@ -0,0 +1,153 @@ +import os +import sys +import subprocess + +from abc import ABCMeta, abstractmethod +from six import add_metaclass + +from .constants import NPM_REGISTRY_URL +from .package_json import PackageJson +from .utils import build_nm_path, build_pj_path + + +class PackageManagerError(RuntimeError): + pass + + +class PackageManagerCommandError(PackageManagerError): + def __init__(self, cmd, code, stdout, stderr): + self.cmd = cmd + self.code = code + self.stdout = stdout + self.stderr = stderr + + msg = "package manager exited with code {} while running {}:\n{}\n{}".format(code, cmd, stdout, stderr) + super(PackageManagerCommandError, self).__init__(msg) + + +@add_metaclass(ABCMeta) +class BasePackageManager(object): + def __init__( + self, + build_root, + build_path, + sources_path, + nodejs_bin_path, + script_path, + contribs_path, + module_path=None, + sources_root=None, + ): + 
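+        # When module_path / sources_root are not passed explicitly they are derived
+        # below: module_path is build_path made relative to build_root, and
+        # sources_root is sources_path with that module path stripped from its tail.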
self.module_path = build_path[len(build_root) + 1 :] if module_path is None else module_path + self.build_path = build_path + self.sources_path = sources_path + self.build_root = build_root + self.sources_root = sources_path[: -len(self.module_path) - 1] if sources_root is None else sources_root + self.nodejs_bin_path = nodejs_bin_path + self.script_path = script_path + self.contribs_path = contribs_path + + @classmethod + def load_package_json(cls, path): + """ + :param path: path to package.json + :type path: str + :rtype: PackageJson + """ + return PackageJson.load(path) + + @classmethod + def load_package_json_from_dir(cls, dir_path): + """ + :param dir_path: path to directory with package.json + :type dir_path: str + :rtype: PackageJson + """ + return cls.load_package_json(build_pj_path(dir_path)) + + @classmethod + @abstractmethod + def load_lockfile(cls, path): + pass + + @classmethod + @abstractmethod + def load_lockfile_from_dir(cls, dir_path): + pass + + @abstractmethod + def create_node_modules(self): + pass + + @abstractmethod + def calc_node_modules_inouts(self): + pass + + @abstractmethod + def extract_packages_meta_from_lockfiles(self, lf_paths): + pass + + def get_local_peers_from_package_json(self): + """ + Returns paths of direct workspace dependencies (source root related). + :rtype: list of str + """ + return self.load_package_json_from_dir(self.sources_path).get_workspace_dep_paths(base_path=self.module_path) + + def get_peers_from_package_json(self): + """ + Returns paths of workspace dependencies (source root related). + :rtype: list of str + """ + pj = self.load_package_json_from_dir(self.sources_path) + prefix_len = len(self.sources_root) + 1 + + return [p[prefix_len:] for p in pj.get_workspace_map(ignore_self=True).keys()] + + def _exec_command(self, args, include_defaults=True): + if not self.nodejs_bin_path: + raise PackageManagerError("Unable to execute command: nodejs_bin_path is not configured") + + cmd = ( + [self.nodejs_bin_path, self.script_path] + args + (self._get_default_options() if include_defaults else []) + ) + p = subprocess.Popen( + cmd, + cwd=self.build_path, + stdin=None, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + stdout, stderr = p.communicate() + + if p.returncode != 0: + self._dump_debug_log() + + raise PackageManagerCommandError(cmd, p.returncode, stdout.decode("utf-8"), stderr.decode("utf-8")) + + def _nm_path(self, *parts): + return os.path.join(build_nm_path(self.build_path), *parts) + + def _contrib_tarball_path(self, pkg): + return os.path.join(self.contribs_path, pkg.tarball_path) + + def _contrib_tarball_url(self, pkg): + return "file:" + self._contrib_tarball_path(pkg) + + def _get_default_options(self): + return ["--registry", NPM_REGISTRY_URL] + + def _get_debug_log_path(self): + return None + + def _dump_debug_log(self): + log_path = self._get_debug_log_path() + + if not log_path: + return + + try: + with open(log_path) as f: + sys.stderr.write("Package manager log {}:\n{}\n".format(log_path, f.read())) + except Exception: + sys.stderr.write("Failed to dump package manager log {}.\n".format(log_path)) diff --git a/build/plugins/lib/nots/package_manager/base/tests/package_json.py b/build/plugins/lib/nots/package_manager/base/tests/package_json.py new file mode 100644 index 0000000000..ccf7d4f607 --- /dev/null +++ b/build/plugins/lib/nots/package_manager/base/tests/package_json.py @@ -0,0 +1,201 @@ +import os +import pytest + +from build.plugins.lib.nots.package_manager.base.package_json import PackageJson, 
PackageJsonWorkspaceError + + +def test_get_name_exist(): + pj = PackageJson("/packages/foo/package.json") + pj.data = { + "name": "package-name", + } + + name = pj.get_name() + + assert name == "package-name" + + +def test_get_name_none(): + pj = PackageJson("/packages/foo/package.json") + pj.data = {} + + name = pj.get_name() + + assert name == "packages-foo" + + +def test_get_workspace_dep_spec_paths_ok(): + pj = PackageJson("/packages/foo/package.json") + pj.data = { + "dependencies": { + "@yandex-int/bar": "workspace:../bar", + }, + "devDependencies": { + "@yandex-int/baz": "workspace:../baz", + }, + } + + ws_dep_spec_paths = pj.get_workspace_dep_spec_paths() + + assert ws_dep_spec_paths == [ + ("@yandex-int/bar", "../bar"), + ("@yandex-int/baz", "../baz"), + ] + + +def test_get_workspace_dep_spec_paths_invalid_path(): + pj = PackageJson("/packages/foo/package.json") + pj.data = { + "dependencies": { + "@yandex-int/bar": "workspace:*", + }, + } + + with pytest.raises(PackageJsonWorkspaceError) as e: + pj.get_workspace_dep_spec_paths() + + assert ( + str(e.value) + == "Expected relative path specifier for workspace dependency, but got 'workspace:*' for @yandex-int/bar in /packages/foo/package.json" + ) + + +def test_get_workspace_dep_paths_ok(): + pj = PackageJson("/packages/foo/package.json") + pj.data = { + "dependencies": { + "@yandex-int/bar": "workspace:../bar", + }, + "devDependencies": { + "@yandex-int/baz": "workspace:../baz", + }, + } + + ws_dep_paths = pj.get_workspace_dep_paths() + + assert ws_dep_paths == [ + "/packages/bar", + "/packages/baz", + ] + + +def test_get_dep_specifier(): + pj = PackageJson("/packages/foo/package.json") + pj.data = { + "dependencies": { + "jestify": "0.0.1", + "eslint": ">= 7.27.0", + }, + "devDependencies": { + "jest": "27.1.0", + "eslinting": "0.0.2", + }, + } + + jest_spec = pj.get_dep_specifier("jest") + assert jest_spec == "27.1.0", "Got unexpected jest specifier: {}".format(jest_spec) + + eslint_spec = pj.get_dep_specifier("eslint") + assert eslint_spec == ">= 7.27.0", "Got unexpected eslint specifier: {}".format(eslint_spec) + + +def test_get_workspace_dep_paths_with_custom_base_path(): + pj = PackageJson("/packages/foo/package.json") + pj.data = { + "dependencies": { + "@yandex-int/bar": "workspace:../bar", + }, + "devDependencies": { + "@yandex-int/baz": "workspace:../baz", + }, + } + + ws_dep_paths = pj.get_workspace_dep_paths(base_path="custom/dir") + + assert ws_dep_paths == [ + "custom/bar", + "custom/baz", + ] + + +def test_get_workspace_deps_ok(): + pj = PackageJson("/packages/foo/package.json") + pj.data = { + "dependencies": { + "@yandex-int/bar": "workspace:../bar", + }, + "devDependencies": { + "@yandex-int/baz": "workspace:../baz", + }, + } + + def load_mock(cls, path): + p = PackageJson(path) + p.data = { + "name": "@yandex-int/{}".format(os.path.basename(os.path.dirname(path))), + } + return p + + PackageJson.load = classmethod(load_mock) + + ws_deps = pj.get_workspace_deps() + + assert len(ws_deps) == 2 + assert ws_deps[0].path == "/packages/bar/package.json" + assert ws_deps[1].path == "/packages/baz/package.json" + + +def test_get_workspace_deps_with_wrong_name(): + pj = PackageJson("/packages/foo/package.json") + pj.data = { + "dependencies": { + "@yandex-int/bar": "workspace:../bar", + }, + } + + def load_mock(cls, path): + p = PackageJson(path) + p.data = { + "name": "@shouldbe/{}".format(os.path.basename(os.path.dirname(path))), + } + return p + + PackageJson.load = classmethod(load_mock) + + with 
pytest.raises(PackageJsonWorkspaceError) as e: + pj.get_workspace_deps() + + assert ( + str(e.value) + == "Workspace dependency name mismatch, found '@yandex-int/bar' instead of '@shouldbe/bar' in /packages/foo/package.json" + ) + + +def test_get_workspace_map_ok(): + pj = PackageJson("/packages/foo/package.json") + pj.data = { + "dependencies": { + "@yandex-int/bar": "workspace:../bar", + }, + } + + def load_mock(cls, path): + name = os.path.basename(os.path.dirname(path)) + p = PackageJson(path) + p.data = { + "name": "@yandex-int/{}".format(name), + "dependencies": ({"@yandex-int/qux": "workspace:../qux"} if name == "bar" else {}), + } + return p + + PackageJson.load = classmethod(load_mock) + + ws_map = pj.get_workspace_map() + + assert len(ws_map) == 3 + assert ws_map["/packages/foo"][0].path == "/packages/foo/package.json" + assert ws_map["/packages/foo"][1] == 0 + assert ws_map["/packages/bar"][0].path == "/packages/bar/package.json" + assert ws_map["/packages/bar"][1] == 1 + assert ws_map["/packages/qux"][0].path == "/packages/qux/package.json" + assert ws_map["/packages/qux"][1] == 2 diff --git a/build/plugins/lib/nots/package_manager/base/tests/utils.py b/build/plugins/lib/nots/package_manager/base/tests/utils.py new file mode 100644 index 0000000000..4287beec47 --- /dev/null +++ b/build/plugins/lib/nots/package_manager/base/tests/utils.py @@ -0,0 +1,15 @@ +from build.plugins.lib.nots.package_manager.base import utils + + +def test_extract_package_name_from_path(): + happy_checklist = [ + ("@yandex-int/foo-bar-baz/some/path/inside/the/package", "@yandex-int/foo-bar-baz"), + ("@yandex-int/foo-bar-buzz", "@yandex-int/foo-bar-buzz"), + ("package-wo-scope", "package-wo-scope"), + ("p", "p"), + ("", ""), + ] + + for item in happy_checklist: + package_name = utils.extract_package_name_from_path(item[0]) + assert package_name == item[1] diff --git a/build/plugins/lib/nots/package_manager/base/tests/ya.make b/build/plugins/lib/nots/package_manager/base/tests/ya.make new file mode 100644 index 0000000000..1bece69c33 --- /dev/null +++ b/build/plugins/lib/nots/package_manager/base/tests/ya.make @@ -0,0 +1,14 @@ +PY23_TEST() + +OWNER(g:frontend-build-platform) + +TEST_SRCS( + package_json.py + utils.py +) + +PEERDIR( + build/plugins/lib/nots/package_manager/base +) + +END() diff --git a/build/plugins/lib/nots/package_manager/base/utils.py b/build/plugins/lib/nots/package_manager/base/utils.py new file mode 100644 index 0000000000..017bf4ca41 --- /dev/null +++ b/build/plugins/lib/nots/package_manager/base/utils.py @@ -0,0 +1,29 @@ +import os + +from .constants import PACKAGE_JSON_FILENAME, NODE_MODULES_DIRNAME, NODE_MODULES_BUNDLE_FILENAME + + +def s_rooted(p): + return os.path.join("$S", p) + + +def b_rooted(p): + return os.path.join("$B", p) + + +def build_pj_path(p): + return os.path.join(p, PACKAGE_JSON_FILENAME) + + +def build_nm_path(p): + return os.path.join(p, NODE_MODULES_DIRNAME) + + +def build_nm_bundle_path(p): + return os.path.join(p, NODE_MODULES_BUNDLE_FILENAME) + + +def extract_package_name_from_path(p): + # if we have scope prefix then we are using the first two tokens, otherwise - only the first one + parts = p.split("/", 2) + return "/".join(parts[:2]) if p.startswith("@") else parts[0] diff --git a/build/plugins/lib/nots/package_manager/base/ya.make b/build/plugins/lib/nots/package_manager/base/ya.make new file mode 100644 index 0000000000..4b7f22f05a --- /dev/null +++ b/build/plugins/lib/nots/package_manager/base/ya.make @@ -0,0 +1,23 @@ +PY23_LIBRARY() + 
+OWNER(g:frontend-build-platform) + +PY_SRCS( + __init__.py + constants.py + lockfile.py + node_modules_bundler.py + package_json.py + package_manager.py + utils.py +) + +PEERDIR( + contrib/python/six +) + +END() + +RECURSE_FOR_TESTS( + tests +) diff --git a/build/plugins/lib/nots/package_manager/pnpm/__init__.py b/build/plugins/lib/nots/package_manager/pnpm/__init__.py new file mode 100644 index 0000000000..b3a3c20c02 --- /dev/null +++ b/build/plugins/lib/nots/package_manager/pnpm/__init__.py @@ -0,0 +1,12 @@ +from . import constants +from .lockfile import PnpmLockfile +from .package_manager import PnpmPackageManager +from .workspace import PnpmWorkspace + + +__all__ = [ + "constants", + "PnpmLockfile", + "PnpmPackageManager", + "PnpmWorkspace", +] diff --git a/build/plugins/lib/nots/package_manager/pnpm/constants.py b/build/plugins/lib/nots/package_manager/pnpm/constants.py new file mode 100644 index 0000000000..e84a78c55e --- /dev/null +++ b/build/plugins/lib/nots/package_manager/pnpm/constants.py @@ -0,0 +1,2 @@ +PNPM_WS_FILENAME = "pnpm-workspace.yaml" +PNPM_LOCKFILE_FILENAME = "pnpm-lock.yaml" diff --git a/build/plugins/lib/nots/package_manager/pnpm/lockfile.py b/build/plugins/lib/nots/package_manager/pnpm/lockfile.py new file mode 100644 index 0000000000..79c351b7fa --- /dev/null +++ b/build/plugins/lib/nots/package_manager/pnpm/lockfile.py @@ -0,0 +1,164 @@ +import base64 +import binascii +import yaml +import os + +from six.moves.urllib import parse as urlparse +from six import iteritems + +from ..base import PackageJson, BaseLockfile, LockfilePackageMeta, LockfilePackageMetaInvalidError + + +class PnpmLockfile(BaseLockfile): + IMPORTER_KEYS = PackageJson.DEP_KEYS + ("specifiers",) + + def read(self): + with open(self.path, "r") as f: + self.data = yaml.load(f, Loader=yaml.CSafeLoader) + + def write(self, path=None): + """ + :param path: path to store lockfile, defaults to original path + :type path: str + """ + if path is None: + path = self.path + + with open(path, "w") as f: + yaml.dump(self.data, f, Dumper=yaml.CSafeDumper) + + def get_packages_meta(self): + """ + Extracts packages meta from lockfile. + :rtype: list of LockfilePackageMeta + """ + packages = self.data.get("packages", {}) + + return map(lambda x: _parse_package_meta(*x), iteritems(packages)) + + def update_tarball_resolutions(self, fn): + """ + :param fn: maps `LockfilePackageMeta` instance to new `resolution.tarball` value + :type fn: lambda + """ + packages = self.data.get("packages", {}) + + for key, meta in iteritems(packages): + meta["resolution"]["tarball"] = fn(_parse_package_meta(key, meta)) + packages[key] = meta + + def get_importers(self): + """ + Returns "importers" section from the lockfile or creates similar structure from "dependencies" and "specifiers". + :rtype: dict of dict of dict of str + """ + importers = self.data.get("importers") + if importers is not None: + return importers + + importer = {k: self.data[k] for k in self.IMPORTER_KEYS if k in self.data} + + return {".": importer} if importer else {} + + def merge(self, lf): + """ + Merges two lockfiles: + 1. Converts the lockfile to monorepo-like lockfile with "importers" section instead of "dependencies" and "specifiers". + 2. Merges `lf`'s dependencies and specifiers to importers. + 3. Merges `lf`'s packages to the lockfile. 
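+        Importer keys taken from `lf` are re-rooted relative to this lockfile's
+        directory so that the merged importer paths stay valid.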
+ :param lf: lockfile to merge + :type lf: PnpmLockfile + """ + importers = self.get_importers() + build_path = os.path.dirname(self.path) + + for [importer, imports] in iteritems(lf.get_importers()): + importer_path = os.path.normpath(os.path.join(os.path.dirname(lf.path), importer)) + importer_rel_path = os.path.relpath(importer_path, build_path) + importers[importer_rel_path] = imports + + self.data["importers"] = importers + + for k in self.IMPORTER_KEYS: + self.data.pop(k, None) + + packages = self.data.get("packages", {}) + for k, v in iteritems(lf.data.get("packages", {})): + if k not in packages: + packages[k] = v + self.data["packages"] = packages + + +def _parse_package_meta(key, meta): + """ + :param key: uniq package key from lockfile + :type key: string + :param meta: package meta dict from lockfile + :type meta: dict + :rtype: LockfilePackageMetaInvalidError + """ + try: + name, version = _parse_package_key(key) + sky_id = _parse_sky_id_from_tarball_url(meta["resolution"]["tarball"]) + integrity_algorithm, integrity = _parse_package_integrity(meta["resolution"]["integrity"]) + except KeyError as e: + raise TypeError("Invalid package meta for key {}, missing {} key".format(key, e)) + except LockfilePackageMetaInvalidError as e: + raise TypeError("Invalid package meta for key {}, parse error: {}".format(key, e)) + + return LockfilePackageMeta(name, version, sky_id, integrity, integrity_algorithm) + + +def _parse_package_key(key): + """ + Returns tuple of scoped package name and version. + :param key: package key in format "/({scope}/)?{package_name}/{package_version}(_{peer_dependencies})?" + :type key: string + :rtype: (str, str) + """ + try: + tokens = key.split("/")[1:] + version = tokens.pop().split("_", 1)[0] + + if len(tokens) < 1 or len(tokens) > 2: + raise TypeError() + except (IndexError, TypeError): + raise LockfilePackageMetaInvalidError("Invalid package key") + + return ("/".join(tokens), version) + + +def _parse_sky_id_from_tarball_url(tarball_url): + """ + :param tarball_url: tarball url + :type tarball_url: string + :rtype: string + """ + if tarball_url.startswith("file:"): + return "" + + rbtorrent_param = urlparse.parse_qs(urlparse.urlparse(tarball_url).query).get("rbtorrent") + + if rbtorrent_param is None: + raise LockfilePackageMetaInvalidError("Missing rbtorrent param in tarball url {}".format(tarball_url)) + + return "rbtorrent:{}".format(rbtorrent_param[0]) + + +def _parse_package_integrity(integrity): + """ + Returns tuple of algorithm and hash (hex). 
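+    E.g. "sha512-<base64 of hash>" is decoded to ("sha512", "<hex of hash>").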
+ :param integrity: package integrity in format "{algo}-{base64_of_hash}" + :type integrity: string + :rtype: (str, str) + """ + algo, hash_b64 = integrity.split("-", 1) + + try: + hash_hex = binascii.hexlify(base64.b64decode(hash_b64)) + except TypeError as e: + raise LockfilePackageMetaInvalidError( + "Invalid package integrity encoding, integrity: {}, error: {}".format(integrity, e) + ) + + return (algo, hash_hex) diff --git a/build/plugins/lib/nots/package_manager/pnpm/package_manager.py b/build/plugins/lib/nots/package_manager/pnpm/package_manager.py new file mode 100644 index 0000000000..3960f6498c --- /dev/null +++ b/build/plugins/lib/nots/package_manager/pnpm/package_manager.py @@ -0,0 +1,213 @@ +import os +import yaml + +from six import iteritems + +from ..base import BasePackageManager, PackageManagerError +from ..base.utils import build_pj_path, build_nm_path, build_nm_bundle_path, s_rooted, b_rooted +from ..base.node_modules_bundler import bundle_node_modules +from ..base.constants import NODE_MODULES_BUNDLE_FILENAME +from .lockfile import PnpmLockfile +from .workspace import PnpmWorkspace +from .utils import build_lockfile_path, build_ws_config_path + + +class PnpmPackageManager(BasePackageManager): + _STORE_NM_PATH = os.path.join(".pnpm", "store") + _VSTORE_NM_PATH = os.path.join(".pnpm", "virtual-store") + _STORE_VER = "v3" + + @classmethod + def load_lockfile(cls, path): + """ + :param path: path to lockfile + :type path: str + :rtype: PnpmLockfile + """ + return PnpmLockfile.load(path) + + @classmethod + def load_lockfile_from_dir(cls, dir_path): + """ + :param dir_path: path to directory with lockfile + :type dir_path: str + :rtype: PnpmLockfile + """ + return cls.load_lockfile(build_lockfile_path(dir_path)) + + def create_node_modules(self): + """ + Creates node_modules directory according to the lockfile. + """ + ws = self._prepare_workspace() + self._exec_command( + [ + "install", + "--offline", + "--frozen-lockfile", + "--store-dir", + self._nm_path(self._STORE_NM_PATH), + "--virtual-store-dir", + self._nm_path(self._VSTORE_NM_PATH), + "--no-verify-store-integrity", + "--package-import-method", + "hardlink", + "--ignore-pnpmfile", + "--ignore-scripts", + "--strict-peer-dependencies", + ] + ) + self._fix_stores_in_modules_yaml() + + bundle_node_modules( + build_root=self.build_root, + node_modules_path=self._nm_path(), + peers=ws.get_paths(base_path=self.module_path, ignore_self=True), + bundle_path=NODE_MODULES_BUNDLE_FILENAME, + ) + + def calc_node_modules_inouts(self): + """ + Returns input and output paths for command that creates `node_modules` bundle. + Inputs: + - source package.json and lockfile, + - built package.jsons of all deps, + - merged lockfiles and workspace configs of direct non-leave deps, + - tarballs. + Outputs: + - merged lockfile, + - generated workspace config, + - created node_modules bundle. + :rtype: (list of str, list of str) + """ + ins = [ + s_rooted(build_pj_path(self.module_path)), + s_rooted(build_lockfile_path(self.module_path)), + ] + outs = [ + b_rooted(build_lockfile_path(self.module_path)), + b_rooted(build_ws_config_path(self.module_path)), + b_rooted(build_nm_bundle_path(self.module_path)), + ] + + # Source lockfiles are used only to get tarballs info. 
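+        # Besides this module's own lockfile, walk the workspace map: every dep
+        # contributes its built package.json, and direct deps (depth == 1) also
+        # contribute their merged lockfile and workspace config; deps without a
+        # source lockfile are leaves and are skipped.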
+ src_lf_paths = [build_lockfile_path(self.sources_path)] + pj = self.load_package_json_from_dir(self.sources_path) + + for [dep_src_path, (_, depth)] in iteritems(pj.get_workspace_map(ignore_self=True)): + dep_mod_path = dep_src_path[len(self.sources_root) + 1 :] + # pnpm requires all package.jsons. + ins.append(b_rooted(build_pj_path(dep_mod_path))) + + dep_lf_src_path = build_lockfile_path(dep_src_path) + if not os.path.isfile(dep_lf_src_path): + # It is ok for leaves. + continue + src_lf_paths.append(dep_lf_src_path) + + if depth == 1: + ins.append(b_rooted(build_ws_config_path(dep_mod_path))) + ins.append(b_rooted(build_lockfile_path(dep_mod_path))) + + for pkg in self.extract_packages_meta_from_lockfiles(src_lf_paths): + ins.append(b_rooted(self._contrib_tarball_path(pkg))) + + return (ins, outs) + + def extract_packages_meta_from_lockfiles(self, lf_paths): + """ + :type lf_paths: iterable of BaseLockfile + :rtype: iterable of LockfilePackageMeta + """ + tarballs = set() + + for lf_path in lf_paths: + try: + for pkg in self.load_lockfile(lf_path).get_packages_meta(): + if pkg.tarball_path not in tarballs: + tarballs.add(pkg.tarball_path) + yield pkg + except Exception as e: + raise PackageManagerError("Unable to process lockfile {}: {}".format(lf_path, e)) + + def _prepare_workspace(self): + """ + :rtype: PnpmWorkspace + """ + pj = self._build_package_json() + ws = PnpmWorkspace(build_ws_config_path(self.build_path)) + ws.set_from_package_json(pj) + dep_paths = ws.get_paths(ignore_self=True) + self._build_merged_workspace_config(ws, dep_paths) + self._build_merged_lockfile(dep_paths) + + return ws + + def _build_package_json(self): + """ + :rtype: PackageJson + """ + pj = self.load_package_json_from_dir(self.sources_path) + + if not os.path.exists(self.build_path): + os.makedirs(self.build_path, exist_ok=True) + + pj.path = build_pj_path(self.build_path) + pj.write() + + return pj + + def _build_merged_lockfile(self, dep_paths): + """ + :type dep_paths: list of str + :rtype: PnpmLockfile + """ + lf = self.load_lockfile_from_dir(self.sources_path) + # Change to the output path for correct path calcs on merging. + lf.path = build_lockfile_path(self.build_path) + + for dep_path in dep_paths: + lf_path = build_lockfile_path(dep_path) + if os.path.isfile(lf_path): + lf.merge(self.load_lockfile(lf_path)) + + lf.update_tarball_resolutions(lambda p: self._contrib_tarball_url(p)) + lf.write() + + def _build_merged_workspace_config(self, ws, dep_paths): + """ + NOTE: This method mutates `ws`. + :type ws: PnpmWorkspaceConfig + :type dep_paths: list of str + """ + for dep_path in dep_paths: + ws_config_path = build_ws_config_path(dep_path) + if os.path.isfile(ws_config_path): + ws.merge(PnpmWorkspace.load(ws_config_path)) + + ws.write() + + def _fix_stores_in_modules_yaml(self): + """ + Ensures that store paths are the same as would be after installing deps in the source dir. + This is required to reuse `node_modules` after build. + """ + with open(self._nm_path(".modules.yaml"), "r+") as f: + data = yaml.load(f, Loader=yaml.CSafeLoader) + # NOTE: pnpm requires absolute store path here. 
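+        # Pointing at the source-tree location (rather than the build dir) keeps
+        # .modules.yaml identical to what a local install would produce, which is
+        # what allows reusing the bundled node_modules afterwards.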
+ data["storeDir"] = os.path.join(build_nm_path(self.sources_path), self._STORE_NM_PATH, self._STORE_VER) + data["virtualStoreDir"] = self._VSTORE_NM_PATH + f.seek(0) + yaml.dump(data, f, Dumper=yaml.CSafeDumper) + f.truncate() + + def _get_default_options(self): + return super(PnpmPackageManager, self)._get_default_options() + [ + "--stream", + "--reporter", + "append-only", + "--no-color", + ] + + def _get_debug_log_path(self): + return self._nm_path(".pnpm-debug.log") diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py b/build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py new file mode 100644 index 0000000000..5985f0261e --- /dev/null +++ b/build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py @@ -0,0 +1,326 @@ +import pytest + +from build.plugins.lib.nots.package_manager.pnpm.lockfile import PnpmLockfile + + +def test_lockfile_get_packages_meta_ok(): + lf = PnpmLockfile(path="/pnpm-lock.yaml") + lf.data = { + "packages": { + "/@babel/cli/7.6.2_@babel+core@7.6.2": { + "resolution": { + "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==", + "tarball": "@babel%2fcli/-/cli-7.6.2.tgz?rbtorrent=cb1849da3e4947e56a8f6bde6a1ec42703ddd187", + }, + }, + }, + } + + packages = list(lf.get_packages_meta()) + pkg = packages[0] + + assert len(packages) == 1 + assert pkg.name == "@babel/cli" + assert pkg.version == "7.6.2" + assert pkg.sky_id == "rbtorrent:cb1849da3e4947e56a8f6bde6a1ec42703ddd187" + assert ( + pkg.integrity + == b"24367e4ff6ebf693df4f696600c272a490d34d31ccf5e3c3fc40f5d13463473255744572f89077891961cd8993b796243601efc561a55159cbb5dbfaaee883ad" + ) + assert pkg.integrity_algorithm == "sha512" + + +def test_lockfile_get_packages_empty(): + lf = PnpmLockfile(path="/pnpm-lock.yaml") + lf.data = {} + + assert len(list(lf.get_packages_meta())) == 0 + + +def test_package_meta_invalid_key(): + lf = PnpmLockfile(path="/pnpm-lock.yaml") + lf.data = { + "packages": { + "in/valid": {}, + }, + } + + with pytest.raises(TypeError) as e: + list(lf.get_packages_meta()) + + assert str(e.value) == "Invalid package meta for key in/valid, parse error: Invalid package key" + + +def test_package_meta_missing_resolution(): + lf = PnpmLockfile(path="/pnpm-lock.yaml") + lf.data = { + "packages": { + "/valid/1.2.3": {}, + }, + } + + with pytest.raises(TypeError) as e: + list(lf.get_packages_meta()) + + assert str(e.value) == "Invalid package meta for key /valid/1.2.3, missing 'resolution' key" + + +def test_package_meta_missing_tarball(): + lf = PnpmLockfile(path="/pnpm-lock.yaml") + lf.data = { + "packages": { + "/valid/1.2.3": { + "resolution": {}, + }, + }, + } + + with pytest.raises(TypeError) as e: + list(lf.get_packages_meta()) + + assert str(e.value) == "Invalid package meta for key /valid/1.2.3, missing 'tarball' key" + + +def test_package_meta_missing_rbtorrent(): + lf = PnpmLockfile(path="/pnpm-lock.yaml") + lf.data = { + "packages": { + "/valid/1.2.3": { + "resolution": { + "tarball": "valid-1.2.3.tgz", + }, + }, + }, + } + + with pytest.raises(TypeError) as e: + list(lf.get_packages_meta()) + + assert ( + str(e.value) + == "Invalid package meta for key /valid/1.2.3, parse error: Missing rbtorrent param in tarball url valid-1.2.3.tgz" + ) + + +def test_lockfile_meta_file_tarball(): + lf = PnpmLockfile(path="/pnpm-lock.yaml") + lf.data = { + "packages": { + "/@babel/cli/7.6.2": { + "resolution": { + "integrity": 
"sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==", + "tarball": "file:/some/abs/path.tgz", + }, + }, + }, + } + + packages = list(lf.get_packages_meta()) + pkg = packages[0] + + assert len(packages) == 1 + assert pkg.name == "@babel/cli" + assert pkg.version == "7.6.2" + assert pkg.sky_id == "" + + +def test_lockfile_update_tarball_resolutions_ok(): + lf = PnpmLockfile(path="/pnpm-lock.yaml") + lf.data = { + "packages": { + "/@babel/cli/7.6.2_@babel+core@7.6.2": { + "resolution": { + "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==", + "tarball": "@babel%2fcli/-/cli-7.6.2.tgz?rbtorrent=cb1849da3e4947e56a8f6bde6a1ec42703ddd187", + }, + }, + }, + } + + lf.update_tarball_resolutions(lambda p: p.name) + + assert lf.data["packages"]["/@babel/cli/7.6.2_@babel+core@7.6.2"]["resolution"]["tarball"] == "@babel/cli" + + +def test_lockfile_merge(): + lf1 = PnpmLockfile(path="/foo/pnpm-lock.yaml") + lf1.data = { + "dependencies": { + "a": "1.0.0", + }, + "specifiers": { + "a": "1.0.0", + }, + "packages": { + "/a/1.0.0": {}, + }, + } + + lf2 = PnpmLockfile(path="/bar/pnpm-lock.yaml") + lf2.data = { + "dependencies": { + "b": "1.0.0", + }, + "specifiers": { + "b": "1.0.0", + }, + "packages": { + "/b/1.0.0": {}, + }, + } + + lf3 = PnpmLockfile(path="/another/baz/pnpm-lock.yaml") + lf3.data = { + "importers": { + ".": { + "dependencies": { + "@a/qux": "link:../qux", + "a": "1.0.0", + }, + "specifiers": { + "@a/qux": "workspace:../qux", + "a": "1.0.0", + }, + }, + "../qux": { + "dependencies": { + "b": "1.0.1", + }, + "specifiers": { + "b": "1.0.1", + }, + }, + }, + "packages": { + "/a/1.0.0": {}, + "/b/1.0.1": {}, + }, + } + + lf4 = PnpmLockfile(path="/another/quux/pnpm-lock.yaml") + lf4.data = { + "dependencies": { + "@a/bar": "link:../../bar", + }, + "specifiers": { + "@a/bar": "workspace:../../bar", + }, + } + + lf1.merge(lf2) + lf1.merge(lf3) + lf1.merge(lf4) + + assert lf1.data == { + "importers": { + ".": { + "dependencies": { + "a": "1.0.0", + }, + "specifiers": { + "a": "1.0.0", + }, + }, + "../bar": { + "dependencies": { + "b": "1.0.0", + }, + "specifiers": { + "b": "1.0.0", + }, + }, + "../another/baz": { + "dependencies": { + "@a/qux": "link:../qux", + "a": "1.0.0", + }, + "specifiers": { + "@a/qux": "workspace:../qux", + "a": "1.0.0", + }, + }, + "../another/qux": { + "dependencies": { + "b": "1.0.1", + }, + "specifiers": { + "b": "1.0.1", + }, + }, + "../another/quux": { + "dependencies": { + "@a/bar": "link:../../bar", + }, + "specifiers": { + "@a/bar": "workspace:../../bar", + }, + }, + }, + "packages": { + "/a/1.0.0": {}, + "/b/1.0.0": {}, + "/b/1.0.1": {}, + }, + } + + +def test_lockfile_merge_dont_overrides_packages(): + lf1 = PnpmLockfile(path="/foo/pnpm-lock.yaml") + lf1.data = { + "dependencies": { + "a": "1.0.0", + }, + "specifiers": { + "a": "1.0.0", + }, + "packages": { + "/a/1.0.0": {}, + }, + } + + lf2 = PnpmLockfile(path="/bar/pnpm-lock.yaml") + lf2.data = { + "dependencies": { + "a": "1.0.0", + "b": "1.0.0", + }, + "specifiers": { + "a": "1.0.0", + "b": "1.0.0", + }, + "packages": { + "/a/1.0.0": { + "overriden": True, + }, + "/b/1.0.0": {}, + }, + } + + lf1.merge(lf2) + + assert lf1.data == { + "importers": { + ".": { + "dependencies": { + "a": "1.0.0", + }, + "specifiers": { + "a": "1.0.0", + }, + }, + "../bar": { + "dependencies": { + "a": "1.0.0", + "b": "1.0.0", + }, + "specifiers": { + "a": "1.0.0", + "b": "1.0.0", + }, + }, + }, + "packages": { + "/a/1.0.0": {}, + 
"/b/1.0.0": {}, + }, + } diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py b/build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py new file mode 100644 index 0000000000..ffc010de88 --- /dev/null +++ b/build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py @@ -0,0 +1,68 @@ +from build.plugins.lib.nots.package_manager.base import PackageJson +from build.plugins.lib.nots.package_manager.pnpm.workspace import PnpmWorkspace + + +def test_workspace_get_paths(): + ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml") + ws.packages = set([".", "../bar", "../../another/baz"]) + + assert sorted(ws.get_paths()) == [ + "/another/baz", + "/packages/bar", + "/packages/foo", + ] + + +def test_workspace_get_paths_with_custom_base_path_without_self(): + ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml") + ws.packages = set([".", "../bar", "../../another/baz"]) + + assert sorted(ws.get_paths(base_path="some/custom/dir", ignore_self=True)) == [ + "some/another/baz", + "some/custom/bar", + ] + + +def test_workspace_set_from_package_json(): + ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml") + pj = PackageJson(path="/packages/foo/package.json") + pj.data = { + "dependencies": { + "@a/bar": "workspace:../bar", + }, + "devDependencies": { + "@a/baz": "workspace:../../another/baz", + }, + "peerDependencies": { + "@a/qux": "workspace:../../another/qux", + }, + "optionalDependencies": { + "@a/quux": "workspace:../../another/quux", + }, + } + + ws.set_from_package_json(pj) + + assert sorted(ws.get_paths()) == [ + "/another/baz", + "/another/quux", + "/another/qux", + "/packages/bar", + "/packages/foo", + ] + + +def test_workspace_merge(): + ws1 = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml") + ws1.packages = set([".", "../bar", "../../another/baz"]) + ws2 = PnpmWorkspace(path="/another/baz/pnpm-workspace.yaml") + ws2.packages = set([".", "../qux"]) + + ws1.merge(ws2) + + assert sorted(ws1.get_paths()) == [ + "/another/baz", + "/another/qux", + "/packages/bar", + "/packages/foo", + ] diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make b/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make new file mode 100644 index 0000000000..44877dfc1b --- /dev/null +++ b/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make @@ -0,0 +1,15 @@ +PY23_TEST() + +OWNER(g:frontend-build-platform) + +TEST_SRCS( + lockfile.py + workspace.py +) + +PEERDIR( + build/plugins/lib/nots/package_manager/base + build/plugins/lib/nots/package_manager/pnpm +) + +END() diff --git a/build/plugins/lib/nots/package_manager/pnpm/utils.py b/build/plugins/lib/nots/package_manager/pnpm/utils.py new file mode 100644 index 0000000000..1fa4291b9d --- /dev/null +++ b/build/plugins/lib/nots/package_manager/pnpm/utils.py @@ -0,0 +1,11 @@ +import os + +from .constants import PNPM_LOCKFILE_FILENAME, PNPM_WS_FILENAME + + +def build_lockfile_path(p): + return os.path.join(p, PNPM_LOCKFILE_FILENAME) + + +def build_ws_config_path(p): + return os.path.join(p, PNPM_WS_FILENAME) diff --git a/build/plugins/lib/nots/package_manager/pnpm/workspace.py b/build/plugins/lib/nots/package_manager/pnpm/workspace.py new file mode 100644 index 0000000000..e596e20a18 --- /dev/null +++ b/build/plugins/lib/nots/package_manager/pnpm/workspace.py @@ -0,0 +1,81 @@ +import os +import yaml + + +class PnpmWorkspace(object): + @classmethod + def load(cls, path): + ws = cls(path) + ws.read() + + return ws + + def __init__(self, path): + if not os.path.isabs(path): + raise 
TypeError("Absolute path required, given: {}".format(path)) + + self.path = path + # NOTE: pnpm requires relative workspace paths. + self.packages = set() + + def read(self): + with open(self.path) as f: + self.packages = set(yaml.load(f, Loader=yaml.CSafeLoader).get("packages", [])) + + def write(self, path=None): + if not path: + path = self.path + + with open(path, "w") as f: + data = { + "packages": list(self.packages), + } + yaml.dump(data, f, Dumper=yaml.CSafeDumper) + + def get_paths(self, base_path=None, ignore_self=False): + """ + Returns absolute paths of the workspace packages. + :param base_path: base path to resolve relative dep paths + :type base_path: str + :param ignore_self: whether path of the current module will be excluded (if present) + :type ignore_self: bool + :rtype: list of str + """ + if base_path is None: + base_path = os.path.dirname(self.path) + + return [ + os.path.normpath(os.path.join(base_path, pkg_path)) + for pkg_path in self.packages + if not ignore_self or pkg_path != "." + ] + + def set_from_package_json(self, package_json): + """ + Sets packages to "workspace" deps from given package.json. + :param package_json: package.json of workspace + :type package_json: PackageJson + """ + if os.path.dirname(package_json.path) != os.path.dirname(self.path): + raise TypeError( + "package.json should be in workspace directory {}, given: {}".format( + os.path.dirname(self.path), package_json.path + ) + ) + + self.packages = set(path for _, path in package_json.get_workspace_dep_spec_paths()) + # Add relative path to self. + self.packages.add(".") + + def merge(self, ws): + """ + Adds `ws`'s packages to the workspace. + :param ws: workspace to merge + :type ws: PnpmWorkspace + """ + dir_path = os.path.dirname(self.path) + ws_dir_path = os.path.dirname(ws.path) + + for p_rel_path in ws.packages: + p_path = os.path.normpath(os.path.join(ws_dir_path, p_rel_path)) + self.packages.add(os.path.relpath(p_path, dir_path)) diff --git a/build/plugins/lib/nots/package_manager/pnpm/ya.make b/build/plugins/lib/nots/package_manager/pnpm/ya.make new file mode 100644 index 0000000000..f57ae4a2ba --- /dev/null +++ b/build/plugins/lib/nots/package_manager/pnpm/ya.make @@ -0,0 +1,24 @@ +PY23_LIBRARY() + +OWNER(g:frontend-build-platform) + +PY_SRCS( + __init__.py + constants.py + lockfile.py + package_manager.py + workspace.py + utils.py +) + +PEERDIR( + build/plugins/lib/nots/package_manager/base + contrib/python/PyYAML + contrib/python/six +) + +END() + +RECURSE_FOR_TESTS( + tests +) diff --git a/build/plugins/lib/nots/package_manager/ya.make b/build/plugins/lib/nots/package_manager/ya.make new file mode 100644 index 0000000000..3ac1ea9103 --- /dev/null +++ b/build/plugins/lib/nots/package_manager/ya.make @@ -0,0 +1,14 @@ +PY23_LIBRARY() + +OWNER(g:frontend-build-platform) + +PY_SRCS( + __init__.py +) + +PEERDIR( + build/plugins/lib/nots/package_manager/base + build/plugins/lib/nots/package_manager/pnpm +) + +END() diff --git a/build/plugins/lib/nots/semver/__init__.py b/build/plugins/lib/nots/semver/__init__.py new file mode 100644 index 0000000000..be4319f9f3 --- /dev/null +++ b/build/plugins/lib/nots/semver/__init__.py @@ -0,0 +1,7 @@ +from .semver import Version, Operator, VersionRange + +__all__ = [ + "Version", + "Operator", + "VersionRange", +] diff --git a/build/plugins/lib/nots/semver/semver.py b/build/plugins/lib/nots/semver/semver.py new file mode 100644 index 0000000000..1398da8586 --- /dev/null +++ b/build/plugins/lib/nots/semver/semver.py @@ -0,0 +1,244 @@ +import re + 
+ +class Version: + """ + This class is intended to provide utility methods to work with semver ranges. + Right now it is limited to the simplest case: a ">=" operator followed by an exact version with no prerelease or build specification. + Example: ">= 1.2.3" + """ + + @classmethod + def from_str(cls, input): + """ + :param str input: save exact formatted version e.g. 1.2.3 + :rtype: Version + :raises: ValueError + """ + parts = input.strip().split(".", 2) + major = int(parts[0]) + minor = int(parts[1]) + patch = int(parts[2]) + + return cls(major, minor, patch) + + STABLE_VERSION_RE = re.compile(r'^\d+\.\d+\.\d+$') + + @classmethod + def is_stable(cls, v): + """ + Verifies that the version is in a supported format. + + :param v:string with the version + :return: bool + """ + return cls.STABLE_VERSION_RE.match(v) is not None + + @classmethod + def cmp(cls, a, b): + """ + Compare two versions. Should be used with "cmp_to_key" wrapper in sorted(), min(), max()... + + For example: + sorted(["1.2.3", "2.4.2", "1.2.7"], key=cmp_to_key(Version.cmp)) + + :param a:string with version or Version instance + :param b:string with version or Version instance + :return: int + :raises: ValueError + """ + a_version = a if isinstance(a, cls) else cls.from_str(a) + b_version = b if isinstance(b, cls) else cls.from_str(b) + + if a_version > b_version: + return 1 + elif a_version < b_version: + return -1 + else: + return 0 + + __slots__ = "_values" + + def __init__(self, major, minor, patch): + """ + :param int major + :param int minor + :param int patch + :raises ValueError + """ + version_parts = { + "major": major, + "minor": minor, + "patch": patch, + } + + for name, value in version_parts.items(): + value = int(value) + version_parts[name] = value + if value < 0: + raise ValueError("{!r} is negative. 
A version can only be positive.".format(name)) + + self._values = (version_parts["major"], version_parts["minor"], version_parts["patch"]) + + def __str__(self): + return "{}.{}.{}".format(self._values[0], self._values[1], self._values[2]) + + def __repr__(self): + return '<Version({})>'.format(self) + + def __eq__(self, other): + """ + :param Version|str other + :rtype: bool + """ + if isinstance(other, str): + if self.is_stable(other): + other = self.from_str(other) + else: + return False + + return self.as_tuple() == other.as_tuple() + + def __ne__(self, other): + return not self == other + + def __gt__(self, other): + """ + :param Version other + :rtype: bool + """ + return self.as_tuple() > other.as_tuple() + + def __ge__(self, other): + """ + :param Version other + :rtype: bool + """ + return self.as_tuple() >= other.as_tuple() + + def __lt__(self, other): + """ + :param Version other + :rtype: bool + """ + return self.as_tuple() < other.as_tuple() + + def __le__(self, other): + """ + :param Version other + :rtype: bool + """ + return self.as_tuple() <= other.as_tuple() + + @property + def major(self): + """The major part of the version (read-only).""" + return self._values[0] + + @major.setter + def major(self, value): + raise AttributeError("Attribute 'major' is readonly") + + @property + def minor(self): + """The minor part of the version (read-only).""" + return self._values[1] + + @minor.setter + def minor(self, value): + raise AttributeError("Attribute 'minor' is readonly") + + @property + def patch(self): + """The patch part of the version (read-only).""" + return self._values[2] + + @patch.setter + def patch(self, value): + raise AttributeError("Attribute 'patch' is readonly") + + def as_tuple(self): + """ + :rtype: tuple + """ + return self._values + + +class Operator: + EQ = "=" + GT = ">" + GE = ">=" + LT = "<" + LE = "<=" + + +class VersionRange: + @classmethod + def operator_is_ok(self, operator): + return [Operator.GE, Operator.EQ, None].count(operator) + + @classmethod + def from_str(cls, input): + """ + :param str input + :rtype: VersionRange + :raises: ValueError + """ + m = re.match(r"^\s*([<>=]+)?\s*(\d+\.\d+\.\d+)\s*$", input) + res = m.groups() if m else None + if not res or not cls.operator_is_ok(res[0]): + raise ValueError( + "Unsupported version range: '{}'. 
Currently we only support ranges with stable versions and GE / EQ: '>= 1.2.3' / '= 1.2.3' / '1.2.3'".format( + input + ) + ) + + version = Version.from_str(res[1]) + + return cls(res[0], version) + + __slots__ = ("_operator", "_version") + + def __init__(self, operator, version): + """ + :param str operator + :raises: ValueError + """ + if not self.operator_is_ok(operator): + raise ValueError("Unsupported range operator '{}'".format(operator)) + + # None defaults to Operator.EQ + self._operator = operator or Operator.EQ + self._version = version + + @property + def operator(self): + """The comparison operator to be used (read-only).""" + return self._operator + + @operator.setter + def operator(self, value): + raise AttributeError("Attribute 'operator' is readonly") + + @property + def version(self): + """Version to be used with the operator (read-only).""" + return self._version + + @version.setter + def version(self, value): + raise AttributeError("Attribute 'version' is readonly") + + def is_satisfied_by(self, version): + """ + :param Version version + :rtype: bool + :raises: ValueError + """ + if self._operator == Operator.GE: + return version >= self._version + + if self._operator == Operator.EQ: + return version == self._version + + raise ValueError("Unsupported operator '{}'".format(self._operator)) diff --git a/build/plugins/lib/nots/semver/tests/test_version.py b/build/plugins/lib/nots/semver/tests/test_version.py new file mode 100644 index 0000000000..e6c0e44225 --- /dev/null +++ b/build/plugins/lib/nots/semver/tests/test_version.py @@ -0,0 +1,269 @@ +from functools import cmp_to_key + +from build.plugins.lib.nots.semver import Version + + +def test_from_str(): + # arrange + version_str = "1.2.3" + + # act + version = Version.from_str(version_str) + + # assert + assert version.major == 1 + assert version.minor == 2 + assert version.patch == 3 + + +def test_from_str_bad_version(): + # arrange + version_str = "best version imaginable" + error = None + + # act + try: + Version.from_str(version_str) + except Exception as exception: + error = exception + + # assert + assert error is not None + + +def test_is_stable_true(): + # arrange + version_str = "1.2.3" + + # act + assert + assert Version.is_stable(version_str) + + +def test_is_stable_false(): + # arrange + version_str = "1.2.3-beta1" + + # act + assert + assert not Version.is_stable(version_str) + + +def test_is_stable_incorrect(): + # arrange + version_str = "v1.2.3" + + # act + assert + assert not Version.is_stable(version_str) + + +def test_cmp_lt(): + # arrange + a = Version.from_str("1.2.3") + b = Version.from_str("1.2.5") + + # act + assert + assert Version.cmp(a, b) == -1 + + +def test_cmp_gt(): + # arrange + a = Version.from_str("1.2.3") + b = Version.from_str("1.2.2") + + # act + assert + assert Version.cmp(a, b) == 1 + + +def test_cmp_eq(): + # arrange + a = Version.from_str("1.2.3") + b = Version.from_str("1.2.3") + + # act + assert + assert Version.cmp(a, b) == 0 + + +def test_cmp_lt_str(): + # arrange + a = "1.2.3" + b = "1.2.5" + + # act + assert + assert Version.cmp(a, b) == -1 + + +def test_cmp_gt_str(): + # arrange + a = "1.2.3" + b = "1.2.2" + + # act + assert + assert Version.cmp(a, b) == 1 + + +def test_cmp_eq_str(): + # arrange + a = "1.2.3" + b = "1.2.3" + + # act + assert + assert Version.cmp(a, b) == 0 + + +def test_cmp_usage_in_sorted_asc(): + # arrange + unsorted = ["1.2.3", "2.4.2", "1.2.7"] + + # act + assert + assert sorted(unsorted, key=cmp_to_key(Version.cmp)) == ["1.2.3", "1.2.7", "2.4.2"] + + 
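For orientation, a minimal sketch of how the Version and VersionRange helpers defined above compose when picking a toolchain version; the version list and the range below are made-up inputs, and the import path mirrors the one used in these tests:

from functools import cmp_to_key

from build.plugins.lib.nots.semver import Version, VersionRange

# Sort raw version strings oldest-to-newest using the comparator.
versions = sorted(["1.2.10", "1.2.3", "2.0.0"], key=cmp_to_key(Version.cmp))
assert versions == ["1.2.3", "1.2.10", "2.0.0"]

# Pick the lowest version satisfying a ">=" range, as ErmJsonLite.select_version_of does.
version_range = VersionRange.from_str(">= 1.2.5")
picked = next((v for v in versions if version_range.is_satisfied_by(Version.from_str(v))), None)
assert picked == "1.2.10"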
+def test_cmp_usage_in_sorted_desc(): + # arrange + unsorted = ["1.2.3", "2.4.2", "1.2.7"] + + # act + assert + assert sorted(unsorted, key=cmp_to_key(Version.cmp), reverse=True) == ["2.4.2", "1.2.7", "1.2.3"] + + +def test_init_negative_numbers(): + # arrange + major = 1 + minor = -2 + patch = 3 + + error = None + + # act + try: + Version(major, minor, patch) + except Exception as exception: + error = exception + + # assert + assert isinstance(error, ValueError) + assert str(error) == "'minor' is negative. A version can only be positive." + + +def test_eq(): + # arrange + version_a = Version.from_str("1.2.3") + version_b = Version.from_str("1.2.3") + + # act + assert + assert version_a == version_b + + +def test_eq_negative(): + # arrange + version_a = Version.from_str("1.2.3") + version_b = Version.from_str("3.2.1") + + # act + assert + assert not version_a == version_b + + +def test_eq_with_str(): + # arrange + version = Version.from_str("1.2.3") + + # act + assert + assert version == "1.2.3" + assert not version == "1.2.4" + + +def test_eq_with_invalid_str(): + # arrange + version = Version.from_str("1.2.3") + + # act + assert + assert not version == "bla-bla" + assert not version == "1.2.3-beta" + + +def test_ne(): + # arrange + version_a = Version.from_str("3.2.1") + version_b = Version.from_str("1.2.3") + + # act + assert + assert version_a != version_b + + +def test_ne_negative(): + # arrange + version_a = Version.from_str("1.2.3") + version_b = Version.from_str("1.2.3") + + # act + assert + assert not version_a != version_b + + +def test_ne_with_str(): + # arrange + version = Version.from_str("1.2.3") + + # act + assert + assert version != "1.2.4" + assert not version != "1.2.3" + + +def test_gt(): + # arrange + version_a = Version.from_str("3.2.1") + version_b = Version.from_str("1.2.3") + + # act + assert + assert version_a > version_b + + +def test_ge_equals(): + # arrange + version_a = Version.from_str("1.2.3") + version_b = Version.from_str("1.2.3") + + # act + assert + assert version_a >= version_b + + +def test_ge_exceeds(): + # arrange + version_a = Version.from_str("3.2.1") + version_b = Version.from_str("1.2.3") + + # act + assert + assert version_a >= version_b + + +def test_lt(): + # arrange + version_a = Version.from_str("1.2.3") + version_b = Version.from_str("3.2.1") + + # act + assert + assert version_a < version_b + + +def test_le_equals(): + # arrange + version_a = Version.from_str("1.2.3") + version_b = Version.from_str("1.2.3") + + # act + assert + assert version_a <= version_b + + +def test_le_is_less(): + # arrange + version_a = Version.from_str("1.2.3") + version_b = Version.from_str("3.2.1") + + # act + assert + assert version_a <= version_b + + +def test_to_tuple(): + # arrange + version = Version.from_str("1.2.3") + + # act + assert + assert version.as_tuple() == (1, 2, 3) diff --git a/build/plugins/lib/nots/semver/tests/test_version_range.py b/build/plugins/lib/nots/semver/tests/test_version_range.py new file mode 100644 index 0000000000..e0833b6dba --- /dev/null +++ b/build/plugins/lib/nots/semver/tests/test_version_range.py @@ -0,0 +1,107 @@ +from build.plugins.lib.nots.semver import Version, Operator, VersionRange + + +def test_from_str(): + checklist = [ + (">= 1.2.3", VersionRange, Operator.GE), + (">=1.2.3", VersionRange, Operator.GE), + (">= 1.2.3", VersionRange, Operator.GE), + (" >= 1.2.3 ", VersionRange, Operator.GE), + ("= 1.2.3", VersionRange, Operator.EQ), + ("=1.2.3", VersionRange, Operator.EQ), + ("= 1.2.3", VersionRange, Operator.EQ), + 
(" = 1.2.3 ", VersionRange, Operator.EQ), + (" 1.2.3", VersionRange, Operator.EQ), + ("1.2.3", VersionRange, Operator.EQ), + (" 1.2.3", VersionRange, Operator.EQ), + (" 1.2.3 ", VersionRange, Operator.EQ), + ] + + for range_str, expected_class, expected_operator in checklist: + range = VersionRange.from_str(range_str) + + assert isinstance(range, expected_class), f"unexpected class for '{range_str}': '{type(range)}'" + assert range.operator == expected_operator, f"unexpected operator for '{range_str}': '{range.operator}'" + + +def test_from_str_error(): + error_template = "Unsupported version range: '{}'. Currently we only support ranges with stable versions and GE / EQ: '>= 1.2.3' / '= 1.2.3' / '1.2.3'" + checklist = [ + (r"¯\_(ツ)_/¯", ValueError, error_template), + ("<= 1.2.3", ValueError, error_template), + ("<=1.2.3", ValueError, error_template), + ("<= 1.2.3", ValueError, error_template), + (" <= 1.2.3 ", ValueError, error_template), + ("< 1.2.3", ValueError, error_template), + ("<1.2.3", ValueError, error_template), + ("< 1.2.3", ValueError, error_template), + (" < 1.2.3 ", ValueError, error_template), + ("> 1.2.3", ValueError, error_template), + (">1.2.3", ValueError, error_template), + ("> 1.2.3", ValueError, error_template), + (" > 1.2.3 ", ValueError, error_template), + ("0.0.1-beta", ValueError, error_template), + ] + + for range_str, expected_class, expected_msg_template in checklist: + try: + VersionRange.from_str(range_str) + except Exception as exception: + error = exception + + assert isinstance(error, expected_class), f"unexpected error class for '{range_str}': '{type(error)}'" + assert str(error) == expected_msg_template.format( + range_str + ), f"unexpected error message for '{range_str}': '{error}'" + + +def test_init(): + checklist = [ + (Operator.GE, "1.2.3", Operator.GE, Version(1, 2, 3)), + (Operator.GE, " 1.2.3 ", Operator.GE, Version(1, 2, 3)), + (Operator.GE, "0.0.1", Operator.GE, Version(0, 0, 1)), + (Operator.EQ, "1.2.3", Operator.EQ, Version(1, 2, 3)), + (Operator.EQ, " 1.2.3 ", Operator.EQ, Version(1, 2, 3)), + (Operator.EQ, "0.0.1", Operator.EQ, Version(0, 0, 1)), + (None, "1.2.3", Operator.EQ, Version(1, 2, 3)), + (None, " 1.2.3 ", Operator.EQ, Version(1, 2, 3)), + (None, "0.0.1", Operator.EQ, Version(0, 0, 1)), + ] + + for operator_provided, version_provided, expected_operator, expected_version in checklist: + range = VersionRange(operator_provided, Version.from_str(version_provided)) + + assert ( + range.operator == expected_operator + ), f"unexpected operator for '{operator_provided}', '{version_provided}': '{range.operator}'" + assert ( + range.version == expected_version + ), f"unexpected result version for '{operator_provided}', '{version_provided}': '{range.version}'" + + +def test_is_satisfied(): + checklist = [ + (">= 1.2.3", "1.2.3", True), + (">= 1.2.3", "1.2.4", True), + (">= 1.2.3", "1.3.0", True), + (">= 1.2.3", "2.0.0", True), + (">= 1.2.3", "5.8.2", True), + (">= 1.2.3", "1.2.2", False), + (">= 1.2.3", "0.100.200", False), + ("= 1.2.3", "1.2.3", True), + ("1.2.3", "1.2.3", True), + ("1.2.3", "1.2.2", False), + ("1.2.3", "1.3.3", False), + ("1.2.3", "2.2.3", False), + ("12345.45634.456234", "12345.45634.456234", True), + ("0.0.0", "0.0.0", True), + ] + + for range_provided, version_provided, expected_result in checklist: + + version = Version.from_str(version_provided) + range = VersionRange.from_str(range_provided) + + assert ( + range.is_satisfied_by(version) == expected_result + ), f"Unexpected is_satisfied_by result for 
'{range_provided}', '{version_provided}': {(not expected_result)}" diff --git a/build/plugins/lib/nots/semver/tests/ya.make b/build/plugins/lib/nots/semver/tests/ya.make new file mode 100644 index 0000000000..b7605505f3 --- /dev/null +++ b/build/plugins/lib/nots/semver/tests/ya.make @@ -0,0 +1,14 @@ +PY3TEST() + +OWNER(g:frontend-build-platform) + +PEERDIR( + build/plugins/lib/nots/semver +) + +TEST_SRCS( + test_version_range.py + test_version.py +) + +END() diff --git a/build/plugins/lib/nots/semver/ya.make b/build/plugins/lib/nots/semver/ya.make new file mode 100644 index 0000000000..7d2be228f2 --- /dev/null +++ b/build/plugins/lib/nots/semver/ya.make @@ -0,0 +1,14 @@ +PY23_LIBRARY() + +OWNER(g:frontend-build-platform) + +PY_SRCS( + __init__.py + semver.py +) + +END() + +RECURSE_FOR_TESTS( + tests +) diff --git a/build/plugins/lib/nots/typescript/__init__.py b/build/plugins/lib/nots/typescript/__init__.py new file mode 100644 index 0000000000..e0b3ee901c --- /dev/null +++ b/build/plugins/lib/nots/typescript/__init__.py @@ -0,0 +1,10 @@ +from .ts_config import DEFAULT_TS_CONFIG_FILE, TsConfig +from .ts_errors import TsError, TsValidationError + + +__all__ = [ + "DEFAULT_TS_CONFIG_FILE", + "TsConfig", + "TsError", + "TsValidationError", +] diff --git a/build/plugins/lib/nots/typescript/tests/ts_config.py b/build/plugins/lib/nots/typescript/tests/ts_config.py new file mode 100644 index 0000000000..4b8fd675b3 --- /dev/null +++ b/build/plugins/lib/nots/typescript/tests/ts_config.py @@ -0,0 +1,86 @@ +import pytest + +from build.plugins.lib.nots.typescript import TsConfig, TsValidationError + + +def test_ts_config_validate_valid(): + cfg = TsConfig(path="/tsconfig.json") + cfg.data = { + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build", + }, + } + + cfg.validate() + + +def test_ts_config_validate_empty(): + cfg = TsConfig(path="/tsconfig.json") + + with pytest.raises(TsValidationError) as e: + cfg.validate() + + assert e.value.errors == [ + "'rootDir' option is required", + "'outDir' option is required", + ] + + +def test_ts_config_validate_invalid_common(): + cfg = TsConfig(path="/tsconfig.json") + cfg.data = { + "compilerOptions": { + "preserveSymlinks": True, + "rootDirs": [], + "outFile": "./foo.js", + }, + "references": [], + "files": [], + "include": [], + "exclude": [], + } + + with pytest.raises(TsValidationError) as e: + cfg.validate() + + assert e.value.errors == [ + "'rootDir' option is required", + "'outDir' option is required", + "'outFile' option is not supported", + "'preserveSymlinks' option is not supported due to pnpm limitations", + "'rootDirs' option is not supported, relative imports should have single root", + "'files' option is not supported, use 'include'", + "composite builds are not supported, use peerdirs in ya.make instead of 'references' option", + ] + + +def test_ts_config_validate_invalid_subdirs(): + cfg = TsConfig(path="/foo/tsconfig.json") + cfg.data = { + "compilerOptions": { + "rootDir": "/bar/src", + "outDir": "../bar/build", + }, + } + + with pytest.raises(TsValidationError) as e: + cfg.validate() + + assert e.value.errors == [ + "'outDir' should be a subdirectory of the module", + ] + + +def test_ts_config_compiler_options(): + cfg = TsConfig(path="/tsconfig.json") + + assert cfg.compiler_option("invalid") is None + + cfg.data = { + "compilerOptions": { + "rootDir": "src", + }, + } + + assert cfg.compiler_option("rootDir") == "src" diff --git a/build/plugins/lib/nots/typescript/tests/ya.make b/build/plugins/lib/nots/typescript/tests/ya.make 
new file mode 100644 index 0000000000..44798138bc --- /dev/null +++ b/build/plugins/lib/nots/typescript/tests/ya.make @@ -0,0 +1,13 @@ +PY23_TEST() + +OWNER(g:frontend-build-platform) + +TEST_SRCS( + ts_config.py +) + +PEERDIR( + build/plugins/lib/nots/typescript +) + +END() diff --git a/build/plugins/lib/nots/typescript/ts_config.py b/build/plugins/lib/nots/typescript/ts_config.py new file mode 100644 index 0000000000..c54121a9d1 --- /dev/null +++ b/build/plugins/lib/nots/typescript/ts_config.py @@ -0,0 +1,251 @@ +import copy +import os +import json + +from .ts_errors import TsError, TsValidationError + +from ..package_manager.base import utils + +DEFAULT_TS_CONFIG_FILE = "tsconfig.json" + + +def merge_dicts(d1, d2): + """ + Merges two dicts recursively assuming that both have similar structure. + If d1.x.y.z has different type than d2.x.y.z then d2 will override d1 and result value res.x.y.z == d2.x.y.z. + If corresponding values are lists then the result will have a sum of those lists. + """ + if isinstance(d1, dict) and isinstance(d2, dict): + for k in d2: + d1[k] = merge_dicts(d1[k], d2[k]) if k in d1 else d2[k] + else: + if isinstance(d1, list) and isinstance(d2, list): + return d1 + d2 + else: + return d2 + return d1 + + +class TsConfig(object): + @classmethod + def load(cls, path): + """ + :param path: tsconfig.json path + :type path: str + :rtype: TsConfig + """ + tsconfig = cls(path) + tsconfig.read() + + return tsconfig + + def __init__(self, path): + if not os.path.isabs(path): + raise TypeError("Absolute path required, given: {}".format(path)) + + self.path = path + self.data = {} + + def read(self): + try: + with open(self.path) as f: + self.data = json.load(f) + except Exception as e: + raise TsError("Failed to read tsconfig {}: {}".format(self.path, e)) + + def merge(self, rel_path, base_tsconfig): + """ + :param rel_path: relative path to the configuration file we are merging in. + It is required to set the relative paths correctly. + :type rel_path: str + :param base_tsconfig: base TsConfig we are merging with our TsConfig instance + :type base_tsconfig: dict + """ + if not base_tsconfig.data: + return + + def relative_path(p): + return os.path.normpath(os.path.join(rel_path, p)) + + base_config_data = copy.deepcopy(base_tsconfig.data) + + parameter_section_labels = ["compilerOptions", "typeAcquisition", "watchOptions"] + for opt_label in parameter_section_labels: + base_options = base_config_data.get(opt_label) + if not base_options: + continue + + new_options = self.data.get(opt_label) + for key in base_options: + val = base_options[key] + + # lists of paths + if key in ["extends", "outDir", "rootDir", "baseUrl", "include"]: + val = relative_path(val) + + # path string + elif key in ["rootDirs", "excludeDirectories", "excludeFiles"]: + val = map(relative_path, val) + + # dicts having paths as values + elif key in ["paths"]: + new_paths = new_options.get(key) + val = map(relative_path, val) + (new_paths if new_paths else []) + + base_options[key] = val + + if new_options and base_options: + base_options.update(new_options) + self.data[opt_label] = base_options + + base_config_data.update(self.data) + self.data = base_config_data + + def inline_extend(self, dep_paths): + """ + Merges the tsconfig parameters from configuration file referred by "extends" if any. + Relative paths are adjusted, current parameter values are prioritized higer than + those coming from extension file (according to TSC mergin rules). 
+ Returns list of file paths for config files merged into the current configuration + :param dep_paths: dict of dependency names to their paths + :type dep_paths: dict + :rtype: list of str + """ + ext_value = self.data.get("extends") + if not ext_value: + return [] + + if ext_value.startswith("."): + base_config_path = ext_value + + else: + dep_name = utils.extract_package_name_from_path(ext_value) + # the rest part is the ext config path + file_path_start = len(dep_name) + 1 + file_path = ext_value[file_path_start:] + dep_path = dep_paths.get(dep_name) + if dep_path is None: + raise Exception( + "referenceing from {}, data: {}\n: Dependency '{}' not found in dep_paths: {}".format( + self.path, str(self.data), dep_name, dep_paths + ) + ) + base_config_path = os.path.join(dep_path, file_path) + + rel_path = os.path.dirname(base_config_path) + tsconfig_curdir_path = os.path.join(os.path.dirname(self.path), base_config_path) + if os.path.isdir(tsconfig_curdir_path): + base_config_path = os.path.join(base_config_path, DEFAULT_TS_CONFIG_FILE) + + # processing the base file recursively + base_config = TsConfig.load(os.path.join(os.path.dirname(self.path), base_config_path)) + paths = [base_config_path] + base_config.inline_extend(dep_paths) + + self.merge(rel_path, base_config) + del self.data["extends"] + + return paths + + def get_or_create_compiler_options(self): + """ + Returns ref to the "compilerOptions" dict. + :rtype: dict + """ + opts = self.data.get("compilerOptions") + if opts is None: + opts = {} + self.data["compilerOptions"] = opts + + return opts + + def prepend_include(self, value): + """ + Prepends `value` to `include` list + :param value: value to prepend + :type value: str + """ + includeList = self.data.get("include") + self.data["include"] = [value] + includeList + + def compiler_option(self, name, default=None): + """ + :param name: option key + :type name: str + :param default: default value + :type default: mixed + :rtype: mixed + """ + return self.get_or_create_compiler_options().get(name, default) + + def add_to_compiler_option(self, name, add_value): + """ + Merges the existing value with add_value for the option with label=name. + Merge is done recursively if the value is of a dict instance. + :param name: option key + :type name: str + :param value: option value to set + :type value: mixed + """ + default_value = {} if isinstance(add_value, dict) else [] + opts = self.get_or_create_compiler_options() + opts[name] = merge_dicts(opts.get(name, default_value), add_value) + + def inject_plugin(self, plugin): + """ + :param plugin: plugin dict (ts-patch compatible, see https://github.com/nonara/ts-patch) + :type plugin: dict of str + """ + opts = self.get_or_create_compiler_options() + if not opts.get("plugins"): + opts["plugins"] = [] + opts["plugins"].append(plugin) + + def validate(self): + """ + Checks whether the config is compatible with current toolchain. 
+ """ + opts = self.get_or_create_compiler_options() + errors = [] + root_dir = opts.get("rootDir") + out_dir = opts.get("outDir") + config_dir = os.path.dirname(self.path) + + def is_mod_subdir(p): + return not os.path.isabs(p) and os.path.normpath(os.path.join(config_dir, p)).startswith(config_dir) + + if root_dir is None: + errors.append("'rootDir' option is required") + + if out_dir is None: + errors.append("'outDir' option is required") + elif not is_mod_subdir(out_dir): + errors.append("'outDir' should be a subdirectory of the module") + + if opts.get("outFile") is not None: + errors.append("'outFile' option is not supported") + + if opts.get("preserveSymlinks"): + errors.append("'preserveSymlinks' option is not supported due to pnpm limitations") + + if opts.get("rootDirs") is not None: + errors.append("'rootDirs' option is not supported, relative imports should have single root") + + if self.data.get("files") is not None: + errors.append("'files' option is not supported, use 'include'") + + if self.data.get("references") is not None: + errors.append("composite builds are not supported, use peerdirs in ya.make instead of 'references' option") + + if len(errors): + raise TsValidationError(self.path, errors) + + def write(self, path=None, indent=None): + """ + :param path: tsconfig path, defaults to original path + :type path: str + """ + if path is None: + path = self.path + + with open(path, "w") as f: + json.dump(self.data, f, indent=indent) diff --git a/build/plugins/lib/nots/typescript/ts_errors.py b/build/plugins/lib/nots/typescript/ts_errors.py new file mode 100644 index 0000000000..105851d9ec --- /dev/null +++ b/build/plugins/lib/nots/typescript/ts_errors.py @@ -0,0 +1,10 @@ +class TsError(RuntimeError): + pass + + +class TsValidationError(TsError): + def __init__(self, path, errors): + self.path = path + self.errors = errors + + super(TsValidationError, self).__init__("Invalid tsconfig {}:\n{}".format(path, "\n".join(errors))) diff --git a/build/plugins/lib/nots/typescript/ya.make b/build/plugins/lib/nots/typescript/ya.make new file mode 100644 index 0000000000..8847f9bbd3 --- /dev/null +++ b/build/plugins/lib/nots/typescript/ya.make @@ -0,0 +1,19 @@ +PY23_LIBRARY() + +OWNER(g:frontend-build-platform) + +PY_SRCS( + __init__.py + ts_errors.py + ts_config.py +) + +PEERDIR( + build/plugins/lib/nots/package_manager +) + +END() + +RECURSE_FOR_TESTS( + tests +) diff --git a/build/plugins/lib/nots/ya.make b/build/plugins/lib/nots/ya.make new file mode 100644 index 0000000000..b24c534033 --- /dev/null +++ b/build/plugins/lib/nots/ya.make @@ -0,0 +1,15 @@ +PY23_LIBRARY() + +OWNER(g:frontend-build-platform) + +PY_SRCS( + __init__.py +) + +PEERDIR( + build/plugins/lib/nots/package_manager + build/plugins/lib/nots/semver + build/plugins/lib/nots/typescript +) + +END() diff --git a/build/plugins/lib/test_const/__init__.py b/build/plugins/lib/test_const/__init__.py new file mode 100644 index 0000000000..a9cf8b2e74 --- /dev/null +++ b/build/plugins/lib/test_const/__init__.py @@ -0,0 +1,521 @@ +# coding: utf-8 +import re + + +RESTART_TEST_INDICATOR = '##restart-test##' +INFRASTRUCTURE_ERROR_INDICATOR = '##infrastructure-error##' + +RESTART_TEST_INDICATORS = [ + RESTART_TEST_INDICATOR, + "network error", +] + +UID_PREFIX_DELIMITER = '-' + +# testing +BIN_DIRECTORY = 'bin' +CANON_DATA_DIR_NAME = "canondata" +CANON_RESULT_FILE_NAME = "result.json" +CANONIZATION_RESULT_FILE_NAME = "canonization_res.json" +COMMON_CONTEXT_FILE_NAME = "common_test.context" +CONSOLE_SNIPPET_LIMIT = 5000 
+FAKE_OUTPUT_EXTS = frozenset([".mf", ".fake", ".cpf", ".cpsf"]) +LIST_NODE_LOG_FILE = "test_list.log" +LIST_NODE_RESULT_FILE = "test_list.json" +LIST_RESULT_NODE_LOG_FILE = "list_result.log" +LIST_TRACE_FILE_NAME = "ytest_list.report.trace" +MAX_FILE_SIZE = 1024 * 1024 * 2 # 2 MB +MAX_TEST_RESTART_COUNT = 3 +NO_LISTED_TESTS = "NO_LISTED_TESTS" +REPORT_SNIPPET_LIMIT = 12000 +SANITIZER_ERROR_RC = 100 +SUITE_CONTEXT_FILE_NAME = "test.context" +TEST_LIST_FILE = "test_names_list.json" +TEST_SUBTEST_SEPARATOR = '::' +TESTING_OUT_DIR_NAME = "testing_out_stuff" +TESTING_OUT_RAM_DRIVE_DIR_NAME = "ram_drive_output" +TESTING_OUT_TAR_NAME = TESTING_OUT_DIR_NAME + ".tar.zstd" +TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S.%f" +TRACE_FILE_NAME = "ytest.report.trace" +TRUNCATING_IGNORE_FILE_LIST = {TRACE_FILE_NAME, SUITE_CONTEXT_FILE_NAME, "run_test.log"} +YT_RUN_TEST_DIR_NAME = "yt_run_test" +YT_RUN_TEST_TAR_NAME = "yt_run_test.tar" +COVERAGE_CFLAGS = ["-fprofile-instr-generate", "-fcoverage-mapping", "-DCLANG_COVERAGE"] +COVERAGE_LDFLAGS = ["-fprofile-instr-generate", "-fcoverage-mapping"] + +MANDATORY_ENV_VAR_NAME = 'YA_MANDATORY_ENV_VARS' + +BUILD_FLAGS_ALLOWED_IN_CONTEXT = { + 'AUTOCHECK', + # Required for local test runs + 'TESTS_REQUESTED', + 'USE_ARCADIA_PYTHON', + 'USE_SYSTEM_PYTHON', +} + +STYLE_TEST_TYPES = [ + "classpath.clash", + "clang_tidy", + "eslint", + "gofmt", + "govet", + "java.style", + "ktlint", + "custom_lint", +] + +REGULAR_TEST_TYPES = [ + "benchmark", + "boost_test", + "exectest", + "fuzz", + "g_benchmark", + "go_bench", + "go_test", + "gtest", + "hermione", + "hermione_beta", + "java", + "jest", + "py2test", + "py3test", + "pytest", + "unittest", +] + +TEST_NODE_OUTPUT_RESULTS = [TESTING_OUT_TAR_NAME, YT_RUN_TEST_TAR_NAME] + +# kvm +DEFAULT_RAM_REQUIREMENTS_FOR_KVM = 4 +MAX_RAM_REQUIREMENTS_FOR_KVM = 16 + +# distbuild +DISTBUILD_STATUS_REPORT_ENV_NAME = 'NODE_EXTENDED_STATUS_FILE_PATH' +DEFAULT_TEST_NODE_TIMEOUT = 15 * 60 +TEST_NODE_FINISHING_TIME = 5 * 60 + +# coverage +COVERAGE_FUNCTION_ENTRIES_LIMIT = 2 +COVERAGE_PYTHON_EXTS = (".py", ".pyx", ".pxi", ".pxd") + +COVERAGE_RESOLVED_FILE_NAME_PATTERN = "coverage_resolved.{}.json" +CPP_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("cpp") +GO_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("go") +JAVA_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("java") +NLG_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("nlg") +PYTHON2_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("py2") +PYTHON3_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("py3") +TS_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("ts") + +COVERAGE_CLANG_ENV_NAME = 'LLVM_PROFILE_FILE' +COVERAGE_GCOV_ENV_NAME = 'GCOV_PREFIX' +COVERAGE_GO_ENV_NAME = 'GO_COVERAGE_PREFIX' +COVERAGE_PYTHON_ENV_NAME = 'PYTHON_COVERAGE_PREFIX' +COVERAGE_TS_ENV_NAME = 'TS_COVERAGE_PREFIX' +COVERAGE_NLG_ENV_NAME = 'NLG_COVERAGE_FILENAME' +COVERAGE_ENV_VARS = ( + COVERAGE_CLANG_ENV_NAME, + COVERAGE_GCOV_ENV_NAME, + COVERAGE_GO_ENV_NAME, + COVERAGE_NLG_ENV_NAME, + COVERAGE_PYTHON_ENV_NAME, + COVERAGE_TS_ENV_NAME, +) +PYTHON_COVERAGE_PREFIX_FILTER_ENV_NAME = 'PYTHON_COVERAGE_PREFIX_FILTER' +PYTHON_COVERAGE_EXCLUDE_REGEXP_ENV_NAME = 'PYTHON_COVERAGE_EXCLUDE_REGEXP' + +CLANG_COVERAGE_TEST_TYPES = ( + "boost_test", + "coverage_extractor", + "exectest", + "gtest", + # java tests might use shared libraries + "java", + "py2test", + 
"py3test", + "pytest", + "unittest", +) +COVERAGE_TABLE_CHUNKS = 20 +COVERAGE_TESTS_TIMEOUT_FACTOR = 1.5 +COVERAGE_YT_PROXY = "hahn.yt.yandex.net" +COVERAGE_YT_ROOT_PATH = "//home/codecoverage" +COVERAGE_YT_TABLE_PREFIX = "datatable" + +# fuzzing +CORPUS_DATA_FILE_NAME = 'corpus.json' +CORPUS_DATA_ROOT_DIR = 'fuzzing' +CORPUS_DIR_NAME = 'corpus' +FUZZING_COVERAGE_ARGS = ['--sanitize-coverage=trace-div,trace-gep'] +FUZZING_COMPRESSION_COEF = 1.1 +FUZZING_DEFAULT_TIMEOUT = 3600 +FUZZING_FINISHING_TIME = 600 +FUZZING_TIMEOUT_RE = re.compile(r'(^|\s)-max_total_time=(?P<max_time>\d+)') +GENERATED_CORPUS_DIR_NAME = 'mined_corpus' +MAX_CORPUS_RESOURCES_ALLOWED = 5 + +# hermione +HERMIONE_REPORT_DIR_NAME = "hermione-report" +HERMIONE_REPORT_TAR_NAME = HERMIONE_REPORT_DIR_NAME + ".tar" +HERMIONE_REPORT_INDEX_FILE_NAME = "index.html" +HERMIONE_REPORT_DB_URLS_FILE_NAME = "databaseUrls.json" +HERMIONE_TESTS_READ_FILE_NAME = "tests.json" +HERMIONE_TESTS_READ_STDOUT_FILE_NAME = "read_tests.out" +HERMIONE_TESTS_READ_STDERR_FILE_NAME = "read_tests.err" +HERMIONE_TESTS_RUN_FILE_NAME = "test_results.jsonl" +HERMIONE_TESTS_RUN_STDOUT_FILE_NAME = "run_tests.out" +HERMIONE_TESTS_RUN_STDERR_FILE_NAME = "run_tests.err" + +# yt +YT_OPERATION_ID_SUBSTITUTION = '$OPERATION_ID' +YT_SANDBOX_ROOT_PREFIX = '$(YT_SANDBOX_ROOT)' + +# sandbox +SANDBOX_RUN_TEST_YT_TOKEN_VALUE_NAME = 'YA_MAKE_SANDBOX_RUN_TEST_YT_TOKEN' + +# global resources +ANDROID_AVD_ROOT = 'ANDROID_AVD_RESOURCE_GLOBAL' +ANDROID_SDK_ROOT = 'ANDROID_SDK_RESOURCE_GLOBAL' +COVERAGE_PUSH_TOOL_LOCAL = 'USE_SYSTEM_COVERAGE_PUSH_TOOL' +COVERAGE_PUSH_TOOL_RESOURCE = 'COVERAGE_PUSH_TOOL_RESOURCE_GLOBAL' +COVERAGE_PUSH_TOOL_LB_LOCAL = 'USE_SYSTEM_COVERAGE_PUSH_TOOL_LB' +COVERAGE_PUSH_TOOL_LB_RESOURCE = 'COVERAGE_PUSH_TOOL_LB_RESOURCE_GLOBAL' +FLAKE8_PY2_RESOURCE = 'FLAKE8_PY2_RESOURCE_GLOBAL' +FLAKE8_PY3_RESOURCE = 'FLAKE8_PY3_RESOURCE_GLOBAL' +GO_TOOLS_RESOURCE = 'GO_TOOLS_RESOURCE_GLOBAL' +JSTYLE_RUNNER_LIB = 'JSTYLE_LIB_RESOURCE_GLOBAL' +NODEJS_RESOURCE = 'NODEJS_RESOURCE_GLOBAL' +NYC_RESOURCE = 'NYC_RESOURCE_GLOBAL' +TEST_TOOL3_HOST = 'TEST_TOOL3_HOST_RESOURCE_GLOBAL' +TEST_TOOL3_HOST_LOCAL = 'TEST_TOOL3_HOST_LOCAL' +TEST_TOOL_HOST = 'TEST_TOOL_HOST_RESOURCE_GLOBAL' +TEST_TOOL_HOST_LOCAL = 'TEST_TOOL_HOST_LOCAL' +TEST_TOOL_TARGET = 'TEST_TOOL_TARGET_RESOURCE_GLOBAL' +TEST_TOOL_TARGET_LOCAL = 'TEST_TOOL_TARGET_LOCAL' +XCODE_TOOLS_RESOURCE = 'XCODE_TOOLS_ROOT_RESOURCE_GLOBAL' +WINE_TOOL = 'WINE_TOOL_RESOURCE_GLOBAL' +WINE32_TOOL = 'WINE32_TOOL_RESOURCE_GLOBAL' + + +class Enum(object): + @classmethod + def enumerate(cls): + return [v for k, v in cls.__dict__.items() if not k.startswith("_")] + + +class TestRequirements(Enum): + Container = 'container' + Cpu = 'cpu' + DiskUsage = 'disk_usage' + Dns = 'dns' + Kvm = 'kvm' + Network = 'network' + Ram = 'ram' + RamDisk = 'ram_disk' + SbVault = 'sb_vault' + YavSecret = 'yav' + + +class TestRequirementsConstants(Enum): + All = 'all' + AllCpuValue = 50 + AllRamDiskValue = 50 + MinCpu = 1 + MinRam = 1 + MinRamDisk = 0 + + @classmethod + def is_all_cpu(cls, value): + return value == cls.All + + @classmethod + def get_cpu_value(cls, value): + return cls.AllCpuValue if cls.is_all_cpu(value) else value + + @classmethod + def is_all_ram_disk(cls, value): + return value == cls.All + + @classmethod + def get_ram_disk_value(cls, value): + return cls.AllRamDiskValue if cls.is_all_ram_disk(value) else value + + +class TestSize(Enum): + Small = 'small' + Medium = 'medium' + Large = 'large' + + DefaultTimeouts = { + Small: 60, + 
Medium: 600, + Large: 3600, + } + + DefaultPriorities = { + Small: -1, + Medium: -2, + Large: -3, + } + + DefaultRequirements = { + Small: { + TestRequirements.Cpu: 1, + TestRequirements.Ram: 8, + # TestRequirements.Ram: 2, + TestRequirements.RamDisk: 0, + }, + Medium: { + TestRequirements.Cpu: 1, + TestRequirements.Ram: 8, + # TestRequirements.Ram: 4, + TestRequirements.RamDisk: 0, + }, + Large: { + TestRequirements.Cpu: 1, + TestRequirements.Ram: 8, + # TestRequirements.Ram: 8, + TestRequirements.RamDisk: 0, + }, + } + + MaxRequirements = { + Small: { + TestRequirements.Cpu: 4, + TestRequirements.Ram: 32, + # TestRequirements.Ram: 4, + TestRequirements.RamDisk: 32, + }, + Medium: { + TestRequirements.Cpu: 4, + # TestRequirements.Cpu: 8, + TestRequirements.Ram: 32, + # TestRequirements.Ram: 16, + TestRequirements.RamDisk: 32, + }, + Large: { + TestRequirements.Cpu: 4, + TestRequirements.Ram: 32, + TestRequirements.RamDisk: 32, + }, + } + + LargeMarker = "TL" + MediumMarker = "TM" + SmallMarker = "TS" + SizeMarkers = (LargeMarker, MediumMarker, SmallMarker) + + SizeShorthandMap = { + Large: LargeMarker, + Medium: MediumMarker, + Small: SmallMarker, + } + + @classmethod + def sizes(cls): + return cls.DefaultTimeouts.keys() + + @classmethod + def get_shorthand(cls, size): + return cls.SizeShorthandMap[size] + + @classmethod + def is_test_shorthand(cls, name): + return name in cls.SizeMarkers + + @classmethod + def get_default_timeout(cls, size): + if size in cls.DefaultTimeouts: + return cls.DefaultTimeouts[size] + raise Exception("Unknown test size '{}'".format(size)) + + @classmethod + def get_default_priorities(cls, size): + if size in cls.DefaultPriorities: + return cls.DefaultPriorities[size] + raise Exception("Unknown test size '{}'".format(size)) + + @classmethod + def get_default_requirements(cls, size): + if size in cls.DefaultRequirements: + return cls.DefaultRequirements[size] + raise Exception("Unknown test size '{}'".format(size)) + + @classmethod + def get_max_requirements(cls, size): + if size in cls.MaxRequirements: + return cls.MaxRequirements[size] + raise Exception("Unknown test size '{}'".format(size)) + + +class TestRunExitCode(Enum): + Skipped = 2 + Failed = 3 + TimeOut = 10 + InfrastructureError = 12 + + +class YaTestTags(Enum): + AlwaysMinimize = "ya:always_minimize" + Dirty = "ya:dirty" + DumpNodeEnvironment = "ya:dump_node_env" + DumpTestEnvironment = "ya:dump_test_env" + ExoticPlatform = "ya:exotic_platform" + External = "ya:external" + Fat = "ya:fat" + ForceDistbuild = "ya:force_distbuild" + ForceSandbox = "ya:force_sandbox" + GoNoSubtestReport = "ya:go_no_subtest_report" + GoTotalReport = "ya:go_total_report" + HugeLogs = "ya:huge_logs" + Manual = "ya:manual" + MapRootUser = "ya:map_root_user" + NoFuse = "ya:nofuse" + NoGracefulShutdown = "ya:no_graceful_shutdown" + Norestart = "ya:norestart" + Noretries = "ya:noretries" + NotAutocheck = "ya:not_autocheck" + Notags = "ya:notags" + PerfTest = "ya:perftest" + Privileged = "ya:privileged" + ReportChunks = "ya:report_chunks" + RunWithAsserts = "ya:relwithdebinfo" + SandboxCoverage = "ya:sandbox_coverage" + SequentialRun = "ya:sequential_run" + TraceOutput = "ya:trace_output" + YtRunner = "ya:yt" + + +class ServiceTags(Enum): + AnyTag = "ya:__any_tag" + + +class Status(object): + GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(1, 8) + SKIPPED = -100 + NOT_LAUNCHED = -200 + CANON_DIFF = -300 + DESELECTED = -400 + INTERNAL = -int(2**31 - 1) # maxint + FLAKY = -50 + # XFAILDIFF is internal status and should 
be replaced + # with XFAIL or XPASS during verification stage of canon data + XFAILDIFF = -90 + + BY_NAME = { + 'crashed': CRASHED, + 'deselected': DESELECTED, + 'diff': CANON_DIFF, + 'fail': FAIL, + 'flaky': FLAKY, + 'good': GOOD, + 'internal': INTERNAL, + 'missing': MISSING, + 'not_launched': NOT_LAUNCHED, + 'skipped': SKIPPED, + 'timeout': TIMEOUT, + 'xfail': XFAIL, + 'xfaildiff': XFAILDIFF, + 'xpass': XPASS, + } + TO_STR = { + CANON_DIFF: 'diff', + CRASHED: 'crashed', + DESELECTED: 'deselected', + FAIL: 'fail', + FLAKY: 'flaky', + GOOD: 'good', + INTERNAL: 'internal', + MISSING: 'missing', + NOT_LAUNCHED: 'not_launched', + SKIPPED: 'skipped', + TIMEOUT: 'timeout', + XFAIL: 'xfail', + XFAILDIFF: 'xfaildiff', + XPASS: 'xpass', + } + + +class _Colors(object): + + _NAMES = [ + "blue", + "cyan", + "default", + "green", + "grey", + "magenta", + "red", + "white", + "yellow", + ] + _PREFIXES = ["", "light", "dark"] + + def __init__(self): + self._table = {} + for prefix in self._PREFIXES: + for value in self._NAMES: + name = value + if prefix: + name = "{}_{}".format(prefix, value) + value = "{}-{}".format(prefix, value) + self.__add_color(name.upper(), value) + + def __add_color(self, name, value): + self._table[name] = value + self.__setattr__(name, value) + + +Colors = _Colors() + + +class _Highlight(object): + + _MARKERS = { + # special + "RESET": "rst", + "IMPORTANT": "imp", + "UNIMPORTANT": "unimp", + "BAD": "bad", + "WARNING": "warn", + "GOOD": "good", + "PATH": "path", + "ALTERNATIVE1": "alt1", + "ALTERNATIVE2": "alt2", + "ALTERNATIVE3": "alt3", + } + + def __init__(self): + # setting attributes because __getattr__ is much slower + for attr, value in self._MARKERS.items(): + self.__setattr__(attr, value) + + +Highlight = _Highlight() + + +class _StatusColorMap(object): + # There should be no XFAILDIFF, because it's internal status. + # It should be replaced with XFAIL or XPASS during verification of canon data. + + _MAP = { + 'crashed': Highlight.WARNING, + 'deselected': Highlight.UNIMPORTANT, + 'diff': Highlight.BAD, + 'fail': Highlight.BAD, + 'flaky': Highlight.ALTERNATIVE3, + 'good': Highlight.GOOD, + 'internal': Highlight.BAD, + 'missing': Highlight.ALTERNATIVE1, + 'not_launched': Highlight.BAD, + 'skipped': Highlight.UNIMPORTANT, + 'timeout': Highlight.BAD, + 'xfail': Highlight.WARNING, + 'xpass': Highlight.WARNING, + } + + def __getitem__(self, item): + return self._MAP[item] + + +StatusColorMap = _StatusColorMap() diff --git a/build/plugins/lib/test_const/ya.make b/build/plugins/lib/test_const/ya.make new file mode 100644 index 0000000000..60f4867e46 --- /dev/null +++ b/build/plugins/lib/test_const/ya.make @@ -0,0 +1,9 @@ +OWNER(g:ymake) + +PY23_LIBRARY() + +PY_SRCS( + __init__.py +) + +END() diff --git a/build/plugins/lib/ya.make b/build/plugins/lib/ya.make new file mode 100644 index 0000000000..7e61d12080 --- /dev/null +++ b/build/plugins/lib/ya.make @@ -0,0 +1,7 @@ +OWNER(g:ymake)
+
+PY23_LIBRARY()
+ PY_SRCS(
+ _metric_resolvers.py
+ )
+END()
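To illustrate the test_const helpers defined earlier in this commit, a short sketch (assuming the package is importable as build.plugins.lib.test_const, by analogy with the nots imports above; the asserted values mirror the dictionaries defined in TestSize, Status and _StatusColorMap):

    from build.plugins.lib.test_const import Highlight, Status, StatusColorMap, TestSize

    assert TestSize.get_default_timeout(TestSize.Medium) == 600   # from DefaultTimeouts
    assert TestSize.get_shorthand(TestSize.Large) == "TL"
    assert Status.BY_NAME["xfail"] == Status.XFAIL
    assert Status.TO_STR[Status.TIMEOUT] == "timeout"
    assert StatusColorMap["fail"] == Highlight.BAD                # renders with the "bad" marker
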
diff --git a/build/plugins/linker_script.py b/build/plugins/linker_script.py new file mode 100644 index 0000000000..a5432884c1 --- /dev/null +++ b/build/plugins/linker_script.py @@ -0,0 +1,12 @@ +def onlinker_script(unit, *args): + """ + @usage: LINKER_SCRIPT(Files...) + + Specify files to be used as a linker script + """ + for arg in args: + if not arg.endswith(".ld") and not arg.endswith(".ld.in"): + unit.message(['error', "Invalid linker script extension: {}".format(arg)]) + return + + unit.onglobal_srcs(list(args)) diff --git a/build/plugins/lj_archive.py b/build/plugins/lj_archive.py new file mode 100644 index 0000000000..9f071f3d87 --- /dev/null +++ b/build/plugins/lj_archive.py @@ -0,0 +1,46 @@ +def onlj_archive(unit, *args): + """ + @usage: LJ_ARCHIVE(NAME Name LuaFiles...) + Precompile .lua files using LuaJIT and archive both sources and results using sources names as keys + """ + + def iter_luas(l): + for a in l: + if a.endswith('.lua'): + yield a + + def iter_objs(l): + for a in l: + s = a[:-3] + 'raw' + unit.on_luajit_objdump(['OUT', s, a]) + yield s + + luas = list(iter_luas(args)) + objs = list(iter_objs(luas)) + + unit.onarchive_by_keys(['DONTCOMPRESS', 'NAME', 'LuaScripts.inc', 'KEYS', ':'.join(luas)] + objs) + unit.onarchive_by_keys(['DONTCOMPRESS', 'NAME', 'LuaSources.inc', 'KEYS', ':'.join(luas)] + luas) + + +def onlj_21_archive(unit, *args): + """ + @usage: LJ_21_ARCHIVE(NAME Name LuaFiles...) # deprecated + Precompile .lua files using LuaJIT 2.1 and archive both sources and results using sources names as keys + """ + + def iter_luas(l): + for a in l: + if a.endswith('.lua'): + yield a + + def iter_objs(l): + for a in l: + s = a[:-3] + 'raw' + unit.on_luajit_21_objdump(['OUT', s, a]) + yield s + + luas = list(iter_luas(args)) + objs = list(iter_objs(luas)) + + unit.onarchive_by_keys(['DONTCOMPRESS', 'NAME', 'LuaScripts.inc', 'KEYS', ':'.join(luas)] + objs) + unit.onarchive_by_keys(['DONTCOMPRESS', 'NAME', 'LuaSources.inc', 'KEYS', ':'.join(luas)] + luas) diff --git a/build/plugins/llvm_bc.py b/build/plugins/llvm_bc.py new file mode 100644 index 0000000000..afe46fa386 --- /dev/null +++ b/build/plugins/llvm_bc.py @@ -0,0 +1,36 @@ +import sys + +from _common import rootrel_arc_src, sort_by_keywords, skip_build_root, stripext + + +def onllvm_bc(unit, *args): + free_args, kwds = sort_by_keywords({'SYMBOLS': -1, 'NAME': 1, 'GENERATE_MACHINE_CODE': 0, 'NO_COMPILE': 0}, args) + name = kwds['NAME'][0] + symbols = kwds.get('SYMBOLS') + obj_suf = unit.get('OBJ_SUF') + skip_compile_step = 'NO_COMPILE' in kwds + merged_bc = name + '_merged' + obj_suf + '.bc' + out_bc = name + '_optimized' + obj_suf + '.bc' + bcs = [] + for x in free_args: + rel_path = rootrel_arc_src(x, unit) + bc_path = '${ARCADIA_BUILD_ROOT}/' + skip_build_root(rel_path) + obj_suf + '.bc' + if not skip_compile_step: + if x.endswith('.c'): + llvm_compile = unit.onllvm_compile_c + elif x.endswith('.ll'): + llvm_compile = unit.onllvm_compile_ll + else: + llvm_compile = unit.onllvm_compile_cxx + llvm_compile([rel_path, bc_path]) + bcs.append(bc_path) + unit.onllvm_link([merged_bc] + bcs) + opt_opts = ['-O2', '-globalopt', '-globaldce'] + if symbols: + # XXX: '#' used instead of ',' to overcome ymake tendency to split everything by comma + opt_opts += ['-internalize', '-internalize-public-api-list=' + '#'.join(symbols)] + unit.onllvm_opt([merged_bc, out_bc] + opt_opts) + if 'GENERATE_MACHINE_CODE' in kwds: + unit.onllvm_llc([out_bc, '-O2']) + else: + unit.onresource([out_bc, '/llvm_bc/' + name]) diff --git 
a/build/plugins/macros_with_error.py b/build/plugins/macros_with_error.py new file mode 100644 index 0000000000..eceb8b9c42 --- /dev/null +++ b/build/plugins/macros_with_error.py @@ -0,0 +1,32 @@ +import sys + +import _common + +import ymake + + +def onmacros_with_error(unit, *args): + print >> sys.stderr, 'This macros will fail' + raise Exception('Expected fail in MACROS_WITH_ERROR') + + +def onrestrict_path(unit, *args): + if args: + if 'MSG' in args: + pos = args.index('MSG') + paths, msg = args[:pos], args[pos + 1 :] + msg = ' '.join(msg) + else: + paths, msg = args, 'forbidden' + if not _common.strip_roots(unit.path()).startswith(paths): + error_msg = "Path '[[imp]]{}[[rst]]' is restricted - [[bad]]{}[[rst]]. Valid path prefixes are: [[unimp]]{}[[rst]]".format( + unit.path(), msg, ', '.join(paths) + ) + ymake.report_configure_error(error_msg) + + +def onassert(unit, *args): + val = unit.get(args[0]) + if val and val.lower() == "no": + msg = ' '.join(args[1:]) + ymake.report_configure_error(msg) diff --git a/build/plugins/maps_mobile_idl.py b/build/plugins/maps_mobile_idl.py new file mode 100644 index 0000000000..1964e19192 --- /dev/null +++ b/build/plugins/maps_mobile_idl.py @@ -0,0 +1,1081 @@ +import os +import re +from collections import namedtuple + +from _common import sort_by_keywords + +Framework = namedtuple( + 'Framework', ['cpp_namespace_tokens', 'java_class_path', 'objc_framework_name', 'objc_framework_prefix'] +) + + +def _get_proto_header_file(proto_file_name): + return proto_file_name.split('.')[0] + '.pb.h' + + +def _get_appended_values(unit, key): + value = [] + raw_value = unit.get(key) + if raw_value: + value = filter(lambda x: len(x) > 0, raw_value.split(' ')) + assert len(value) == 0 or value[0] == '$' + key + return value[1:] if len(value) > 0 else value + + +def _load_framework_file_list(unit): + frameworks = [ + unit.resolve(unit.resolve_arc_path(os.sep.join(path.split(os.sep)[1:]))) + for path in unit.get('MAPKIT_IDL_FRAMEWORK_FILES').split(' ') + ] + return frameworks + + +def _require_framework_entry(entry, framework): + if entry not in framework: + raise Exception('No {} entry in {} framework'.format(entry, framework)) + + +def _read_framework(unit, framework_file): + file_path = unit.resolve(framework_file) + result = {} + with open(file_path, 'r') as f: + lineId = 0 + for line in f: + lineId += 1 + tokens = line.split('=') + if len(tokens) != 2: + raise Exception('Malformed idl framework file {} line {}'.format(framework_file, lineId)) + result[tokens[0].strip()] = tokens[1].strip() + + _require_framework_entry('CPP_NAMESPACE', result) + _require_framework_entry('JAVA_PACKAGE', result) + _require_framework_entry('OBJC_FRAMEWORK', result) + _require_framework_entry('OBJC_FRAMEWORK_PREFIX', result) + return Framework( + result['CPP_NAMESPACE'].split('.'), + result['JAVA_PACKAGE'], + result['OBJC_FRAMEWORK'], + result['OBJC_FRAMEWORK_PREFIX'], + ) + + +def _read_frameworks(unit): + framework_file_list = _load_framework_file_list(unit) + result = {} + for file_name in framework_file_list: + name = file_name.split(os.sep)[-1].split('.')[0] + result[name] = _read_framework(unit, file_name) + return result + + +def _extract_by_regexp(line, regexp): + re_match = regexp.search(line) + if not re_match: + return None + return re_match.group(1) + + +class RegExp: + OBJC_INFIX = r'\bobjc_infix\s*([^\s]+);' + + IMPORT = r'^import\s+"([^"]+)"' + + WEAK_INTERFACE = r'\bweak_ref\s+interface\b' + SHARED_INTERFACE = r'\bshared_ref\s+interface\b' + STRONG_INTERFACE = 
r'^\s*interface\b' + NATIVE_LISTENER = r'\bnative\s+listener\b' + STATIC_INTERFACE = r'\bstatic\s+interface\b' + VIEW_DELEGATE = r'\bview_delegate\b' + + CUSTOM_PROTO_HEADER = r'^\s*protoconv\s+"([^"]+)"\s*$' + BASED_ON_PROTO_START = r'\bbased\s+on(\s|$)' + BASED_ON_PROTO = r'\bbased\s+on\s+"([^"]+)"\s*:' + + CUSTOM_CPP_HEADER = r'^\s*cpp\s+"([^"]+)"\s*$' + STRUCT = r'\bstruct\b' + + LITE_STRUCT = r'\blite\s+struct\b' + BRIDGED_STRUCT = r'^(\s*options|\s*(navi_)?serializable|\s*abstract)*\s*struct\s+' + + LAMBDA_LISTENER = r'\blambda\s+listener\b' + LISTENER = r'^\s*listener\s+' + PLATFORM_INTERFACE = r'platform\s+interface' + + VARIANT = r'\bvariant\b' + + OPTIONAL = r'\boptional\b' + INT_64 = r'\bint64\b' + STRING = r'\bstring\b' + POINT = r'\bpoint\b' + BYTES = r'\bbytes\b' + VECTOR = r'\bvector\b' + DICTIONARY = r'\bdictionary\b' + ANY = r'\bany[^_]' + ENUM = r'\benum\b' + TIME = r'\b(time_interval|abs_timestamp|rel_timestamp)\b' + BITMAP = r'\bbitmap\b' + VIEW_PROVIDER = r'\bview_provider\b' + IMAGE_PROVIDER = r'\bimage_provider\b' + ANIMATED_IMAGE_PROVIDER = r'\banimated_image_provider\b' + MODEL_PROVIDER = r'\bmodel_provider\b' + ANIMATED_MODEL_PROVIDER = r'\banimated_model_provider\b' + COLOR = r'\bcolor\b' + PLATFORM_VIEW = r'\bplatform_view\b' + ERROR = r'\b(runtime\.)?Error\b' + TYPE_DICTIONARY = r'\btype_dictionary\b' + + SERIALIZABLE = r'\bserializable\b' + NAVI_SERIALIZABLE = r'\bnavi_serializable\b' + + +class OutputType: + BASE_HEADER = 1 + STRUCT_SOURCE = 2 + PROTOCONV_HEADER = 3 + PROTOCONV_SOURCE = 4 + ANDROID_HEADER = 5 + ANDROID_SOURCE = 6 + IOS_HEADER = 7 + IOS_SOURCE = 8 + IOS_PRIVATE_HEADER = 9 + IOS_BINDING_SOURCE = 10 + DART_CPP_HEADER = 11 + DART_CPP_SOURCE = 12 + DART_SOURCE = 13 + DART_SOURCE_PRIVATE = 14 + + +class OutputNameGenerator: + def __init__(self, file_path, frameworks): + path_tokens = file_path.split(os.sep) + framework_name = path_tokens[0] + self._framework = frameworks[framework_name] + self._cpp_namespace_tokens = self._framework.cpp_namespace_tokens + path_tokens[1:-1] + file_name = path_tokens[-1] + self._cpp_name = file_name.split('.')[0] + + name_tokens = self._cpp_name.split('_') + self._objc_name_core = ''.join((self._capitalize(token) for token in name_tokens)) + self._objc_name = self._framework.objc_framework_prefix + self._objc_name_core + + def set_objc_infix(self, objc_infix): + self._objc_name = self._framework.objc_framework_prefix + objc_infix + self._objc_name_core + + def is_header(self, output_type): + return output_type in [ + OutputType.BASE_HEADER, + OutputType.PROTOCONV_HEADER, + OutputType.ANDROID_HEADER, + OutputType.IOS_HEADER, + OutputType.IOS_PRIVATE_HEADER, + OutputType.DART_CPP_HEADER + ] + + def _cpp_file_name(self, extension, additional_tokens=[]): + path_tokens = self._cpp_namespace_tokens + additional_tokens + [self._cpp_name + extension] + return os.path.join(*path_tokens) + + def _dart_public_file_name(self, extension): + return self._cpp_file_name(extension) + + def _dart_private_file_name(self, extension): + path_tokens = ['src'] + self._cpp_namespace_tokens + [self._cpp_name + '_private' + extension] + return os.path.join(*path_tokens) + + def _objc_file_name(self, extension, additional_tokens=[]): + path_tokens = [self._framework.objc_framework_name] + additional_tokens + [self._objc_name + extension] + return os.path.join(*path_tokens) + + def _capitalize(self, word): + return word[:1].upper() + word[1:] + + def generate_name(self, output_type): + if output_type is OutputType.BASE_HEADER: + return 
self._cpp_file_name('.h') + + if output_type is OutputType.STRUCT_SOURCE: + return self._cpp_file_name('.cpp') + + if output_type is OutputType.PROTOCONV_HEADER: + return self._cpp_file_name('.conv.h') + + if output_type is OutputType.PROTOCONV_SOURCE: + return self._cpp_file_name('.conv.cpp') + + if output_type is OutputType.ANDROID_HEADER: + return self._cpp_file_name('_binding.h', ['internal', 'android']) + + if output_type is OutputType.ANDROID_SOURCE: + return self._cpp_file_name('_binding.cpp', ['internal', 'android']) + + if output_type is OutputType.IOS_HEADER: + return self._objc_file_name('.h') + + if output_type is OutputType.IOS_SOURCE: + return self._objc_file_name('.m') + + if output_type is OutputType.IOS_PRIVATE_HEADER: + return self._objc_file_name('_Private.h', ['Internal']) + + if output_type is OutputType.IOS_BINDING_SOURCE: + return self._objc_file_name('_Binding.mm') + + if output_type is OutputType.DART_CPP_SOURCE: + return self._cpp_file_name('_dart_binding.cpp') + + if output_type is OutputType.DART_SOURCE: + return self._dart_public_file_name('.dart') + + if output_type is OutputType.DART_CPP_HEADER: + return self._cpp_file_name('_dart_binding.h') + + if output_type is OutputType.DART_SOURCE_PRIVATE: + return self._dart_private_file_name('.dart') + + def generate_path(self, output_type): + name = self.generate_name(output_type) + + if self.is_header(output_type): + return os.path.join('include', name) + + return os.path.join('impl', name) + + +class ProcessContext: + def __init__(self, unit, frameworks, file_paths): + self.unit = unit + self.frameworks = frameworks + self.file_paths = file_paths + self.is_dart = unit.enabled("MAPKIT_DART_IDL") + self.is_ios = unit.enabled("OS_IOS") and not self.is_dart + self.is_android = unit.enabled("OS_ANDROID") and not self.is_dart + self.output_name_generator = None + self.add_generated_output_includes = unit.enabled("H_CPP_IDL") + + def runtime_include(self, include_name): + name_tokens = self.frameworks['runtime'].cpp_namespace_tokens + [include_name] + return os.path.join(*name_tokens) + + def runtime_objc_import(self, import_name): + return os.path.join( + self.frameworks['runtime'].objc_framework_name, + self.frameworks['runtime'].objc_framework_prefix + import_name, + ) + + +class BaseRule: + def __init__(self, context): + self.context = context + + def start_file(self, file_path): + pass + + def process_line(self, line): + pass + + def get_output_types(self): + return set() + + def get_output_includes(self): + return set() + + +class ObjcInfixRule(BaseRule): + def __init__(self, context): + BaseRule.__init__(self, context) + self._found_infix = False + self._reg_exp = re.compile(RegExp.OBJC_INFIX) + + def start_file(self, file_path): + BaseRule.start_file(self, file_path) + self.context.output_name_generator.set_objc_infix('') + self._found_infix = False + + def process_line(self, line): + BaseRule.process_line(self, line) + if self._found_infix: + return + + infix = _extract_by_regexp(line, self._reg_exp) + if infix: + self._found_infix = True + self.context.output_name_generator.set_objc_infix(infix) + + +class ImportRule(BaseRule): + def __init__(self, context): + BaseRule.__init__(self, context) + self._imports = set() + self._import_reg_exp = re.compile(RegExp.IMPORT) + + def start_file(self, file_path): + self._imports = set() + + def process_line(self, line): + BaseRule.process_line(self, line) + idl_import = _extract_by_regexp(line, self._import_reg_exp) + if idl_import: + self._imports.add(idl_import) + + def 
get_output_includes(self): + result = set() + for idl_import in self._imports: + if idl_import in self.context.file_paths: + continue + + name_generator = OutputNameGenerator(idl_import, self.context.frameworks) + result.add(name_generator.generate_name(OutputType.BASE_HEADER)) + + return result + + +class DefaultRule(BaseRule): + def __init__(self, context): + BaseRule.__init__(self, context) + + def get_output_types(self): + result = set() + result.add(OutputType.BASE_HEADER) + + if self.context.is_dart: + result.add(OutputType.DART_SOURCE) + result.add(OutputType.DART_CPP_SOURCE) + result.add(OutputType.DART_CPP_HEADER) + result.add(OutputType.DART_SOURCE_PRIVATE) + return result + + if self.context.is_ios: + result.add(OutputType.IOS_HEADER) + result.add(OutputType.IOS_SOURCE) + + return result + + def get_output_includes(self): + result = set() + + if self.context.is_dart: + result.add(self.context.runtime_include('bindings/traits.h')) + result.add(self.context.runtime_include('assert.h')) + result.add(self.context.runtime_include('bindings/flutter/base_types.h')) + result.add(self.context.runtime_include('bindings/flutter/exception.h')) + result.add(self.context.runtime_include('bindings/flutter/export.h')) + result.add(self.context.runtime_include('bindings/flutter/to_native.h')) + result.add(self.context.runtime_include('bindings/flutter/to_platform.h')) + return result + + result.add('yandex/maps/export.h') + result.add(self.context.runtime_include('assert.h')) + result.add(self.context.runtime_include('exception.h')) + result.add(self.context.runtime_include('bindings/traits.h')) + + if self.context.is_ios: + result.add(self.context.runtime_include('bindings/platform.h')) + result.add('Foundation/Foundation.h') + result.add(self.context.runtime_include('ios/object.h')) + result.add(self.context.runtime_include('bindings/ios/to_native.h')) + result.add(self.context.runtime_include('bindings/ios/to_platform.h')) + result.add(self.context.runtime_include('ios/exception.h')) + result.add(self.context.runtime_objc_import('Subscription.h')) + + if self.context.is_android: + result.add(self.context.runtime_include('bindings/platform.h')) + result.add(self.context.runtime_include('android/object.h')) + result.add(self.context.runtime_include('bindings/android/to_native.h')) + result.add(self.context.runtime_include('bindings/android/to_platform.h')) + result.add(self.context.runtime_include('exception.h')) + + return result + + +class CheckRule(BaseRule): + def __init__( + self, + context, + output_types=set(), + output_includes=set(), + ios_output_types=set(), + ios_output_includes=set(), + android_output_types=set(), + android_output_includes=set(), + ): + BaseRule.__init__(self, context) + self._output_types = output_types + self._output_includes = output_includes + self._ios_output_types = ios_output_types + self._ios_output_includes = ios_output_includes + self._android_output_types = android_output_types + self._android_output_includes = android_output_includes + + def triggered_on_file(self): + pass + + def get_output_types(self): + result = set() + if self.triggered_on_file(): + result.update(self._output_types) + + if self.context.is_ios: + result.update(self._ios_output_types) + + if self.context.is_android: + result.update(self._android_output_types) + + return result + + def get_output_includes(self): + result = set() + + if self.triggered_on_file(): + result.update(self._output_includes) + + if self.context.is_ios: + result.update(self._ios_output_includes) + + if 
self.context.is_android: + result.update(self._android_output_includes) + + return result + + +class OrRule(CheckRule): + def __init__(self, check_rules, *args, **kwargs): + CheckRule.__init__(self, *args, **kwargs) + self._rules = check_rules + + def triggered_on_file(self): + return any((rule.triggered_on_file() for rule in self._rules)) + + +class AndRule(CheckRule): + def __init__(self, check_rules, *args, **kwargs): + CheckRule.__init__(self, *args, **kwargs) + self._rules = check_rules + + def triggered_on_file(self): + return all((rule.triggered_on_file() for rule in self._rules)) + + +class RegExpRule(CheckRule): + def __init__(self, reg_exp_string, *args, **kwargs): + CheckRule.__init__(self, *args, **kwargs) + self._reg_exp = re.compile(reg_exp_string) + self._reg_exp_found_file = False + + def start_file(self, file_path): + CheckRule.start_file(self, file_path) + self._reg_exp_found_file = False + + def process_line(self, line): + CheckRule.process_line(self, line) + if self._reg_exp_found_file: + return + + if self._reg_exp.search(line) is not None: + self._reg_exp_found_file = True + + def triggered_on_file(self): + return self._reg_exp_found_file + + +class ProtoRule(BaseRule): + def __init__(self, context): + BaseRule.__init__(self, context) + self._file_has_non_custom_proto = False + self._currently_custom_proto = False + self._currently_based_on = False + self._running_line = '' + self._custom_proto_headers = set() + self._proto_files = set() + + self._custom_proto_reg_exp = re.compile(RegExp.CUSTOM_PROTO_HEADER) + self._based_on_proto_start_reg_exp = re.compile(RegExp.BASED_ON_PROTO_START) + self._based_on_proto_reg_exp = re.compile(RegExp.BASED_ON_PROTO) + + def start_file(self, file_path): + BaseRule.start_file(self, file_path) + self._currently_custom_proto = False + self._file_has_non_custom_proto = False + self._currently_based_on = False + self._running_line = '' + + def process_line(self, line): + BaseRule.process_line(self, line) + proto_header = _extract_by_regexp(line, self._custom_proto_reg_exp) + if proto_header: + self._custom_proto_headers.add(proto_header) + self._currently_based_on = False + self._running_line = '' + + self._currently_custom_proto = True + return + + if self._based_on_proto_start_reg_exp.search(line) is not None: + self._currently_based_on = True + self._running_line = '' + + if self._currently_based_on: + self._running_line += '\n' + line + proto_file = _extract_by_regexp(self._running_line, self._based_on_proto_reg_exp) + if proto_file: + self._currently_based_on = False + self._running_line = '' + self._proto_files.add(proto_file) + + if self._currently_custom_proto: + self._currently_custom_proto = False + else: + self._file_has_non_custom_proto = True + + def get_output_types(self): + if self._file_has_non_custom_proto: + return {OutputType.PROTOCONV_HEADER, OutputType.PROTOCONV_SOURCE} + return set() + + def get_output_includes(self): + result = set() + result.update(self._custom_proto_headers) + result.update((proto_file.split('.')[0] + '.pb.h' for proto_file in self._proto_files)) + + if self._file_has_non_custom_proto: + result.update({'vector'}) + + return result + + +class StructImplementationRule(BaseRule): + def __init__(self, context): + BaseRule.__init__(self, context) + self._file_has_non_custom_struct = False + self._custom_cpp_headers = set() + self._currently_custom_struct = False + + self._custom_cpp_header_reg_exp = re.compile(RegExp.CUSTOM_CPP_HEADER) + self._struct_reg_exp = re.compile(RegExp.STRUCT) + + def 
start_file(self, file_path): + BaseRule.start_file(self, file_path) + self._currently_custom_struct = False + self._file_has_non_custom_struct = False + + def process_line(self, line): + BaseRule.process_line(self, line) + + cpp_header = _extract_by_regexp(line, self._custom_cpp_header_reg_exp) + if cpp_header: + self._custom_cpp_headers.add(cpp_header) + self._currently_custom_struct = True + return + + if not self._file_has_non_custom_struct: + if self._struct_reg_exp.search(line) is not None: + if self._currently_custom_struct: + self._currently_custom_struct = False + else: + self._file_has_non_custom_struct = True + + def get_output_types(self): + result = set() + if self._file_has_non_custom_struct: + result.add(OutputType.STRUCT_SOURCE) + if self.context.is_ios: + result.add(OutputType.IOS_BINDING_SOURCE) + + return result + + def get_output_includes(self): + return self._custom_cpp_headers + + +class IdlFileProcessor: + def __init__(self, unit, frameworks, file_paths): + self._context = ProcessContext(unit, frameworks, file_paths) + self._resolved_idl_dir = unit.resolve(unit.resolve_arc_path(unit.path())) + self._outputs = set() + self._output_includes = set() + + self._rules = set() + + self._rules.add(ObjcInfixRule(self._context)) + self._rules.add(DefaultRule(self._context)) + self._rules.add(ImportRule(self._context)) + self._rules.add(ProtoRule(self._context)) + self._rules.add(StructImplementationRule(self._context)) + + view_delegate_rule = self._create_reg_exp_rule( + RegExp.VIEW_DELEGATE, output_includes={self._context.runtime_include('view/view_delegate.h')} + ) + + weak_interface_rule = self._create_or_rule( + rules={ + self._create_reg_exp_rule(RegExp.WEAK_INTERFACE), + view_delegate_rule, + }, + output_includes={'boost/any.hpp', 'memory', self._context.runtime_include('platform_holder.h')}, + ) + + strong_interface_rule = self._create_or_rule( + rules={ + self._create_reg_exp_rule(RegExp.STRONG_INTERFACE), + self._create_reg_exp_rule(RegExp.NATIVE_LISTENER), + } + ) + + non_static_interface_rule = self._create_or_rule( + rules={self._create_reg_exp_rule(RegExp.SHARED_INTERFACE), strong_interface_rule, weak_interface_rule}, + ios_output_types={OutputType.IOS_PRIVATE_HEADER}, + ) + + # interface rule + self._create_or_rule( + rules={self._create_reg_exp_rule(RegExp.STATIC_INTERFACE), non_static_interface_rule}, + ios_output_types={OutputType.IOS_BINDING_SOURCE}, + android_output_types={OutputType.ANDROID_SOURCE}, + ios_output_includes={'memory'}, + ) + + bridged_struct_rule = self._create_reg_exp_rule( + RegExp.BRIDGED_STRUCT, + output_includes={'memory', self._context.runtime_include('bindings/platform.h')}, + android_output_includes={self._context.runtime_include('bindings/android/internal/new_serialization.h')}, + ) + + # struct rule + self._create_or_rule( + rules={self._create_reg_exp_rule(RegExp.LITE_STRUCT), bridged_struct_rule}, + ios_output_types={OutputType.IOS_PRIVATE_HEADER}, + android_output_types={OutputType.ANDROID_HEADER, OutputType.ANDROID_SOURCE}, + ios_output_includes={self._context.runtime_objc_import('NativeObject.h')}, + ) + + lambda_listener_rule = self._create_reg_exp_rule( + RegExp.LAMBDA_LISTENER, + output_includes={'functional'}, + android_output_includes={self._context.runtime_include('verify_and_run.h')}, + ios_output_includes={self._context.runtime_include('verify_and_run.h')}, + ) + + # listener rule + self._create_or_rule( + rules={ + self._create_reg_exp_rule(RegExp.PLATFORM_INTERFACE), + self._create_reg_exp_rule(RegExp.LISTENER), + 
lambda_listener_rule, + }, + ios_output_types={OutputType.IOS_PRIVATE_HEADER, OutputType.IOS_BINDING_SOURCE}, + android_output_types={OutputType.ANDROID_HEADER, OutputType.ANDROID_SOURCE}, + output_includes={'memory'}, + android_output_includes={'string', self._context.runtime_include('verify_and_run.h')}, + ios_output_includes={self._context.runtime_include('verify_and_run.h')}, + ) + + if self._context.unit.enabled("MAPS_MOBILE_USE_STD_VARIANT"): + variant_header = 'variant' + variant_serialization_header = self.context.runtime_include('serialization/variant.hpp') + else: + variant_header = 'boost/variant.hpp' + variant_serialization_header = 'boost/serialization/variant.hpp' + + variant_rule = self._create_reg_exp_rule( + RegExp.VARIANT, + ios_output_types={OutputType.IOS_PRIVATE_HEADER, OutputType.IOS_BINDING_SOURCE}, + output_includes={variant_header, 'boost/variant/recursive_wrapper.hpp'}, + ios_output_includes={ + self._context.runtime_include('bindings/ios/to_platform_fwd.h'), + self._context.runtime_include('bindings/ios/to_native_fwd.h'), + 'type_traits', + }, + ) + + optional_rule = self._create_reg_exp_rule(RegExp.OPTIONAL, output_includes={'optional'}) + # int64 rule + self._create_reg_exp_rule(RegExp.INT_64, output_includes={'cstdint'}) + + string_rule = self._create_reg_exp_rule( + RegExp.STRING, output_includes={'string', self._context.runtime_include('bindings/platform.h')} + ) + + point_rule = self._create_reg_exp_rule( + RegExp.POINT, + output_includes={'Eigen/Geometry', self._context.runtime_include('bindings/point_traits.h')}, + android_output_includes={ + self._context.runtime_include('bindings/android/point_to_native.h'), + self._context.runtime_include('bindings/android/point_to_platform.h'), + }, + ios_output_includes={ + self._context.runtime_include('bindings/ios/point_to_native.h'), + self._context.runtime_include('bindings/ios/point_to_platform.h'), + 'UIKit/UIKit.h', + }, + ) + + bytes_rule = self._create_reg_exp_rule(RegExp.BYTES, output_includes={'cstdint', 'vector'}) + + vector_rule = self._create_reg_exp_rule( + RegExp.VECTOR, + output_includes={'memory', self._context.runtime_include('bindings/platform.h')}, + android_output_includes={ + self._context.runtime_include('bindings/android/vector_to_native.h'), + self._context.runtime_include('bindings/android/vector_to_platform.h'), + }, + ios_output_includes={ + self._context.runtime_include('bindings/ios/vector_to_native.h'), + self._context.runtime_include('bindings/ios/vector_to_platform.h'), + }, + ) + + dictionary_rule = self._create_reg_exp_rule( + RegExp.DICTIONARY, + output_includes={'memory', self._context.runtime_include('bindings/platform.h')}, + android_output_includes={ + self._context.runtime_include('bindings/android/dictionary_to_native.h'), + self._context.runtime_include('bindings/android/dictionary_to_platform.h'), + }, + ios_output_includes={ + self._context.runtime_include('bindings/ios/dictionary_to_native.h'), + self._context.runtime_include('bindings/ios/dictionary_to_platform.h'), + }, + ) + + # any rule + self._create_reg_exp_rule( + RegExp.ANY, output_includes={'boost/any.hpp', self._context.runtime_include('bindings/platform.h')} + ) + + time_rule = self._create_reg_exp_rule(RegExp.TIME, output_includes={self._context.runtime_include('time.h')}) + + # bitmap rule + self._create_reg_exp_rule( + RegExp.BITMAP, + output_includes={self._context.runtime_include('platform_bitmap.h')}, + ios_output_includes={'UIKit/UIKit.h'}, + ) + + # image_provider rule + self._create_reg_exp_rule( + 
RegExp.IMAGE_PROVIDER, + output_includes={self._context.runtime_include('image/image_provider.h')}, + android_output_includes={self._context.runtime_include('image/android/image_provider_binding.h')}, + ios_output_includes={self._context.runtime_include('image/ios/image_provider_binding.h'), 'UIKit/UIKit.h'}, + ) + + # animated_image_provider rule + self._create_reg_exp_rule( + RegExp.ANIMATED_IMAGE_PROVIDER, + output_includes={self._context.runtime_include('image/animated_image_provider.h')}, + android_output_includes={self._context.runtime_include('image/android/animated_image_provider_binding.h')}, + ios_output_includes={ + self._context.runtime_include('image/ios/animated_image_provider_binding.h'), + self._context.runtime_objc_import('AnimatedImageProvider.h'), + }, + ) + + # model_provider and animated_model_provider rules + model_provider_rule = self._create_reg_exp_rule( + RegExp.MODEL_PROVIDER, output_includes={self._context.runtime_include('model/model_provider.h')} + ) + animated_model_provider_rule = self._create_reg_exp_rule( + RegExp.ANIMATED_MODEL_PROVIDER, + output_includes={self._context.runtime_include('model/animated_model_provider.h')}, + ) + if not unit.enabled('MAPS_MOBILE_PUBLIC_API'): + self._create_or_rule( + rules={model_provider_rule}, + android_output_includes={self._context.runtime_include('model/android/model_provider_binding.h')}, + ios_output_includes={ + self._context.runtime_include('model/ios/model_provider_binding.h'), + self._context.runtime_objc_import('ModelProvider.h'), + }, + ) + + self._create_or_rule( + rules={animated_model_provider_rule}, + android_output_includes={ + self._context.runtime_include('model/android/animated_model_provider_binding.h') + }, + ios_output_includes={ + self._context.runtime_include('model/ios/animated_model_provider_binding.h'), + self._context.runtime_objc_import('AnimatedModelProvider.h'), + }, + ) + + # view_provider rule + self._create_reg_exp_rule( + RegExp.VIEW_PROVIDER, + output_includes={self._context.runtime_include('ui_view/view_provider.h')}, + android_output_includes={self._context.runtime_include('ui_view/android/view_provider_binding.h')}, + ios_output_includes={ + self._context.runtime_include('ui_view/ios/view_provider_binding.h'), + self._context.runtime_objc_import('ViewProvider.h'), + }, + ) + + # platform_view rule + self._create_reg_exp_rule( + RegExp.PLATFORM_VIEW, + output_includes={self._context.runtime_include('view/platform_view.h')}, + android_output_includes={self._context.runtime_include('view/android/to_native.h')}, + ios_output_includes={ + self._context.runtime_include('view/ios/to_native.h'), + self._context.runtime_objc_import('PlatformView_Fwd.h'), + self._context.runtime_objc_import('PlatformView_Private.h'), + }, + ) + + # type_dictionary rule + self._create_reg_exp_rule( + RegExp.TYPE_DICTIONARY, + output_includes={ + self._context.runtime_include('bindings/platform.h'), + self._context.runtime_include('bindings/type_dictionary.h'), + }, + android_output_includes={ + self._context.runtime_include('bindings/android/type_dictionary_to_native.h'), + self._context.runtime_include('bindings/android/type_dictionary_to_platform.h'), + }, + ios_output_includes={ + self._context.runtime_include('bindings/ios/type_dictionary_to_native.h'), + self._context.runtime_include('bindings/ios/type_dictionary_to_platform.h'), + self._context.runtime_objc_import('TypeDictionary.h'), + }, + ) + + # color rule + self._create_reg_exp_rule( + RegExp.COLOR, + 
output_includes={self._context.runtime_include('color.h')}, + ios_output_includes={'UIKit/UIKit.h'}, + ) + + # error rule + self._create_reg_exp_rule( + RegExp.ERROR, + android_output_includes={self._context.runtime_include('android/make_error.h')}, + ios_output_includes={self._context.runtime_include('ios/make_error.h')}, + ) + + navi_serialization = self._context.unit.enabled('MAPS_MOBILE_ENABLE_NAVI_SERIALIZATION') + if navi_serialization: + serialization_rule = self._create_or_rule( + {self._create_reg_exp_rule(RegExp.SERIALIZABLE), self._create_reg_exp_rule(RegExp.NAVI_SERIALIZABLE)} + ) + else: + serialization_rule = self._create_reg_exp_rule(RegExp.SERIALIZABLE) + + self._serialization_rule = self._create_or_rule( + rules={serialization_rule, variant_rule}, + output_includes={ + 'boost/serialization/nvp.hpp', + self._context.runtime_include('serialization/ptr.h'), + self._context.runtime_include('bindings/internal/archive_generator.h'), + self._context.runtime_include('bindings/internal/archive_reader.h'), + self._context.runtime_include('bindings/internal/archive_writer.h'), + }, + ) + + # point serialization rule + self._create_serialization_rule(point_rule, self._context.runtime_include('serialization/math.h')) + + # optional serialization rule + self._create_serialization_rule( + optional_rule, self._context.runtime_include('serialization/serialization_std.h') + ) + + # bridged struct serialization rule + self._create_serialization_rule(bridged_struct_rule, self._context.runtime_include('bindings/export.h')) + + # time serialization rule + self._create_serialization_rule(time_rule, self._context.runtime_include('serialization/chrono.h')) + + # string serialization rule + self._create_serialization_rule(string_rule, 'boost/serialization/string.hpp') + + # bytes serialization rule + self._create_serialization_rule(bytes_rule, 'boost/serialization/vector.hpp') + + # vector serialization rule + self._create_serialization_rule(vector_rule, 'boost/serialization/vector.hpp') + + # dictionary serialization rule + self._create_serialization_rule(dictionary_rule, 'boost/serialization/map.hpp') + + # variant serialization rule + self._create_serialization_rule(variant_rule, variant_serialization_header) + + def _create_reg_exp_rule(self, reg_exp_string, *args, **kwargs): + rule = RegExpRule(reg_exp_string, self._context, *args, **kwargs) + self._rules.add(rule) + return rule + + def _create_or_rule(self, rules, *args, **kwargs): + rule = OrRule(rules, self._context, *args, **kwargs) + self._rules.add(rule) + return rule + + def _create_and_rule(self, rules, *args, **kwargs): + rule = AndRule(rules, self._context, *args, **kwargs) + self._rules.add(rule) + return rule + + def _create_serialization_rule(self, additional_rule, serialization_header): + rule = self._create_and_rule( + rules={self._serialization_rule, additional_rule}, output_includes={serialization_header} + ) + return rule + + def _split_and_remove_comments(self, input_file): + inside_comment = False + for line in input_file: + current_line = line + + if inside_comment: + closing_index = current_line.find("*/") + if closing_index == -1: + continue + current_line = current_line[closing_index + 2 :] + inside_comment = False + + oneline_index = current_line.find("//") + if oneline_index != -1: + current_line = current_line[:oneline_index] + + opening_index = current_line.find("/*") + while opening_index != -1: + closing_index = current_line.find("*/") + if closing_index == -1: + current_line = current_line[:opening_index] + 
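# an unclosed "/*" means the block comment continues on later lines: keep only the text before it and remember the open state +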
inside_comment = True + else: + current_line = current_line[:opening_index] + current_line[closing_index + 2 :] + opening_index = current_line.find("/*") + + yield current_line + + def _should_add_to_output_includes(self, output_type): + return self._context.add_generated_output_includes and self._context.output_name_generator.is_header( + output_type + ) + + def process_files(self): + for file_path in self._context.file_paths: + self._context.output_name_generator = OutputNameGenerator(file_path, self._context.frameworks) + + for rule in self._rules: + rule.start_file(file_path) + + with open(os.path.join(self._resolved_idl_dir, file_path), 'r') as f: + for line in self._split_and_remove_comments(f): + for rule in self._rules: + rule.process_line(line) + + for rule in self._rules: + for output_type in rule.get_output_types(): + self._outputs.add(self._context.output_name_generator.generate_path(output_type)) + + if self._should_add_to_output_includes(output_type): + self._output_includes.add(self._context.output_name_generator.generate_name(output_type)) + + self._output_includes.update(rule.get_output_includes()) + + def get_outputs(self): + return self._outputs + + def get_output_includes(self): + return self._output_includes + + +def process_files(unit, file_paths): + frameworks = _read_frameworks(unit) + + processor = IdlFileProcessor(unit, frameworks, file_paths) + processor.process_files() + outputs = processor.get_outputs() + output_includes = processor.get_output_includes() + + return (outputs, output_includes) + + +def on_process_maps_mobile_idl(unit, *args): + if not unit.enabled('MAPSMOBI_BUILD_TARGET'): + return + + idl_files, kwds = sort_by_keywords({'FILTER': -1, 'FILTER_OUT': -1, 'GLOBAL_OUTPUTS': 0}, args) + + if len(idl_files) == 0: + return + + is_global_outputs = 'GLOBAL_OUTPUTS' in kwds + filter_in = kwds.get('FILTER', []) + filter_out = kwds.get('FILTER_OUT', []) + + is_java_idl = unit.enabled("JAVA_IDL") + is_dart_idl = unit.enabled("MAPKIT_DART_IDL") + + outputs, output_includes = process_files(unit, idl_files) + + if filter_in: + outputs = [o for o in outputs if any([o.endswith(x) for x in filter_in])] + if filter_out: + outputs = [o for o in outputs if not any([o.endswith(x) for x in filter_out])] + + if len(outputs) == 0 and not is_java_idl: + return + + base_out_dir = '${{ARCADIA_BUILD_ROOT}}/{}'.format(unit.path()[3:]) + unit.onaddincl(['GLOBAL', '{}/include'.format(base_out_dir)]) + + include_dirs = _get_appended_values(unit, 'MAPKIT_IDL_INCLUDES') + include_dirs.append(unit.path()[3:]) + + framework_dir = unit.get('MAPKIT_IDL_FRAMEWORK') + + extra_inputs = unit.get('MAPKIT_IDL_EXTRA_INPUTS').split(' ') + + idl_args = [] + idl_args.extend(['OUT_BASE_ROOT', base_out_dir, 'OUT_ANDROID_ROOT', base_out_dir, 'OUT_IOS_ROOT', base_out_dir]) + + if framework_dir: + idl_args.extend(['FRAMEWORK_DIRS', framework_dir]) + + if include_dirs: + idl_args.append('INCLUDES') + idl_args.extend(include_dirs) + + idl_args.append('IN') + idl_args.extend(idl_files) + if extra_inputs: + idl_args.extend(extra_inputs) + + sorted_outputs = sorted(outputs) + dart_outputs = [] + global_outputs = [] + + if is_dart_idl: + dart_outputs = [x for x in sorted_outputs if x.endswith('.dart')] + if is_global_outputs: + global_outputs = [x for x in sorted_outputs if x.endswith('.cpp')] + elif not is_java_idl: + if is_global_outputs: + global_outputs = [x for x in sorted_outputs if x.endswith(('.cpp', '.m', '.mm'))] + + if not is_java_idl: + non_global_outputs = sorted(set(outputs) - 
set(global_outputs) - set(dart_outputs)) + + if global_outputs: + idl_args.append('OUT_NOAUTO') + idl_args.extend(global_outputs + dart_outputs) + unit.onglobal_srcs(global_outputs) + + if non_global_outputs: + idl_args.append('OUT') + idl_args.extend(non_global_outputs) + + idl_args.append('OUTPUT_INCLUDES') + idl_args.extend(sorted(set(output_includes) - set(outputs))) + + idl_args.append('IDLS') + idl_args.extend(idl_files) + + if is_java_idl: + unit.on_run_idl_tool_java(idl_args) + else: + unit.on_run_idl_tool(idl_args) diff --git a/build/plugins/mx_archive.py b/build/plugins/mx_archive.py new file mode 100644 index 0000000000..76b785fcde --- /dev/null +++ b/build/plugins/mx_archive.py @@ -0,0 +1,17 @@ +def onmx_formulas(unit, *args): + """ + @usage: MX_FORMULAS(BinFiles...) # deprecated, matrixnet + Create MatrixNet formulas archive + """ + + def iter_infos(): + for a in args: + if a.endswith('.bin'): + unit.on_mx_bin_to_info([a]) + yield a[:-3] + 'info' + else: + yield a + + infos = list(iter_infos()) + unit.onarchive_asm(['NAME', 'MxFormulas'] + infos) + unit.on_mx_gen_table(infos) diff --git a/build/plugins/nots.py b/build/plugins/nots.py new file mode 100644 index 0000000000..5157afc526 --- /dev/null +++ b/build/plugins/nots.py @@ -0,0 +1,509 @@ +import fnmatch +import os +import re + +import ymake +import ytest +from _common import get_norm_unit_path, rootrel_arc_src, to_yesno + + +class PluginLogger(object): + def __init__(self): + self.unit = None + self.prefix = "" + + def reset(self, unit, prefix=""): + self.unit = unit + self.prefix = prefix + + def get_state(self): + return (self.unit, self.prefix) + + def _stringify_messages(self, messages): + parts = [] + for m in messages: + if m is None: + parts.append("None") + else: + parts.append(m if isinstance(m, str) else repr(m)) + + # cyan color (code 36) for messages + return "\033[0;32m{}\033[0;49m \033[0;36m{}\033[0;49m".format(self.prefix, " ".join(parts)) + + def info(self, *messages): + if self.unit: + self.unit.message(["INFO", self._stringify_messages(messages)]) + + def warn(self, *messages): + if self.unit: + self.unit.message(["WARN", self._stringify_messages(messages)]) + + def error(self, *messages): + if self.unit: + self.unit.message(["ERROR", self._stringify_messages(messages)]) + + def print_vars(self, *variables): + if self.unit: + values = ["{}={}".format(v, self.unit.get(v)) for v in variables] + self.info(values) + + +logger = PluginLogger() + + +def _with_report_configure_error(fn): + def _wrapper(*args, **kwargs): + last_state = logger.get_state() + unit = args[0] + logger.reset(unit if unit.get("TS_LOG") == "yes" else None, fn.__name__) + try: + fn(*args, **kwargs) + except Exception as exc: + ymake.report_configure_error(str(exc)) + if unit.get("TS_RAISE") == "yes": + raise + else: + unit.message(["WARN", "Configure error is reported. 
Add -DTS_RAISE to see actual exception"]) + finally: + logger.reset(*last_state) + + return _wrapper + + +def _canonize_resource_name(name): + # type: (str) -> str + return re.sub(r"\W+", "_", name).strip("_").upper() + + +def _build_cmd_input_paths(paths, hide=False): + # type: (list[str], bool) -> str + return " ".join(["${{input{}:\"{}\"}}".format(";hide" if hide else "", p) for p in paths]) + + +def _create_pm(unit): + from lib.nots.package_manager import manager + + sources_path = unit.path() + module_path = unit.get("MODDIR") + if unit.get("TS_TEST_FOR"): + sources_path = unit.get("TS_TEST_FOR_DIR") + module_path = unit.get("TS_TEST_FOR_PATH") + + return manager( + sources_path=unit.resolve(sources_path), + build_root="$B", + build_path=unit.path().replace("$S", "$B", 1), + contribs_path=unit.get("NPM_CONTRIBS_PATH"), + nodejs_bin_path=None, + script_path=None, + module_path=module_path, + ) + + +def _create_erm_json(unit): + from lib.nots.erm_json_lite import ErmJsonLite + + erm_packages_path = unit.get("ERM_PACKAGES_PATH") + path = unit.resolve(unit.resolve_arc_path(erm_packages_path)) + + return ErmJsonLite.load(path) + + +@_with_report_configure_error +def on_from_npm_lockfiles(unit, *args): + pm = _create_pm(unit) + lf_paths = [] + + for lf_path in args: + abs_lf_path = unit.resolve(unit.resolve_arc_path(lf_path)) + if abs_lf_path: + lf_paths.append(abs_lf_path) + elif unit.get("TS_STRICT_FROM_NPM_LOCKFILES") == "yes": + ymake.report_configure_error("lockfile not found: {}".format(lf_path)) + + for pkg in pm.extract_packages_meta_from_lockfiles(lf_paths): + unit.on_from_npm([pkg.name, pkg.version, pkg.sky_id, pkg.integrity, pkg.integrity_algorithm, pkg.tarball_path]) + + +@_with_report_configure_error +def on_peerdir_ts_resource(unit, *resources): + pm = _create_pm(unit) + pj = pm.load_package_json_from_dir(pm.sources_path) + erm_json = _create_erm_json(unit) + dirs = [] + + nodejs_version = _select_matching_version(erm_json, "nodejs", pj.get_nodejs_version()) + + for tool in resources: + if tool == "nodejs": + dirs.append(os.path.join("build", "platform", tool, str(nodejs_version))) + elif erm_json.is_resource_multiplatform(tool): + v = _select_matching_version(erm_json, tool, pj.get_dep_specifier(tool)) + sb_resources = [ + sbr for sbr in erm_json.get_sb_resources(tool, v) if sbr.get("nodejs") == nodejs_version.major + ] + nodejs_dir = "NODEJS_{}".format(nodejs_version.major) + if len(sb_resources) > 0: + dirs.append(os.path.join("build", "external_resources", tool, str(v), nodejs_dir)) + else: + unit.message(["WARN", "Missing {}@{} for {}".format(tool, str(v), nodejs_dir)]) + else: + v = _select_matching_version(erm_json, tool, pj.get_dep_specifier(tool)) + dirs.append(os.path.join("build", "external_resources", tool, str(v))) + + unit.onpeerdir(dirs) + + +@_with_report_configure_error +def on_ts_configure(unit, tsconfig_path): + from lib.nots.package_manager.base import PackageJson + from lib.nots.package_manager.base.utils import build_pj_path + from lib.nots.typescript import TsConfig + + abs_tsconfig_path = unit.resolve(unit.resolve_arc_path(tsconfig_path)) + if not abs_tsconfig_path: + raise Exception("tsconfig not found: {}".format(tsconfig_path)) + + tsconfig = TsConfig.load(abs_tsconfig_path) + cur_dir = unit.get("TS_TEST_FOR_PATH") if unit.get("TS_TEST_FOR") else unit.get("MODDIR") + pj_path = build_pj_path(unit.resolve(unit.resolve_arc_path(cur_dir))) + dep_paths = PackageJson.load(pj_path).get_dep_paths_by_names() + config_files = tsconfig.inline_extend(dep_paths) 
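+ # descriptive note: inline_extend presumably folds the tsconfig "extends" chain using the dependency paths; the returned config files are declared as hidden inputs via TS_CONFIG_FILES below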
+ + mod_dir = unit.get("MODDIR") + config_files = _resolve_module_files(unit, mod_dir, config_files) + tsconfig.validate() + + unit.set(["TS_CONFIG_FILES", _build_cmd_input_paths(config_files, hide=True)]) + unit.set(["TS_CONFIG_ROOT_DIR", tsconfig.compiler_option("rootDir")]) + unit.set(["TS_CONFIG_OUT_DIR", tsconfig.compiler_option("outDir")]) + unit.set(["TS_CONFIG_SOURCE_MAP", to_yesno(tsconfig.compiler_option("sourceMap"))]) + unit.set(["TS_CONFIG_DECLARATION", to_yesno(tsconfig.compiler_option("declaration"))]) + unit.set(["TS_CONFIG_DECLARATION_MAP", to_yesno(tsconfig.compiler_option("declarationMap"))]) + unit.set(["TS_CONFIG_PRESERVE_JSX", to_yesno(tsconfig.compiler_option("jsx") == "preserve")]) + + _setup_eslint(unit) + + +@_with_report_configure_error +def on_set_external_resources(unit): + _setup_external_resources(unit) + + +def _get_ts_test_data_dirs(unit): + return list( + set( + [ + os.path.dirname(rootrel_arc_src(p, unit)) + for p in (ytest.get_values_list(unit, "_TS_TEST_DATA_VALUE") or []) + ] + ) + ) + + +def _resolve_config_path(unit, test_runner, rel_to): + config_path = ( + unit.get("ESLINT_CONFIG_PATH") if test_runner == "eslint" else unit.get("TS_TEST_CONFIG_PATH") + ) + arc_config_path = unit.resolve_arc_path(config_path) + abs_config_path = unit.resolve(arc_config_path) + if not abs_config_path: + raise Exception("{} config not found: {}".format(test_runner, config_path)) + + unit.onsrcs([arc_config_path]) + abs_rel_to = unit.resolve(unit.resolve_arc_path(unit.get(rel_to))) + return os.path.relpath(abs_config_path, start=abs_rel_to) + + +def _is_tests_enabled(unit): + if unit.get("TIDY") == "yes": + return False + + return True + + +def _get_test_runner_handlers(): + return { + "jest": _add_jest_ts_test, + "hermione": _add_hermione_ts_test, + } + + +def _add_jest_ts_test(unit, test_runner, test_files, deps, test_record): + test_record.update( + { + "CONFIG-PATH": _resolve_config_path(unit, test_runner, rel_to="TS_TEST_FOR_PATH"), + } + ) + _add_test(unit, test_runner, test_files, deps, test_record) + + +def _add_hermione_ts_test(unit, test_runner, test_files, deps, test_record): + unit.on_ts_configure(unit.get("TS_CONFIG_PATH")) + test_tags = list(set(["ya:fat", "ya:external"] + ytest.get_values_list(unit, "TEST_TAGS_VALUE"))) + test_requirements = list(set(["network:full"] + ytest.get_values_list(unit, "TEST_REQUIREMENTS_VALUE"))) + + test_record.update( + { + "TS-ROOT-DIR": unit.get("TS_CONFIG_ROOT_DIR"), + "TS-OUT-DIR": unit.get("TS_CONFIG_OUT_DIR"), + "SIZE": "LARGE", + "TAG": ytest.serialize_list(test_tags), + "REQUIREMENTS": ytest.serialize_list(test_requirements), + "CONFIG-PATH": _resolve_config_path(unit, test_runner, rel_to="MODDIR"), + } + ) + + if not len(test_record["TS-TEST-DATA-DIRS"]): + _add_default_hermione_test_data(unit, test_record) + + _add_test(unit, test_runner, test_files, deps, test_record) + + +def _add_default_hermione_test_data(unit, test_record): + mod_dir = unit.get("MODDIR") + root_dir = test_record["TS-ROOT-DIR"] + out_dir = test_record["TS-OUT-DIR"] + test_for_path = test_record["TS-TEST-FOR-PATH"] + + abs_root_dir = os.path.normpath(os.path.join(unit.resolve(unit.path()), root_dir)) + file_paths = _find_file_paths(abs_root_dir, "**/screens/*/*/*.png") + file_dirs = [os.path.dirname(f) for f in file_paths] + + rename_from, rename_to = [ + os.path.relpath(os.path.normpath(os.path.join(mod_dir, d)), test_for_path) for d in [root_dir, out_dir] + ] + + test_record.update( + { + "TS-TEST-DATA-DIRS": 
ytest.serialize_list(_resolve_module_files(unit, mod_dir, file_dirs)), + "TS-TEST-DATA-DIRS-RENAME": "{}:{}".format(rename_from, rename_to), + } + ) + + +def _setup_eslint(unit): + if not _is_tests_enabled(unit): + return + + if unit.get("_NO_LINT_VALUE") == "none": + return + + lint_files = ytest.get_values_list(unit, "_TS_LINT_SRCS_VALUE") + if not lint_files: + return + + mod_dir = unit.get("MODDIR") + lint_files = _resolve_module_files(unit, mod_dir, lint_files) + deps = _create_pm(unit).get_peers_from_package_json() + test_record = { + "ESLINT-ROOT-VAR-NAME": unit.get("ESLINT-ROOT-VAR-NAME"), + "ESLINT_CONFIG_PATH": _resolve_config_path(unit, "eslint", rel_to="MODDIR"), + } + + _add_test(unit, "eslint", lint_files, deps, test_record, mod_dir) + + +def _resolve_module_files(unit, mod_dir, file_paths): + resolved_files = [] + + for path in file_paths: + resolved = rootrel_arc_src(path, unit) + if resolved.startswith(mod_dir): + mod_dir_with_sep_len = len(mod_dir) + 1 + resolved = resolved[mod_dir_with_sep_len:] + resolved_files.append(resolved) + + return resolved_files + + +def _find_file_paths(abs_path, pattern): + file_paths = [] + _, ext = os.path.splitext(pattern) + + for root, _, filenames in os.walk(abs_path): + if not any(f.endswith(ext) for f in filenames): + continue + + abs_file_paths = [os.path.join(root, f) for f in filenames] + + for file_path in fnmatch.filter(abs_file_paths, pattern): + file_paths.append(file_path) + + return file_paths + + +def _add_test(unit, test_type, test_files, deps=None, test_record=None, test_cwd=None): + from lib.nots.package_manager import constants + + def sort_uniq(text): + return list(sorted(set(text))) + + if deps: + unit.ondepends(sort_uniq(deps)) + + test_dir = get_norm_unit_path(unit) + full_test_record = { + "TEST-NAME": test_type.lower(), + "TEST-TIMEOUT": unit.get("TEST_TIMEOUT") or "", + "TEST-ENV": ytest.prepare_env(unit.get("TEST_ENV_VALUE")), + "TESTED-PROJECT-NAME": os.path.splitext(unit.filename())[0], + "TEST-RECIPES": ytest.prepare_recipes(unit.get("TEST_RECIPES_VALUE")), + "SCRIPT-REL-PATH": test_type, + "SOURCE-FOLDER-PATH": test_dir, + "BUILD-FOLDER-PATH": test_dir, + "BINARY-PATH": os.path.join(test_dir, unit.filename()), + "SPLIT-FACTOR": unit.get("TEST_SPLIT_FACTOR") or "", + "FORK-MODE": unit.get("TEST_FORK_MODE") or "", + "SIZE": unit.get("TEST_SIZE_NAME") or "", + "TEST-FILES": ytest.serialize_list(test_files), + "TEST-CWD": test_cwd or "", + "TAG": ytest.serialize_list(ytest.get_values_list(unit, "TEST_TAGS_VALUE")), + "REQUIREMENTS": ytest.serialize_list(ytest.get_values_list(unit, "TEST_REQUIREMENTS_VALUE")), + "NODEJS-ROOT-VAR-NAME": unit.get("NODEJS-ROOT-VAR-NAME"), + "NODE-MODULES-BUNDLE-FILENAME": constants.NODE_MODULES_WORKSPACE_BUNDLE_FILENAME, + "CUSTOM-DEPENDENCIES": " ".join(sort_uniq((deps or []) + ytest.get_values_list(unit, "TEST_DEPENDS_VALUE"))), + } + + for k, v in full_test_record.items(): + if not isinstance(v, str): + unit.message(["WARN", k]) + + if test_record: + full_test_record.update(test_record) + + data = ytest.dump_test(unit, full_test_record) + if data: + unit.set_property(["DART_DATA", data]) + + +def _setup_external_resources(unit): + pm = _create_pm(unit) + pj = pm.load_package_json_from_dir(pm.sources_path) + erm_json = _create_erm_json(unit) + + nodejs_version = _select_matching_version(erm_json, "nodejs", pj.get_nodejs_version()) + + # Add NodeJS vars + _set_resource_vars(unit, erm_json, "nodejs", pj.get_nodejs_version()) + + # Add NPM-packages vars + for tool in 
erm_json.list_npm_packages(): + version_range = pj.get_dep_specifier(tool) + _set_resource_vars(unit, erm_json, tool, version_range, nodejs_version.major) + + +def _set_resource_vars(unit, erm_json, resource_name, version_range, nodejs_major=None): + # type: (any, ErmJsonLite, str, str|None, int|None) -> None + + # example: Version(12, 18, 4) | Version(7, 0, 4) + version = _select_matching_version(erm_json, resource_name, version_range) + + # example: hermione -> HERMIONE, super-package -> SUPER_PACKAGE + canon_resource_name = _canonize_resource_name(resource_name) + + # example: NODEJS_12_18_4 | HERMIONE_7_0_4_NODEJS_18 + version_str = str(version).replace(".", "_") + yamake_resource_name = "{}_{}".format(canon_resource_name, version_str) + + if erm_json.is_resource_multiplatform(resource_name): + yamake_resource_name += "_NODEJS_{}".format(nodejs_major) + + yamake_resource_var = "{}_RESOURCE_GLOBAL".format(yamake_resource_name) + + unit.set(["{}_ROOT".format(canon_resource_name), "${}".format(yamake_resource_var)]) + unit.set(["{}-ROOT-VAR-NAME".format(canon_resource_name), yamake_resource_var]) + + +def _select_matching_version(erm_json, resource_name, range_str): + # type: (ErmJsonLite, str, str) -> Version + try: + version = erm_json.select_version_of(resource_name, range_str) + if version: + return version + + raise ValueError("There is no allowed version to satisfy this range: '{}'".format(range_str)) + except Exception as error: + toolchain_versions = erm_json.get_versions_of(erm_json.get_resource(resource_name)) + + raise Exception( + "Requested {} version range '{}' could not be satisfied. \n" + "Please use a range that would include one of the following: {}. \n" + "For further details please visit the link: {} \nOriginal error: {} \n".format( + resource_name, + range_str, + map(str, toolchain_versions), + "https://nda.ya.ru/t/ulU4f5Ru5egzHV", + str(error), + ) + ) + + +@_with_report_configure_error +def on_node_modules_configure(unit): + pm = _create_pm(unit) + pj = pm.load_package_json_from_dir(pm.sources_path) + + if pj.has_dependencies(): + unit.onpeerdir(pm.get_local_peers_from_package_json()) + ins, outs = pm.calc_node_modules_inouts() + unit.on_set_node_modules_ins_outs(["IN"] + sorted(ins) + ["OUT"] + sorted(outs)) + else: + # default "noop" command + unit.set(["_NODE_MODULES_CMD", "$TOUCH_UNIT"]) + + +@_with_report_configure_error +def on_set_node_modules_bundle_as_output(unit): + pm = _create_pm(unit) + pj = pm.load_package_json_from_dir(pm.sources_path) + if pj.has_dependencies(): + unit.set(["NODE_MODULES_BUNDLE_AS_OUTPUT", '${output;hide:"workspace_node_modules.tar"}']) + + +@_with_report_configure_error +def on_ts_test_for_configure(unit, test_runner, default_config): + if not _is_tests_enabled(unit): + return + + for_mod_path = unit.get("TS_TEST_FOR_PATH") + unit.onpeerdir([for_mod_path]) + unit.on_setup_extract_node_modules_recipe([for_mod_path]) + + unit.set(["TS_TEST_NM", os.path.join(("$B"), for_mod_path, "node_modules.tar")]) + + config_path = unit.get("TS_TEST_CONFIG_PATH") + if not config_path: + config_path = os.path.join(for_mod_path, default_config) + unit.set(["TS_TEST_CONFIG_PATH", config_path]) + + test_record = _add_ts_resources_to_test_record(unit, { + "TS-TEST-FOR-PATH": for_mod_path, + "TS-TEST-DATA-DIRS": ytest.serialize_list(_get_ts_test_data_dirs(unit)), + "TS-TEST-DATA-DIRS-RENAME": unit.get("_TS_TEST_DATA_DIRS_RENAME_VALUE"), + }) + + test_files = ytest.get_values_list(unit, "_TS_TEST_SRCS_VALUE") + test_files = _resolve_module_files(unit, 
unit.get("MODDIR"), test_files) + if not test_files: + ymake.report_configure_error("No tests found") + return + + deps = _create_pm(unit).get_peers_from_package_json() + add_ts_test = _get_test_runner_handlers()[test_runner] + add_ts_test(unit, test_runner, test_files, deps, test_record) + +@_with_report_configure_error +def on_set_ts_test_for_vars(unit, for_mod): + unit.set(["TS_TEST_FOR", "yes"]) + unit.set(["TS_TEST_FOR_DIR", unit.resolve_arc_path(for_mod)]) + unit.set(["TS_TEST_FOR_PATH", rootrel_arc_src(for_mod, unit)]) + +def _add_ts_resources_to_test_record(unit, test_record): + erm_json = _create_erm_json(unit) + for tool in erm_json.list_npm_packages(): + tool_resource_label = "{}-ROOT-VAR-NAME".format(tool.upper()) + tool_resource_value = unit.get(tool_resource_label) + if tool_resource_value: + test_record[tool_resource_label] = tool_resource_value + return test_record diff --git a/build/plugins/pybuild.py b/build/plugins/pybuild.py new file mode 100644 index 0000000000..ae30fd897e --- /dev/null +++ b/build/plugins/pybuild.py @@ -0,0 +1,742 @@ +import os +import collections +from hashlib import md5 + +import ymake +from _common import stripext, rootrel_arc_src, listid, pathid, lazy, get_no_lint_value + + +YA_IDE_VENV_VAR = 'YA_IDE_VENV' +PY_NAMESPACE_PREFIX = 'py/namespace' +BUILTIN_PROTO = 'builtin_proto' +DEFAULT_FLAKE8_FILE_PROCESSING_TIME = "1.5" # in seconds + + +def is_arc_src(src, unit): + return ( + src.startswith('${ARCADIA_ROOT}/') + or src.startswith('${CURDIR}/') + or unit.resolve_arc_path(src).startswith('$S/') + ) + + +def is_extended_source_search_enabled(path, unit): + if not is_arc_src(path, unit): + return False + if unit.get('NO_EXTENDED_SOURCE_SEARCH') == 'yes': + return False + return True + + +def to_build_root(path, unit): + if is_arc_src(path, unit): + return '${ARCADIA_BUILD_ROOT}/' + rootrel_arc_src(path, unit) + return path + + +def uniq_suffix(path, unit): + upath = unit.path() + if '/' not in path: + return '' + return '.{}'.format(pathid(upath)[:4]) + + +def pb2_arg(suf, path, mod, unit): + return '{path}__int__{suf}={mod}{modsuf}'.format( + path=stripext(to_build_root(path, unit)), suf=suf, mod=mod, modsuf=stripext(suf) + ) + + +def proto_arg(path, mod, unit): + return '{}.proto={}'.format(stripext(to_build_root(path, unit)), mod) + + +def pb_cc_arg(suf, path, unit): + return '{}{suf}'.format(stripext(to_build_root(path, unit)), suf=suf) + + +def ev_cc_arg(path, unit): + return '{}.ev.pb.cc'.format(stripext(to_build_root(path, unit))) + + +def ev_arg(path, mod, unit): + return '{}__int___ev_pb2.py={}_ev_pb2'.format(stripext(to_build_root(path, unit)), mod) + + +def mangle(name): + if '.' 
not in name: + return name + return ''.join('{}{}'.format(len(s), s) for s in name.split('.')) + + +def parse_pyx_includes(filename, path, source_root, seen=None): + normpath = lambda *x: os.path.normpath(os.path.join(*x)) + + abs_path = normpath(source_root, filename) + seen = seen or set() + if abs_path in seen: + return + seen.add(abs_path) + + if not os.path.exists(abs_path): + # File might be missing, because it might be generated + return + + with open(abs_path, 'rb') as f: + # Don't parse cimports and etc - irrelevant for cython, it's linker work + includes = ymake.parse_cython_includes(f.read()) + + abs_dirname = os.path.dirname(abs_path) + # All includes are relative to the file which include + path_dirname = os.path.dirname(path) + file_dirname = os.path.dirname(filename) + + for incfile in includes: + abs_path = normpath(abs_dirname, incfile) + if os.path.exists(abs_path): + incname, incpath = normpath(file_dirname, incfile), normpath(path_dirname, incfile) + yield (incname, incpath) + # search for includes in the included files + for e in parse_pyx_includes(incname, incpath, source_root, seen): + yield e + else: + # There might be arcadia root or cython relative include. + # Don't treat such file as missing, because there must be PEERDIR on py_library + # which contains it. + for path in [ + source_root, + source_root + "/contrib/tools/cython/Cython/Includes", + ]: + if os.path.exists(normpath(path, incfile)): + break + else: + ymake.report_configure_error("'{}' includes missing file: {} ({})".format(path, incfile, abs_path)) + + +def has_pyx(args): + return any(arg.endswith('.pyx') for arg in args) + + +def get_srcdir(path, unit): + return rootrel_arc_src(path, unit)[: -len(path)].rstrip('/') + + +def add_python_lint_checks(unit, py_ver, files): + @lazy + def get_resolved_files(): + resolved_files = [] + for path in files: + resolved = unit.resolve_arc_path([path]) + if resolved.startswith('$S'): # path was resolved as source file. 
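+ # generated or unresolved files are skipped here, so linters only see checked-in sources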
+ resolved_files.append(resolved) + return resolved_files + + no_lint_value = get_no_lint_value(unit) + if no_lint_value == "none": + + no_lint_allowed_paths = ( + "contrib/", + "devtools/", + "junk/", + # temporary allowed, TODO: remove + "taxi/uservices/", + "travel/", + "market/report/lite/", # MARKETOUT-38662, deadline: 2021-08-12 + "passport/backend/oauth/", # PASSP-35982 + "testenv/", # CI-3229 + ) + + upath = unit.path()[3:] + + if not upath.startswith(no_lint_allowed_paths): + ymake.report_configure_error("NO_LINT() is allowed only in " + ", ".join(no_lint_allowed_paths)) + + if files and no_lint_value not in ("none", "none_internal"): + resolved_files = get_resolved_files() + if resolved_files: + flake8_cfg = 'build/config/tests/flake8/flake8.conf' + resource = "build/external_resources/flake8_py{}".format(py_ver) + lint_name = "py{}_flake8".format(py_ver) + params = [lint_name, "tools/flake8_linter/flake8_linter"] + params += ["FILES"] + resolved_files + params += ["GLOBAL_RESOURCES", resource] + params += ["CONFIGS", flake8_cfg, "build/rules/flake8/migrations.yaml"] + params += [ + "FILE_PROCESSING_TIME", + unit.get("FLAKE8_FILE_PROCESSING_TIME") or DEFAULT_FLAKE8_FILE_PROCESSING_TIME, + ] + extra_params = [] + if unit.get("DISABLE_FLAKE8_MIGRATIONS") == "yes": + extra_params.append("DISABLE_FLAKE8_MIGRATIONS=yes") + if extra_params: + params += ["EXTRA_PARAMS"] + extra_params + unit.on_add_linter_check(params) + + if files and unit.get('STYLE_PYTHON_VALUE') == 'yes' and is_py3(unit): + resolved_files = get_resolved_files() + if resolved_files: + black_cfg = ( + unit.get('STYLE_PYTHON_PYPROJECT_VALUE') or 'devtools/ya/handlers/style/python_style_config.toml' + ) + params = ['black', 'tools/black_linter/black_linter'] + params += ['FILES'] + resolved_files + params += ['CONFIGS', black_cfg] + unit.on_add_linter_check(params) + + +def is_py3(unit): + return unit.get("PYTHON3") == "yes" + + +def on_py_program(unit, *args): + py_program(unit, is_py3(unit)) + + +def py_program(unit, py3): + """ + Documentation: https://wiki.yandex-team.ru/devtools/commandsandvars/py_srcs/#modulpyprogramimakrospymain + """ + if py3: + peers = ['library/python/runtime_py3/main'] + if unit.get('PYTHON_SQLITE3') != 'no': + peers.append('contrib/tools/python3/src/Modules/_sqlite') + else: + peers = ['library/python/runtime/main'] + if unit.get('PYTHON_SQLITE3') != 'no': + peers.append('contrib/tools/python/src/Modules/_sqlite') + unit.onpeerdir(peers) + if unit.get('MODULE_TYPE') == 'PROGRAM': # can not check DLL + unit.onadd_check_py_imports() + + +def onpy_srcs(unit, *args): + """ + @usage PY_SRCS({| CYTHONIZE_PY} {| CYTHON_C} { | TOP_LEVEL | NAMESPACE ns} Files...) + + PY_SRCS() - is rule to build extended versions of Python interpreters and containing all application code in its executable file. It can be used to collect only the executables but not shared libraries, and, in particular, not to collect the modules that are imported using import directive. + The main disadvantage is the lack of IDE support; There is also no readline yet. + The application can be collect from any of the sources from which the C library, and with the help of PY_SRCS .py , .pyx,.proto and .swg files. + At the same time extensions for Python on C language generating from .pyx and .swg, will be registered in Python's as built-in modules, and sources on .py are stored as static data: when the interpreter starts, the initialization code will add a custom loader of these modules to sys.meta_path. 
+ You can compile .py files as Cython sources with CYTHONIZE_PY directive (Use carefully, as build can get too slow). However, with it you won't have profiling info by default. To enable it, add "# cython: profile=True" line to the beginning of every cythonized source. + By default .pyx files are collected as C++-extensions. To collect them as C (similar to BUILDWITH_CYTHON_C, but with the ability to specify namespace), you must specify the Directive CYTHON_C. + Building with pyx automatically registers modules, you do not need to call PY_REGISTER for them + __init__.py never required, but if present (and specified in PY_SRCS), it will be imported when you import package modules with __init__.py Oh. + + Example of library declaration with PY_SRCS(): + PY2_LIBRARY(mymodule) + PY_SRCS(a.py sub/dir/b.py e.proto sub/dir/f.proto c.pyx sub/dir/d.pyx g.swg sub/dir/h.swg) + END() + + PY_REGISTER honors Python2 and Python3 differences and adjusts itself to Python version of a current module + Documentation: https://wiki.yandex-team.ru/arcadia/python/pysrcs/#modulipylibrarypy3libraryimakrospysrcs + """ + # Each file arg must either be a path, or "${...}/buildpath=modname", where + # "${...}/buildpath" part will be used as a file source in a future macro, + # and "modname" will be used as a module name. + + upath = unit.path()[3:] + py3 = is_py3(unit) + py_main_only = unit.get('PROCESS_PY_MAIN_ONLY') + with_py = not unit.get('PYBUILD_NO_PY') + with_pyc = not unit.get('PYBUILD_NO_PYC') + in_proto_library = unit.get('PY_PROTO') or unit.get('PY3_PROTO') + venv = unit.get(YA_IDE_VENV_VAR) + need_gazetteer_peerdir = False + trim = 0 + + if ( + not upath.startswith('contrib/tools/python') + and not upath.startswith('library/python/runtime') + and unit.get('NO_PYTHON_INCLS') != 'yes' + ): + unit.onpeerdir(['contrib/libs/python']) + + unit_needs_main = unit.get('MODULE_TYPE') in ('PROGRAM', 'DLL') + if unit_needs_main: + py_program(unit, py3) + + py_namespace_value = unit.get('PY_NAMESPACE_VALUE') + if py_namespace_value == ".": + ns = "" + else: + ns = (unit.get('PY_NAMESPACE_VALUE') or upath.replace('/', '.')) + '.' + + cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes' + cythonize_py = False + optimize_proto = unit.get('OPTIMIZE_PY_PROTOS_FLAG') == 'yes' + + cython_directives = [] + if cython_coverage: + cython_directives += ['-X', 'linetrace=True'] + + pyxs_c = [] + pyxs_c_h = [] + pyxs_c_api_h = [] + pyxs_cpp = [] + pyxs_cpp_h = [] + pyxs = pyxs_cpp + swigs_c = [] + swigs_cpp = [] + swigs = swigs_cpp + pys = [] + protos = [] + evs = [] + fbss = [] + py_namespaces = {} + + dump_dir = unit.get('PYTHON_BUILD_DUMP_DIR') + dump_output = None + if dump_dir: + import thread + + pid = os.getpid() + tid = thread.get_ident() + dump_name = '{}-{}.dump'.format(pid, tid) + dump_output = open(os.path.join(dump_dir, dump_name), 'a') + + args = iter(args) + for arg in args: + # Namespace directives. + if arg == 'TOP_LEVEL': + ns = '' + elif arg == 'NAMESPACE': + ns = next(args) + '.' + # Cython directives. + elif arg == 'CYTHON_C': + pyxs = pyxs_c + elif arg == 'CYTHON_C_H': + pyxs = pyxs_c_h + elif arg == 'CYTHON_C_API_H': + pyxs = pyxs_c_api_h + elif arg == 'CYTHON_CPP': + pyxs = pyxs_cpp + elif arg == 'CYTHON_CPP_H': + pyxs = pyxs_cpp_h + elif arg == 'CYTHON_DIRECTIVE': + cython_directives += ['-X', next(args)] + elif arg == 'CYTHONIZE_PY': + cythonize_py = True + # SWIG. + elif arg == 'SWIG_C': + swigs = swigs_c + elif arg == 'SWIG_CPP': + swigs = swigs_cpp + # Unsupported but legal PROTO_LIBRARY arguments. 
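+ # GLOBAL, and .gztproto items outside a proto library, are accepted for compatibility and skipped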
+ elif arg == 'GLOBAL' or not in_proto_library and arg.endswith('.gztproto'): + pass + elif arg == '_MR': + # GLOB support: convert arcadia-root-relative paths to module-relative + # srcs are assumed to start with ${ARCADIA_ROOT} + trim = len(unit.path()) + 14 + # Sources. + else: + main_mod = arg == 'MAIN' + if main_mod: + arg = next(args) + + if '=' in arg: + main_py = False + path, mod = arg.split('=', 1) + else: + if trim: + arg = arg[trim:] + if arg.endswith('.gztproto'): + need_gazetteer_peerdir = True + path = '{}.proto'.format(arg[:-9]) + else: + path = arg + main_py = path == '__main__.py' or path.endswith('/__main__.py') + if not py3 and unit_needs_main and main_py: + mod = '__main__' + else: + if arg.startswith('../'): + ymake.report_configure_error('PY_SRCS item starts with "../": {!r}'.format(arg)) + if arg.startswith('/'): + ymake.report_configure_error('PY_SRCS item starts with "/": {!r}'.format(arg)) + continue + mod_name = stripext(arg).replace('/', '.') + if py3 and path.endswith('.py') and is_extended_source_search_enabled(path, unit): + # Dig out real path from the file path. Unit.path is not enough because of SRCDIR and ADDINCL + root_rel_path = rootrel_arc_src(path, unit) + mod_root_path = root_rel_path[: -(len(path) + 1)] + py_namespaces.setdefault(mod_root_path, set()).add(ns if ns else '.') + mod = ns + mod_name + + if main_mod: + py_main(unit, mod + ":main") + elif py3 and unit_needs_main and main_py: + py_main(unit, mod) + + if py_main_only: + continue + + if py3 and mod == '__main__': + ymake.report_configure_error('TOP_LEVEL __main__.py is not allowed in PY3_PROGRAM') + + pathmod = (path, mod) + + if dump_output is not None: + dump_output.write( + '{path}\t{module}\t{py3}\n'.format( + path=rootrel_arc_src(path, unit), module=mod, py3=1 if py3 else 0 + ) + ) + + if path.endswith('.py'): + if cythonize_py: + pyxs.append(pathmod) + else: + pys.append(pathmod) + elif path.endswith('.pyx'): + pyxs.append(pathmod) + elif path.endswith('.proto'): + protos.append(pathmod) + elif path.endswith('.ev'): + evs.append(pathmod) + elif path.endswith('.swg'): + swigs.append(pathmod) + # Allow pyi files in PY_SRCS for autocomplete in IDE, but skip it during building + elif path.endswith('.pyi'): + pass + elif path.endswith('.fbs'): + fbss.append(pathmod) + else: + ymake.report_configure_error('in PY_SRCS: unrecognized arg {!r}'.format(path)) + + if dump_output is not None: + dump_output.close() + + if pyxs: + py_files2res = set() + cpp_files2res = set() + # Include map stores files which were included in the processing pyx file, + # to be able to find source code of the included file inside generated file + # for currently processing pyx file. 
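+ # hypothetical shape: {'maps/geo.pyx': {'maps/common.pxi'}}, i.e. arcadia-relative pyx path to the set of files it includes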
+ include_map = collections.defaultdict(set) + + if cython_coverage: + + def process_pyx(filename, path, out_suffix, with_ext): + # skip generated files + if not is_arc_src(path, unit): + return + # source file + py_files2res.add((filename, path)) + # generated + if with_ext is None: + cpp_files2res.add( + (os.path.splitext(filename)[0] + out_suffix, os.path.splitext(path)[0] + out_suffix) + ) + else: + cpp_files2res.add((filename + with_ext + out_suffix, path + with_ext + out_suffix)) + # used includes + for entry in parse_pyx_includes(filename, path, unit.resolve('$S')): + py_files2res.add(entry) + include_arc_rel = entry[0] + include_map[filename].add(include_arc_rel) + + else: + + def process_pyx(filename, path, out_suffix, with_ext): + pass + + obj_suff = unit.get('OBJ_SUF') + assert obj_suff is not None + for pyxs, cython, out_suffix, with_ext in [ + (pyxs_c, unit.on_buildwith_cython_c_dep, ".c", obj_suff), + (pyxs_c_h, unit.on_buildwith_cython_c_h, ".c", None), + (pyxs_c_api_h, unit.on_buildwith_cython_c_api_h, ".c", None), + (pyxs_cpp, unit.on_buildwith_cython_cpp_dep, ".cpp", obj_suff), + (pyxs_cpp_h, unit.on_buildwith_cython_cpp_h, ".cpp", None), + ]: + for path, mod in pyxs: + filename = rootrel_arc_src(path, unit) + cython_args = [path] + + dep = path + if path.endswith('.py'): + pxd = '/'.join(mod.split('.')) + '.pxd' + if unit.resolve_arc_path(pxd): + dep = pxd + cython_args.append(dep) + + cython_args += [ + '--module-name', + mod, + '--init-suffix', + mangle(mod), + '--source-root', + '${ARCADIA_ROOT}', + # set arcadia root relative __file__ for generated modules + '-X', + 'set_initial_path={}'.format(filename), + ] + cython_directives + + cython(cython_args) + py_register(unit, mod, py3) + process_pyx(filename, path, out_suffix, with_ext) + + if cythonize_py: + # Lint checks are not added for cythonized files by default, so we must add it here + # as we are doing for regular pys. + _23 = 3 if py3 else 2 + add_python_lint_checks( + unit, + _23, + [path for path, mod in pyxs if path.endswith(".py")] + + unit.get(['_PY_EXTRA_LINT_FILES_VALUE']).split(), + ) + + if py_files2res: + # Compile original and generated sources into target for proper cython coverage calculation + for files2res in (py_files2res, cpp_files2res): + unit.onresource_files([x for name, path in files2res for x in ('DEST', name, path)]) + + if include_map: + data = [] + prefix = 'resfs/cython/include' + for line in sorted( + '{}/{}={}'.format(prefix, filename, ':'.join(sorted(files))) + for filename, files in include_map.iteritems() + ): + data += ['-', line] + unit.onresource(data) + + for swigs, on_swig_python in [ + (swigs_c, unit.on_swig_python_c), + (swigs_cpp, unit.on_swig_python_cpp), + ]: + for path, mod in swigs: + # Make output prefix basename match swig module name. 
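+ # hypothetical example: path 'sub/ops.swg' with mod 'pkg.ops' gives prefix 'sub/ops', so the generated wrapper sub/ops.py is fed back through PY_SRCS below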
+ prefix = path[: path.rfind('/') + 1] + mod.rsplit('.', 1)[-1] + swg_py = '{}/{}/{}.py'.format('${ARCADIA_BUILD_ROOT}', upath, prefix) + on_swig_python([path, prefix]) + onpy_register(unit, mod + '_swg') + onpy_srcs(unit, swg_py + '=' + mod) + + if pys: + pys_seen = set() + pys_dups = {m for _, m in pys if (m in pys_seen or pys_seen.add(m))} + if pys_dups: + ymake.report_configure_error('Duplicate(s) is found in the PY_SRCS macro: {}'.format(pys_dups)) + + res = [] + + if py3: + mod_list_md5 = md5() + for path, mod in pys: + mod_list_md5.update(mod) + if not (venv and is_extended_source_search_enabled(path, unit)): + dest = 'py/' + mod.replace('.', '/') + '.py' + if with_py: + res += ['DEST', dest, path] + if with_pyc: + root_rel_path = rootrel_arc_src(path, unit) + dst = path + uniq_suffix(path, unit) + unit.on_py3_compile_bytecode([root_rel_path + '-', path, dst]) + res += ['DEST', dest + '.yapyc3', dst + '.yapyc3'] + + if py_namespaces: + # Note: Add md5 to key to prevent key collision if two or more PY_SRCS() used in the same ya.make + ns_res = [] + for path, ns in sorted(py_namespaces.items()): + key = '{}/{}/{}'.format(PY_NAMESPACE_PREFIX, mod_list_md5.hexdigest(), path) + namespaces = ':'.join(sorted(ns)) + ns_res += ['-', '{}="{}"'.format(key, namespaces)] + unit.onresource(ns_res) + + unit.onresource_files(res) + add_python_lint_checks( + unit, 3, [path for path, mod in pys] + unit.get(['_PY_EXTRA_LINT_FILES_VALUE']).split() + ) + else: + for path, mod in pys: + root_rel_path = rootrel_arc_src(path, unit) + if with_py: + key = '/py_modules/' + mod + res += [ + path, + key, + '-', + 'resfs/src/{}={}'.format(key, root_rel_path), + ] + if with_pyc: + src = unit.resolve_arc_path(path) or path + dst = path + uniq_suffix(path, unit) + unit.on_py_compile_bytecode([root_rel_path + '-', src, dst]) + res += [dst + '.yapyc', '/py_code/' + mod] + + unit.onresource(res) + add_python_lint_checks( + unit, 2, [path for path, mod in pys] + unit.get(['_PY_EXTRA_LINT_FILES_VALUE']).split() + ) + + use_vanilla_protoc = unit.get('USE_VANILLA_PROTOC') == 'yes' + if use_vanilla_protoc: + cpp_runtime_path = 'contrib/libs/protobuf_std' + py_runtime_path = 'contrib/python/protobuf_std' + builtin_proto_path = cpp_runtime_path + '/' + BUILTIN_PROTO + else: + cpp_runtime_path = 'contrib/libs/protobuf' + py_runtime_path = 'contrib/python/protobuf' + builtin_proto_path = cpp_runtime_path + '/' + BUILTIN_PROTO + + if protos: + if not upath.startswith(py_runtime_path) and not upath.startswith(builtin_proto_path): + unit.onpeerdir(py_runtime_path) + + unit.onpeerdir(unit.get("PY_PROTO_DEPS").split()) + + proto_paths = [path for path, mod in protos] + unit.on_generate_py_protos_internal(proto_paths) + unit.onpy_srcs( + [ + pb2_arg(py_suf, path, mod, unit) + for path, mod in protos + for py_suf in unit.get("PY_PROTO_SUFFIXES").split() + ] + ) + + if optimize_proto and need_gazetteer_peerdir: + unit.onpeerdir(['kernel/gazetteer/proto']) + + if evs: + unit.onpeerdir([cpp_runtime_path]) + unit.on_generate_py_evs_internal([path for path, mod in evs]) + unit.onpy_srcs([ev_arg(path, mod, unit) for path, mod in evs]) + + if fbss: + unit.onpeerdir(unit.get('_PY_FBS_DEPS').split()) + pysrc_base_name = listid(fbss) + unit.onfbs_to_pysrc([pysrc_base_name] + [path for path, _ in fbss]) + unit.onsrcs(['GLOBAL', '{}.fbs.pysrc'.format(pysrc_base_name)]) + + +def _check_test_srcs(*args): + used = set(args) & {"NAMESPACE", "TOP_LEVEL", "__main__.py"} + if used: + param = list(used)[0] + ymake.report_configure_error( + 'in 
TEST_SRCS: you cannot use {} here - it would broke testing machinery'.format(param) + ) + + +def ontest_srcs(unit, *args): + _check_test_srcs(*args) + if unit.get('PY3TEST_BIN' if is_py3(unit) else 'PYTEST_BIN') != 'no': + unit.onpy_srcs(["NAMESPACE", "__tests__"] + list(args)) + + +def onpy_doctests(unit, *args): + """ + @usage PY_DOCTESTS(Packages...) + + Add to the test doctests for specified Python packages + The packages should be part of a test (listed as sources of the test or its PEERDIRs). + """ + if unit.get('PY3TEST_BIN' if is_py3(unit) else 'PYTEST_BIN') != 'no': + unit.onresource(['-', 'PY_DOCTEST_PACKAGES="{}"'.format(' '.join(args))]) + + +def py_register(unit, func, py3): + if py3: + unit.on_py3_register([func]) + else: + unit.on_py_register([func]) + + +def onpy_register(unit, *args): + """ + @usage: PY_REGISTER([package.]module_name) + + Python knows about which built-ins can be imported, due to their registration in the Assembly or at the start of the interpreter. + All modules from the sources listed in PY_SRCS() are registered automatically. + To register the modules from the sources in the SRCS(), you need to use PY_REGISTER(). + + PY_REGISTER(module_name) initializes module globally via call to initmodule_name() + PY_REGISTER(package.module_name) initializes module in the specified package + It renames its init function with CFLAGS(-Dinitmodule_name=init7package11module_name) + or CFLAGS(-DPyInit_module_name=PyInit_7package11module_name) + + Documentation: https://wiki.yandex-team.ru/arcadia/python/pysrcs/#makrospyregister + """ + + py3 = is_py3(unit) + + for name in args: + assert '=' not in name, name + py_register(unit, name, py3) + if '.' in name: + shortname = name.rsplit('.', 1)[1] + if py3: + unit.oncflags(['-DPyInit_{}=PyInit_{}'.format(shortname, mangle(name))]) + else: + unit.oncflags(['-Dinit{}=init{}'.format(shortname, mangle(name))]) + + +def py_main(unit, arg): + if unit.get('IGNORE_PY_MAIN'): + return + unit_needs_main = unit.get('MODULE_TYPE') in ('PROGRAM', 'DLL') + if unit_needs_main: + py_program(unit, is_py3(unit)) + unit.onresource(['-', 'PY_MAIN={}'.format(arg)]) + + +def onpy_main(unit, arg): + """ + @usage: PY_MAIN(package.module[:func]) + + Specifies the module or function from which to start executing a python program + + Documentation: https://wiki.yandex-team.ru/arcadia/python/pysrcs/#modulipyprogrampy3programimakrospymain + """ + + arg = arg.replace('/', '.') + + if ':' not in arg: + arg += ':main' + + py_main(unit, arg) + + +def onpy_constructor(unit, arg): + """ + @usage: PY_CONSTRUCTOR(package.module[:func]) + + Specifies the module or function which will be started before python's main() + init() is expected in the target module if no function is specified + Can be considered as __attribute__((constructor)) for python + """ + if ':' not in arg: + arg = arg + '=init' + else: + arg[arg.index(':')] = '=' + unit.onresource(['-', 'py/constructors/{}'.format(arg)]) + + +def onpy_enums_serialization(unit, *args): + ns = '' + args = iter(args) + for arg in args: + # Namespace directives. + if arg == 'NAMESPACE': + ns = next(args) + else: + unit.on_py_enum_serialization_to_json(arg) + unit.on_py_enum_serialization_to_py(arg) + filename = arg.rsplit('.', 1)[0] + '.py' + if len(ns) != 0: + onpy_srcs(unit, 'NAMESPACE', ns, filename) + else: + onpy_srcs(unit, filename) + + +def oncpp_enums_serialization(unit, *args): + args = iter(args) + for arg in args: + # Namespace directives. 
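+ # the namespace value is read and discarded here; every other argument is a header passed to enum serialization generation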
+ if arg == 'NAMESPACE': + next(args) + else: + unit.ongenerate_enum_serialization_with_header(arg) diff --git a/build/plugins/res.py b/build/plugins/res.py new file mode 100644 index 0000000000..31f9d77c10 --- /dev/null +++ b/build/plugins/res.py @@ -0,0 +1,138 @@ +import json +import os +import six +from _common import iterpair, listid, pathid, rootrel_arc_src, tobuilddir, filter_out_by_keyword +import ymake + + +def split(lst, limit): + # paths are specified with replaceable prefix + # real length is unknown at the moment, that why we use root_lenght + # as a rough estimation + root_lenght = 200 + filepath = None + lenght = 0 + bucket = [] + + for item in lst: + if filepath: + lenght += root_lenght + len(filepath) + len(item) + if lenght > limit and bucket: + yield bucket + bucket = [] + lenght = 0 + + bucket.append(filepath) + bucket.append(item) + filepath = None + else: + filepath = item + + if bucket: + yield bucket + + +def remove_prefix(text, prefix): + if text.startswith(prefix): + return text[len(prefix) :] + return text + + +def onresource_files(unit, *args): + """ + @usage: RESOURCE_FILES([PREFIX {prefix}] [STRIP prefix_to_strip] {path}) + + This macro expands into + RESOURCE(DONT_PARSE {path} resfs/file/{prefix}{path} + - resfs/src/resfs/file/{prefix}{remove_prefix(path, prefix_to_strip)}={rootrel_arc_src(path)} + ) + + resfs/src/{key} stores a source root (or build root) relative path of the + source of the value of the {key} resource. + + resfs/file/{key} stores any value whose source was a file on a filesystem. + resfs/src/resfs/file/{key} must store its path. + + DONT_PARSE disables parsing for source code files (determined by extension) + Please don't abuse: use separate DONT_PARSE macro call only for files subject to parsing + + This form is for use from other plugins: + RESOURCE_FILES([DEST {dest}] {path}) expands into RESOURCE({path} resfs/file/{dest}) + + @see: https://wiki.yandex-team.ru/devtools/commandsandvars/resourcefiles/ + """ + prefix = '' + prefix_to_strip = None + dest = None + res = [] + + if args and not unit.enabled('_GO_MODULE'): + # GO_RESOURCE currently doesn't support DONT_PARSE + res.append('DONT_PARSE') + + args = iter(args) + for arg in args: + if arg == 'PREFIX': + prefix, dest = next(args), None + elif arg == 'DEST': + dest, prefix = next(args), None + elif arg == 'STRIP': + prefix_to_strip = next(args) + else: + path = arg + key = 'resfs/file/' + ( + dest or (prefix + (path if not prefix_to_strip else remove_prefix(path, prefix_to_strip))) + ) + src = 'resfs/src/{}={}'.format(key, rootrel_arc_src(path, unit)) + res += ['-', src, path, key] + + if unit.enabled('_GO_MODULE'): + unit.on_go_resource(res) + else: + unit.onresource(res) + + +def onall_resource_files(unit, *args): + # This is only validation, actual work is done in ymake.core.conf implementation + for arg in args: + if '*' in arg or '?' in arg: + ymake.report_configure_error('Wildcards in [[imp]]ALL_RESOURCE_FILES[[rst]] are not allowed') + + +def on_ya_conf_json(unit, conf_file): + conf_abs_path = unit.resolve('$S/' + conf_file) + if not os.path.exists(conf_abs_path): + ymake.report_configure_error('File "{}" not found'.format(conf_abs_path)) + return + + # conf_file should be passed to the RESOURCE_FILES macro without path. + # To resolve it later by name only we must add it's path to SRCDIR(). 
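+ # hypothetical example: conf_file 'build/ya.conf.json' results in SRCDIR(build) plus RESOURCE_FILES(ya.conf.json)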
+ conf_dir = os.path.dirname(conf_file) + if conf_dir: + unit.onsrcdir(conf_dir) + unit.onresource_files(os.path.basename(conf_file)) + + with open(conf_abs_path) as f: + conf = json.load(f) + formulas = set() + for bottle_name, bottle in conf['bottles'].items(): + formula = bottle['formula'] + if isinstance(formula, six.string_types): + if formula.startswith(conf_dir): + abs_path = unit.resolve('$S/' + formula) + if os.path.exists(abs_path): + formulas.add(formula) + else: + ymake.report_configure_error( + 'File "{}" (referenced from bottle "{}" in "{}") is not found'.format( + abs_path, bottle_name, conf_abs_path + ) + ) + else: + ymake.report_configure_error( + 'File "{}" (referenced from bottle "{}" in "{}") must be located in "{}" file tree'.format( + formula, bottle_name, conf_file, conf_dir + ) + ) + for formula in formulas: + unit.onresource_files(formula) diff --git a/build/plugins/sandbox_registry.py b/build/plugins/sandbox_registry.py new file mode 100644 index 0000000000..dc1be399b3 --- /dev/null +++ b/build/plugins/sandbox_registry.py @@ -0,0 +1,21 @@ +import os + +import ymake + + +def onregister_sandbox_import(unit, *args): + args = iter(args) + for path in args: + path = os.path.normpath(path) + source = unit.resolve_arc_path(path) + abs_source = unit.resolve(source) + if not os.path.exists(abs_source): + ymake.report_configure_error('REGISTER_SANDBOX_IMPORT: File or directory {} does not exists'.format(path)) + splited_path = path.split(os.sep) + l, r = 0, len(splited_path) + if splited_path[-1] == "__init__.py": + r -= 1 + if not splited_path[0]: + l += 1 + path = ".".join(splited_path[l:r]) + unit.onresource(["-", "{}.{}={}".format("SANDBOX_TASK_REGISTRY", path, path)]) diff --git a/build/plugins/split_codegen.py b/build/plugins/split_codegen.py new file mode 100644 index 0000000000..4c3fe5cf2a --- /dev/null +++ b/build/plugins/split_codegen.py @@ -0,0 +1,43 @@ +from _common import sort_by_keywords + +# This hard-coded many times in CppParts in various codegens +_DEFAULT_CPP_PARTS = 20 +# See TCodegenParams::MethodStream usage in factor codegen +_ADDITIONAL_STREAM_COUNT = 5 + + +def onsplit_codegen(unit, *args): + ''' + @usage: SPLIT_CODEGEN(tool prefix opts... [OUT_NUM num] [OUTPUT_INCLUDES output_includes...]) + + Generator of a certain number of parts of the .cpp file + one header .h file from .in + + Supports keywords: + 1. OUT_NUM <the number of generated Prefix.N.cpp default 25 (N varies from 0 to 24)> + 2. 
OUTPUT_INCLUDES <path to files that will be included in generalnyj of macro files> + ''' + keywords = {"OUT_NUM": 1} + flat_args, spec_args = sort_by_keywords(keywords, args) + + num_outputs = _DEFAULT_CPP_PARTS + _ADDITIONAL_STREAM_COUNT + if "OUT_NUM" in spec_args: + num_outputs = int(spec_args["OUT_NUM"][0]) + + tool = flat_args[0] + prefix = flat_args[1] + + cmd = [tool, prefix, 'OUT'] + for num in range(num_outputs): + cmd.append('{}.{}.cpp'.format(prefix, num)) + + cpp_parts = int(num_outputs) - _ADDITIONAL_STREAM_COUNT + cpp_parts_args = ['--cpp-parts', str(cpp_parts)] + + if len(flat_args) > 2: + if flat_args[2] != 'OUTPUT_INCLUDES': + cmd.append('OPTS') + cmd += cpp_parts_args + flat_args[2:] + else: + cmd += ['OPTS'] + cpp_parts_args + + unit.on_split_codegen_base(cmd) diff --git a/build/plugins/ssqls.py b/build/plugins/ssqls.py new file mode 100644 index 0000000000..35461851de --- /dev/null +++ b/build/plugins/ssqls.py @@ -0,0 +1,41 @@ +from os.path import splitext + +import ymake +from _common import resolve_includes + + +class SSQLSParser(object): + def __init__(self, path, unit): + s = unit.resolve_arc_path(path) + assert s.startswith('$S/') and s.endswith('.ssqls'), s + h = '$B/' + s[3:-6] + '.h' + + import xml.etree.cElementTree as ET + + try: + doc = ET.parse(path) + except ET.ParseError as e: + unit.message(['error', 'malformed XML {}: {}'.format(path, e)]) + doc = ET.Element('DbObject') + xmls, headers = self.parse_doc(doc) + self._includes = resolve_includes(unit, s, xmls) + self._induced = {'cpp': [h], 'h': resolve_includes(unit, h, headers)} + + @staticmethod + def parse_doc(doc): + paths = lambda nodes: filter(None, (e.get('path') for e in nodes)) + includes = doc.findall('include') + ancestors = paths(doc.findall('ancestors/ancestor')) + headers = [e.text.strip('<>""') for e in includes] + headers += [splitext(s)[0] + '.h' for s in ancestors] + return paths(includes) + ancestors, headers + + def includes(self): + return self._includes + + def induced_deps(self): + return self._induced + + +def init(): + ymake.addparser('ssqls', SSQLSParser) diff --git a/build/plugins/suppressions.py b/build/plugins/suppressions.py new file mode 100644 index 0000000000..db5037646d --- /dev/null +++ b/build/plugins/suppressions.py @@ -0,0 +1,24 @@ +def onsuppressions(unit, *args): + """ + SUPPRESSIONS() - allows to specify files with suppression notation which will be used by + address, leak or thread sanitizer runtime by default. + Use asan.supp filename for address sanitizer, lsan.supp for leak sanitizer + and tsan.supp for thread sanitizer suppressions respectively. + See https://clang.llvm.org/docs/AddressSanitizer.html#suppressing-memory-leaks + for details. 
+ """ + import os + + valid = ("asan.supp", "tsan.supp", "lsan.supp") + + if unit.get("SANITIZER_TYPE") in ("leak", "address", "thread"): + for x in args: + if os.path.basename(x) not in valid: + unit.message( + [ + 'error', + "Invalid suppression filename: {} (any of the following is expected: {})".format(x, valid), + ] + ) + return + unit.onsrcs(["GLOBAL"] + list(args)) diff --git a/build/plugins/tests/fake_ymake.py b/build/plugins/tests/fake_ymake.py new file mode 100644 index 0000000000..a20d28525a --- /dev/null +++ b/build/plugins/tests/fake_ymake.py @@ -0,0 +1,2 @@ +def addparser(): + pass diff --git a/build/plugins/tests/test_code_generator.py b/build/plugins/tests/test_code_generator.py new file mode 100644 index 0000000000..7f3267a55e --- /dev/null +++ b/build/plugins/tests/test_code_generator.py @@ -0,0 +1,25 @@ +import sys +from build.plugins.tests import fake_ymake + +sys.modules['ymake'] = fake_ymake + +from build.plugins import code_generator + + +def test_include_parser(): + template_file = """ + @ from 'util/namespace.macro' import namespace, change_namespace, close_namespaces + @ import 'market/tools/code_generator/templates/serialization/json.macro' as json + @ import 'market/tools/code_generator/templates/serialization/request_parameters.macro' as rp + #include <sss/abcdefg.h> + #include<fff/asd> + #include "hhh/quququ.h" + """ + + includes, induced = code_generator.CodeGeneratorTemplateParser.parse_includes(template_file.split('\n')) + assert includes == [ + 'util/namespace.macro', + 'market/tools/code_generator/templates/serialization/json.macro', + 'market/tools/code_generator/templates/serialization/request_parameters.macro', + ] + assert induced == ['sss/abcdefg.h', 'fff/asd', 'hhh/quququ.h'] diff --git a/build/plugins/tests/test_common.py b/build/plugins/tests/test_common.py new file mode 100644 index 0000000000..e1780354f8 --- /dev/null +++ b/build/plugins/tests/test_common.py @@ -0,0 +1,49 @@ +import pytest + +import build.plugins._common as pc + + +def test_sort_by_keywords(): + keywords = {'KEY1': 2, 'KEY2': 0, 'KEY3': 1} + args = 'aaaa bbbb KEY2 KEY1 kkk10 kkk11 ccc ddd KEY3 kkk3 eee'.split() + flat, spec = pc.sort_by_keywords(keywords, args) + assert flat == ['aaaa', 'bbbb', 'ccc', 'ddd', 'eee'] + assert spec == {'KEY1': ['kkk10', 'kkk11'], 'KEY2': True, 'KEY3': ['kkk3']} + + keywords = {'KEY1': 0, 'KEY2': 4} + args = 'aaaa KEY2 eee'.split() + flat, spec = pc.sort_by_keywords(keywords, args) + assert flat == ['aaaa'] + assert spec == {'KEY2': ['eee']} + + keywords = {'KEY1': 2, 'KEY2': 2} + args = 'KEY1 k10 KEY2 k20 KEY1 k11 KEY2 k21 KEY1 k13'.split() + flat, spec = pc.sort_by_keywords(keywords, args) + assert flat == [] + assert spec == {'KEY1': ['k10', 'k11', 'k13'], 'KEY2': ['k20', 'k21']} + + +def test_filter_out_by_keyword(): + assert pc.filter_out_by_keyword([], 'A') == [] + assert pc.filter_out_by_keyword(['x'], 'A') == ['x'] + assert pc.filter_out_by_keyword(['x', 'A'], 'A') == ['x'] + assert pc.filter_out_by_keyword(['x', 'A', 'B'], 'A') == ['x'] + assert pc.filter_out_by_keyword(['x', 'A', 'B', 'y'], 'A') == ['x', 'y'] + assert pc.filter_out_by_keyword(['x', 'A', 'A', 'y'], 'A') == ['x', 'y'] + assert pc.filter_out_by_keyword(['x', 'A', 'A', 'A'], 'A') == ['x'] + assert pc.filter_out_by_keyword(['x', 'A', 'A', 'A', 'B', 'y'], 'A') == ['x', 'y'] + assert pc.filter_out_by_keyword(['x', 'A', 'A', 'A', 'B', 'y', 'A'], 'A') == ['x', 'y'] + assert pc.filter_out_by_keyword(['x', 'A', 'A', 'A', 'B', 'y', 'A', 'F', 'z'], 'A') == ['x', 'y', 'z'] + + 
+test_data = [ + [[1, 2, 3], 1, [[1], [2], [3]]], + [[1, 2, 3], 2, [[1, 2], [3]]], + [[1, 2, 3, 4], 2, [[1, 2], [3, 4]]], + [[1], 5, [[1]]], +] + + +@pytest.mark.parametrize('lst, chunk_size, expected', test_data, ids=[str(num + 1) for num in range(len(test_data))]) +def test_generate_chunks(lst, chunk_size, expected): + assert list(pc.generate_chunks(lst, chunk_size)) == expected diff --git a/build/plugins/tests/test_requirements.py b/build/plugins/tests/test_requirements.py new file mode 100644 index 0000000000..0c41fbaad0 --- /dev/null +++ b/build/plugins/tests/test_requirements.py @@ -0,0 +1,91 @@ +import pytest + +import build.plugins._requirements as requirements +import build.plugins.lib.test_const as consts + + +class TestRequirements(object): + @pytest.mark.parametrize('test_size', consts.TestSize.sizes()) + def test_cpu(self, test_size): + max_cpu = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Cpu) + min_cpu = consts.TestRequirementsConstants.MinCpu + assert requirements.check_cpu(-1, test_size) + assert requirements.check_cpu(min_cpu - 1, test_size) + assert requirements.check_cpu("unknown", test_size) + assert not requirements.check_cpu(1, test_size) + assert not requirements.check_cpu(3, test_size) + assert requirements.check_cpu(1000, test_size) + if max_cpu != consts.TestRequirementsConstants.All: + assert requirements.check_cpu(max_cpu + 1, test_size) + assert requirements.check_cpu(max_cpu + 4, test_size) + assert requirements.check_cpu(consts.TestRequirementsConstants.All, test_size) + else: + assert not requirements.check_cpu(consts.TestRequirementsConstants.All, test_size) + + @pytest.mark.parametrize('test_size', consts.TestSize.sizes()) + def test_ram(self, test_size): + max_ram = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Ram) + min_ram = consts.TestRequirementsConstants.MinRam + assert requirements.check_ram(-1, test_size) + assert requirements.check_ram(min_ram - 1, test_size) + assert requirements.check_ram(max_ram + 1, test_size) + assert not requirements.check_ram(1, test_size) + assert not requirements.check_ram(4, test_size) + assert not requirements.check_ram(5, test_size) + assert not requirements.check_ram(32, consts.TestSize.Large) + assert requirements.check_ram(48, consts.TestSize.Large) + + assert not requirements.check_ram(1, test_size, is_kvm=True) + assert not requirements.check_ram(4, test_size, is_kvm=True) + assert not requirements.check_ram(16, test_size, is_kvm=True) + assert requirements.check_ram(32, test_size, is_kvm=True) + + @pytest.mark.parametrize('test_size', consts.TestSize.sizes()) + def test_ram_disk(self, test_size): + max_ram_disk = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.RamDisk) + min_ram_disk = consts.TestRequirementsConstants.MinRamDisk + assert requirements.check_ram_disk(-1, test_size) + assert requirements.check_ram_disk(min_ram_disk - 1, test_size) + assert requirements.check_ram_disk(max_ram_disk + 1, test_size) + assert requirements.check_ram_disk(33, test_size) + assert not requirements.check_ram_disk(32, test_size) + assert not requirements.check_ram_disk(1, test_size) + assert not requirements.check_ram_disk(4, test_size) + assert not requirements.validate_ram_disk_requirement( + 'ram_disk', '0', test_size, False, True, False, False, False, 1 + ) + assert not requirements.validate_ram_disk_requirement( + 'ram_disk', '1', test_size, False, True, False, False, False, 1 + ) + assert not 
requirements.validate_ram_disk_requirement( + 'ram_disk', '1', test_size, True, True, False, False, False, 0 + ) + assert not requirements.validate_ram_disk_requirement( + 'ram_disk', '1', test_size, False, False, False, False, False, 0 + ) + if test_size != consts.TestSize.Large: + assert requirements.validate_ram_disk_requirement( + 'ram_disk', '1', test_size, False, True, False, False, False, 0 + ) + assert requirements.validate_ram_disk_requirement( + 'ram_disk', '1', test_size, False, True, True, False, False, 0 + ) + assert requirements.validate_ram_disk_requirement( + 'ram_disk', '1', test_size, False, True, False, True, False, 0 + ) + assert requirements.validate_ram_disk_requirement( + 'ram_disk', '1', test_size, False, True, False, False, True, 0 + ) + else: + assert not requirements.validate_ram_disk_requirement( + 'ram_disk', '1', test_size, False, True, False, False, False, 0 + ) + assert not requirements.validate_ram_disk_requirement( + 'ram_disk', '1', test_size, False, True, True, False, False, 0 + ) + assert not requirements.validate_ram_disk_requirement( + 'ram_disk', '1', test_size, False, True, False, True, False, 0 + ) + assert not requirements.validate_ram_disk_requirement( + 'ram_disk', '1', test_size, False, True, False, False, True, 0 + ) diff --git a/build/plugins/tests/test_ssqls.py b/build/plugins/tests/test_ssqls.py new file mode 100644 index 0000000000..e5daae3a57 --- /dev/null +++ b/build/plugins/tests/test_ssqls.py @@ -0,0 +1,28 @@ +import sys +from build.plugins.tests import fake_ymake + +sys.modules['ymake'] = fake_ymake + +import xml.etree.cElementTree as ET + +from build.plugins import ssqls + + +example = '''\ +<?xml version="1.0" encoding="utf-8"?> +<DbObject> + <include path="A.ssqls"><a.h></include> + <include>"b.h"</include> + + <ancestors> + <ancestor path="C.ssqls"/> + </ancestors> +</DbObject> +''' + + +def test_include_parser(): + doc = ET.fromstring(example) + xmls, headers = ssqls.SSQLSParser.parse_doc(doc) + assert headers == ['a.h', 'b.h', 'C.h'] + assert xmls == ['A.ssqls', 'C.ssqls'] diff --git a/build/plugins/tests/ya.make b/build/plugins/tests/ya.make new file mode 100644 index 0000000000..0c54899cc9 --- /dev/null +++ b/build/plugins/tests/ya.make @@ -0,0 +1,25 @@ +PY2TEST() + +OWNER(g:yatool) + +PEERDIR( + build/plugins +) + +PY_SRCS( + fake_ymake.py +) + +TEST_SRCS( + test_code_generator.py + test_common.py + test_requirements.py + test_ssqls.py +) + +NO_CHECK_IMPORTS( + build.plugins.code_generator + build.plugins.ssqls +) + +END() diff --git a/build/plugins/uservices.py b/build/plugins/uservices.py new file mode 100644 index 0000000000..69dffd325e --- /dev/null +++ b/build/plugins/uservices.py @@ -0,0 +1,37 @@ +import json +import ymake + + +def on_process_usrv_files(unit, *args): + mode = None + if args[0] == 'NO_DEPS': + for f in args[1:]: + if f == 'OUT_NOAUTO': + mode = f + continue + if mode is not None: + unit.on_move([f + '.usrv', mode, f]) + elif f.endswith('.cpp'): + unit.on_move([f + '.usrv', 'OUT', f]) + else: + unit.on_move([f + '.usrv', 'OUT_NOAUTO', f]) + return + + deps_file = unit.resolve(unit.resolve_arc_path(args[0])) + try: + all_deps = json.load(open(deps_file, 'r')) + except Exception as e: + ymake.report_configure_error('Malformed dependencies JSON `{}`: {}'.format(args[0], e.__repr__())) + return + mode = 'OUT' + for f in args[1:]: + if f == 'OUT_NOAUTO': + mode = f + continue + try: + deps = all_deps[f] + except KeyError: + ymake.report_configure_error('Dependencies for {} not found in {}'.format(f, args[0])) 
+ unit.on_usrv_mv_with_deps([f]) + return + unit.on_move([f + '.usrv', mode, f, 'CPP_DEPS'] + deps) diff --git a/build/plugins/ya.make b/build/plugins/ya.make new file mode 100644 index 0000000000..e9ca97626d --- /dev/null +++ b/build/plugins/ya.make @@ -0,0 +1,24 @@ +OWNER(g:ymake) + +PY2_LIBRARY() + +PY_SRCS( + code_generator.py + ssqls.py + maps_mobile_idl.py + + _common.py + _requirements.py +) + +PEERDIR( + build/plugins/lib + build/plugins/lib/test_const +) + +END() + +RECURSE( + tests + lib/test_const +) diff --git a/build/plugins/yabs_generate_conf.py b/build/plugins/yabs_generate_conf.py new file mode 100644 index 0000000000..ad416aaa0b --- /dev/null +++ b/build/plugins/yabs_generate_conf.py @@ -0,0 +1,62 @@ +from _common import sort_by_keywords + + +def get_or_default(kv, name, default): + if name in kv: + return kv[name][0] + return default + + +def onyabs_generate_conf(unit, *args): + flat, kv = sort_by_keywords({'MODE': 1, 'SCRIPT': 1, 'SRC': 1, 'TOOL': 1, 'CONF_DIR': 1, 'DEST': 1}, args) + src = get_or_default(kv, 'SRC', 'yabs/server/phantom') + mode = get_or_default(kv, 'MODE', 'production') + + script = src + "/" + get_or_default(kv, 'SCRIPT', 'mkconf.py') + conf = src + "/" + get_or_default(kv, 'CONF_DIR', 'conf-tmpl') + tool = src + "/" + get_or_default(kv, 'TOOL', 'yabs_conf') + + for name in flat: + filename = "/".join([conf, name]) + unit.onpython( + [ + script, + "--cluster-conf-binary", + tool, + "--mode", + mode, + "--dest-dir", + "${BINDIR}", + filename, + "IN", + filename, + "OUT_NOAUTO", + "${BINDIR}/%s" % name, + "TOOL", + tool, + ] + ) + + +def onyabs_generate_phantom_conf_patch(unit, *args): + flat, kv = sort_by_keywords({'SRC': 1, 'DST': 1}, args) + src = '${ARCADIA_BUILD_ROOT}/' + get_or_default(kv, 'SRC', 'yabs/server/phantom/conf') + dst = '${ARCADIA_BUILD_ROOT}/' + get_or_default(kv, 'DST', 'yabs/server/phantom/conf-test') + for f in flat: + lhs = src + '/' + f + rhs = dst + '/' + f + unit.onpython(['mkdiff.py', lhs, rhs, 'IN', lhs, 'IN', rhs, 'STDOUT', f + ".patch"]) + + +def onyabs_generate_phantom_conf_test_check(unit, *args): + yabs_path = args[0] + for name in args[1:]: + unit.onpython( + """ + build/scripts/wrapper.py mkcheckconf.sh ${{ARCADIA_BUILD_ROOT}}/{yabs_path}/phantom/conf-test/yabs-{role}.conf yabs-check-{role}.conf + IN mkcheckconf.sh ${{ARCADIA_BUILD_ROOT}}/{yabs_path}/phantom/conf-test/yabs-{role}.conf + OUT yabs-check-{role}.conf +""".format( + yabs_path=yabs_path, role=name + ).split() # noqa + ) diff --git a/build/plugins/yql_python_udf.py b/build/plugins/yql_python_udf.py new file mode 100644 index 0000000000..b0f9570090 --- /dev/null +++ b/build/plugins/yql_python_udf.py @@ -0,0 +1,60 @@ +from _common import sort_by_keywords + + +def get_or_default(kv, name, default): + if name in kv: + return kv[name][0] + return default + + +def onregister_yql_python_udf(unit, *args): + flat, kv = sort_by_keywords({'NAME': 1, 'RESOURCE_NAME': 1, 'ADD_LIBRA_MODULES': 1}, args) + assert len(flat) == 0 + name = get_or_default(kv, 'NAME', 'CustomPython') + resource_name = get_or_default(kv, 'RESOURCE_NAME', name) + add_libra_modules = get_or_default(kv, 'ADD_LIBRA_MODULES', 'no') == 'yes' + + use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') == 'yes' + py3 = unit.get('PYTHON3') == 'yes' + + unit.onyql_abi_version(['2', '27', '0']) + unit.onpeerdir(['yql/udfs/common/python/python_udf']) + unit.onpeerdir(['ydb/library/yql/public/udf']) + + if add_libra_modules: + unit.onpeerdir(['quality/user_sessions/libra_arc/noyql']) + 
unit.onpeerdir(['yql/udfs/quality/libra/module']) + + if use_arcadia_python: + flavor = 'Arcadia' + unit.onpeerdir( + ['library/python/runtime', 'yql/udfs/common/python/main'] + if not py3 + else ['library/python/runtime_py3', 'yql/udfs/common/python/main_py3'] + ) + else: + flavor = 'System' + + output_includes = [ + 'yql/udfs/common/python/python_udf/python_udf.h', + 'ydb/library/yql/public/udf/udf_registrator.h', + ] + if add_libra_modules: + output_includes.append('yql/udfs/quality/libra/module/module.h') + + path = name + '.yql_python_udf.cpp' + libra_flag = '1' if add_libra_modules else '0' + unit.onpython( + [ + 'build/scripts/gen_yql_python_udf.py', + flavor, + name, + resource_name, + path, + libra_flag, + 'OUT', + path, + 'OUTPUT_INCLUDES', + ] + + output_includes + ) diff --git a/build/plugins/ytest.py b/build/plugins/ytest.py new file mode 100644 index 0000000000..79db740ca0 --- /dev/null +++ b/build/plugins/ytest.py @@ -0,0 +1,1300 @@ +import os +import re +import sys +import json +import copy +import base64 +import shlex +import _common +import lib.test_const as consts +import _requirements as reqs +import StringIO +import subprocess +import collections + +import ymake + + +MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/' +MDS_SCHEME = 'mds' +CANON_DATA_DIR_NAME = 'canondata' +CANON_OUTPUT_STORAGE = 'canondata_storage' +CANON_RESULT_FILE_NAME = 'result.json' +CANON_MDS_RESOURCE_REGEX = re.compile(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)') +CANON_SBR_RESOURCE_REGEX = re.compile(r'(sbr:/?/?(\d+))') + +BLOCK_SEPARATOR = '=============================================================' +SPLIT_FACTOR_MAX_VALUE = 1000 +SPLIT_FACTOR_TEST_FILES_MAX_VALUE = 4250 +PARTITION_MODS = ('SEQUENTIAL', 'MODULO') +DEFAULT_TIDY_CONFIG = "build/config/tests/clang_tidy/config.yaml" +DEFAULT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_default_map.json" +PROJECT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_project_map.json" + + +tidy_config_map = None + + +def ontest_data(unit, *args): + ymake.report_configure_error("TEST_DATA is removed in favour of DATA") + + +def prepare_recipes(data): + data = data.replace('"USE_RECIPE_DELIM"', "\n") + data = data.replace("$TEST_RECIPES_VALUE", "") + return base64.b64encode(data or "") + + +def prepare_env(data): + data = data.replace("$TEST_ENV_VALUE", "") + return serialize_list(shlex.split(data)) + + +def is_yt_spec_contain_pool_info(filename): # XXX switch to yson in ymake + perf test for configure + pool_re = re.compile(r"""['"]*pool['"]*\s*?=""") + cypress_root_re = re.compile(r"""['"]*cypress_root['"]*\s*=""") + with open(filename, 'r') as afile: + yt_spec = afile.read() + return pool_re.search(yt_spec) and cypress_root_re.search(yt_spec) + + +def validate_test(unit, kw): + def get_list(key): + return deserialize_list(kw.get(key, "")) + + valid_kw = copy.deepcopy(kw) + errors = [] + warnings = [] + + if valid_kw.get('SCRIPT-REL-PATH') == 'boost.test': + project_path = valid_kw.get('BUILD-FOLDER-PATH', "") + if not project_path.startswith( + ("contrib", "mail", "maps", "tools/idl", "metrika", "devtools", "mds", "yandex_io", "smart_devices") + ): + errors.append("BOOSTTEST is not allowed here") + elif valid_kw.get('SCRIPT-REL-PATH') == 'gtest': + project_path = valid_kw.get('BUILD-FOLDER-PATH', "") + if not project_path.startswith(("contrib", "devtools", "mds")): + errors.append("GTEST_UGLY is not allowed here, use GTEST instead") + + size_timeout = 
collections.OrderedDict(sorted(consts.TestSize.DefaultTimeouts.items(), key=lambda t: t[1])) + + size = valid_kw.get('SIZE', consts.TestSize.Small).lower() + tags = set(get_list("TAG")) + requirements_orig = get_list("REQUIREMENTS") + in_autocheck = consts.YaTestTags.NotAutocheck not in tags and consts.YaTestTags.Manual not in tags + is_fat = consts.YaTestTags.Fat in tags + is_force_sandbox = consts.YaTestTags.ForceDistbuild not in tags and is_fat + is_ytexec_run = consts.YaTestTags.YtRunner in tags + is_fuzzing = valid_kw.get("FUZZING", False) + is_kvm = 'kvm' in requirements_orig + requirements = {} + secret_requirements = ('sb_vault', 'yav') + list_requirements = secret_requirements + for req in requirements_orig: + if req in ('kvm',): + requirements[req] = str(True) + continue + + if ":" in req: + req_name, req_value = req.split(":", 1) + if req_name in list_requirements: + requirements[req_name] = ",".join(filter(None, [requirements.get(req_name), req_value])) + else: + if req_name in requirements: + if req_value in ["0"]: + warnings.append( + "Requirement [[imp]]{}[[rst]] is dropped [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format( + req_name, requirements[req_name], req_value + ) + ) + del requirements[req_name] + elif requirements[req_name] != req_value: + warnings.append( + "Requirement [[imp]]{}[[rst]] is redefined [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format( + req_name, requirements[req_name], req_value + ) + ) + requirements[req_name] = req_value + else: + requirements[req_name] = req_value + else: + errors.append("Invalid requirement syntax [[imp]]{}[[rst]]: expect <requirement>:<value>".format(req)) + + if not errors: + for req_name, req_value in requirements.items(): + error_msg = reqs.validate_requirement( + req_name, + req_value, + size, + is_force_sandbox, + in_autocheck, + is_fuzzing, + is_kvm, + is_ytexec_run, + requirements, + ) + if error_msg: + errors += [error_msg] + + invalid_requirements_for_distbuild = [ + requirement for requirement in requirements.keys() if requirement not in ('ram', 'ram_disk', 'cpu', 'network') + ] + sb_tags = [tag for tag in tags if tag.startswith('sb:')] + + if is_fat: + if size != consts.TestSize.Large: + errors.append("Only LARGE test may have ya:fat tag") + + if in_autocheck and not is_force_sandbox: + if invalid_requirements_for_distbuild: + errors.append( + "'{}' REQUIREMENTS options can be used only for FAT tests without ya:force_distbuild tag. Remove TAG(ya:force_distbuild) or an option.".format( + invalid_requirements_for_distbuild + ) + ) + if sb_tags: + errors.append( + "You can set sandbox tags '{}' only for FAT tests without ya:force_distbuild. Remove TAG(ya:force_sandbox) or sandbox tags.".format( + sb_tags + ) + ) + if consts.YaTestTags.SandboxCoverage in tags: + errors.append("You can set 'ya:sandbox_coverage' tag only for FAT tests without ya:force_distbuild.") + if is_ytexec_run: + errors.append( + "Running LARGE tests over YT (ya:yt) on Distbuild (ya:force_distbuild) is forbidden. Consider removing TAG(ya:force_distbuild)." 
+ ) + else: + if is_force_sandbox: + errors.append('ya:force_sandbox can be used with LARGE tests only') + if consts.YaTestTags.NoFuse in tags: + errors.append('ya:nofuse can be used with LARGE tests only') + if consts.YaTestTags.Privileged in tags: + errors.append("ya:privileged can be used with LARGE tests only") + if in_autocheck and size == consts.TestSize.Large: + errors.append("LARGE test must have ya:fat tag") + + if consts.YaTestTags.Privileged in tags and 'container' not in requirements: + errors.append("Only tests with 'container' requirement can have 'ya:privileged' tag") + + if size not in size_timeout: + errors.append( + "Unknown test size: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format( + size.upper(), ", ".join([sz.upper() for sz in size_timeout.keys()]) + ) + ) + else: + try: + timeout = int(valid_kw.get('TEST-TIMEOUT', size_timeout[size]) or size_timeout[size]) + script_rel_path = valid_kw.get('SCRIPT-REL-PATH') + if timeout < 0: + raise Exception("Timeout must be > 0") + if size_timeout[size] < timeout and in_autocheck and script_rel_path != 'java.style': + suggested_size = None + for s, t in size_timeout.items(): + if timeout <= t: + suggested_size = s + break + + if suggested_size: + suggested_size = ", suggested size: [[imp]]{}[[rst]]".format(suggested_size.upper()) + else: + suggested_size = "" + errors.append( + "Max allowed timeout for test size [[imp]]{}[[rst]] is [[imp]]{} sec[[rst]]{}".format( + size.upper(), size_timeout[size], suggested_size + ) + ) + except Exception as e: + errors.append("Error when parsing test timeout: [[bad]]{}[[rst]]".format(e)) + + requirements_list = [] + for req_name, req_value in requirements.iteritems(): + requirements_list.append(req_name + ":" + req_value) + valid_kw['REQUIREMENTS'] = serialize_list(requirements_list) + + # Mark test with ya:external tag if it requests any secret from external storages + # It's not stable and nonreproducible by definition + for x in secret_requirements: + if x in requirements: + tags.add(consts.YaTestTags.External) + + if valid_kw.get("FUZZ-OPTS"): + for option in get_list("FUZZ-OPTS"): + if not option.startswith("-"): + errors.append( + "Unrecognized fuzzer option '[[imp]]{}[[rst]]'. All fuzzer options should start with '-'".format( + option + ) + ) + break + eqpos = option.find("=") + if eqpos == -1 or len(option) == eqpos + 1: + errors.append( + "Unrecognized fuzzer option '[[imp]]{}[[rst]]'. 
All fuzzer options should obtain value specified after '='".format( + option + ) + ) + break + if option[eqpos - 1] == " " or option[eqpos + 1] == " ": + errors.append("Spaces are not allowed: '[[imp]]{}[[rst]]'".format(option)) + break + if option[:eqpos] in ("-runs", "-dict", "-jobs", "-workers", "-artifact_prefix", "-print_final_stats"): + errors.append( + "You can't use '[[imp]]{}[[rst]]' - it will be automatically calculated or configured during run".format( + option + ) + ) + break + + if valid_kw.get("YT-SPEC"): + if not is_ytexec_run: + errors.append("You can use YT_SPEC macro only tests marked with ya:yt tag") + else: + for filename in get_list("YT-SPEC"): + filename = unit.resolve('$S/' + filename) + if not os.path.exists(filename): + errors.append("File '{}' specified in the YT_SPEC macro doesn't exist".format(filename)) + continue + if not is_yt_spec_contain_pool_info(filename): + tags.add(consts.YaTestTags.External) + tags.add("ya:yt_research_pool") + + if valid_kw.get("USE_ARCADIA_PYTHON") == "yes" and valid_kw.get("SCRIPT-REL-PATH") == "py.test": + errors.append("PYTEST_SCRIPT is deprecated") + + partition = valid_kw.get('TEST_PARTITION', 'SEQUENTIAL') + if partition not in PARTITION_MODS: + raise ValueError('partition mode should be one of {}, detected: {}'.format(PARTITION_MODS, partition)) + + if valid_kw.get('SPLIT-FACTOR'): + if valid_kw.get('FORK-MODE') == 'none': + errors.append('SPLIT_FACTOR must be use with FORK_TESTS() or FORK_SUBTESTS() macro') + + value = 1 + try: + value = int(valid_kw.get('SPLIT-FACTOR')) + if value <= 0: + raise ValueError("must be > 0") + if value > SPLIT_FACTOR_MAX_VALUE: + raise ValueError("the maximum allowed value is {}".format(SPLIT_FACTOR_MAX_VALUE)) + except ValueError as e: + errors.append('Incorrect SPLIT_FACTOR value: {}'.format(e)) + + if valid_kw.get('FORK-TEST-FILES') and size != consts.TestSize.Large: + nfiles = count_entries(valid_kw.get('TEST-FILES')) + if nfiles * value > SPLIT_FACTOR_TEST_FILES_MAX_VALUE: + errors.append( + 'Too much chunks generated:{} (limit: {}). 
Remove FORK_TEST_FILES() macro or reduce SPLIT_FACTOR({}).'.format( + nfiles * value, SPLIT_FACTOR_TEST_FILES_MAX_VALUE, value + ) + ) + + if tags: + valid_kw['TAG'] = serialize_list(tags) + + unit_path = _common.get_norm_unit_path(unit) + if ( + not is_fat + and consts.YaTestTags.Noretries in tags + and not is_ytexec_run + and not unit_path.startswith("devtools/dummy_arcadia/test/noretries") + ): + errors.append("Only LARGE tests can have 'ya:noretries' tag") + + if errors: + return None, warnings, errors + + return valid_kw, warnings, errors + + +def dump_test(unit, kw): + valid_kw, warnings, errors = validate_test(unit, kw) + for w in warnings: + unit.message(['warn', w]) + for e in errors: + ymake.report_configure_error(e) + if valid_kw is None: + return None + string_handler = StringIO.StringIO() + for k, v in valid_kw.iteritems(): + print >> string_handler, k + ': ' + v + print >> string_handler, BLOCK_SEPARATOR + data = string_handler.getvalue() + string_handler.close() + return data + + +def serialize_list(lst): + lst = filter(None, lst) + return '\"' + ';'.join(lst) + '\"' if lst else '' + + +def deserialize_list(val): + return filter(None, val.replace('"', "").split(";")) + + +def get_correct_expression_for_group_var(varname): + return "\"${join=\;:" + varname + "}\"" + + +def count_entries(x): + # see (de)serialize_list + assert x is None or isinstance(x, str), type(x) + if not x: + return 0 + return x.count(";") + 1 + + +def get_values_list(unit, key): + res = map(str.strip, (unit.get(key) or '').replace('$' + key, '').strip().split()) + return [r for r in res if r and r not in ['""', "''"]] + + +def get_norm_paths(unit, key): + # return paths without trailing (back)slash + return [x.rstrip('\\/').replace('${ARCADIA_ROOT}/', '') for x in get_values_list(unit, key)] + + +def get_unit_list_variable(unit, name): + items = unit.get(name) + if items: + items = items.split(' ') + assert items[0] == "${}".format(name), (items, name) + return items[1:] + return [] + + +def implies(a, b): + return bool((not a) or b) + + +def match_coverage_extractor_requirements(unit): + # we shouldn't add test if + return all( + [ + # tests are not requested + unit.get("TESTS_REQUESTED") == "yes", + # build doesn't imply clang coverage, which supports segment extraction from the binaries + unit.get("CLANG_COVERAGE") == "yes", + # contrib wasn't requested + implies( + _common.get_norm_unit_path(unit).startswith("contrib/"), unit.get("ENABLE_CONTRIB_COVERAGE") == "yes" + ), + ] + ) + + +def get_tidy_config_map(unit, map_path): + config_map_path = unit.resolve(os.path.join("$S", map_path)) + config_map = {} + try: + with open(config_map_path, 'r') as afile: + config_map = json.load(afile) + except ValueError: + ymake.report_configure_error("{} is invalid json".format(map_path)) + except Exception as e: + ymake.report_configure_error(str(e)) + return config_map + + +def get_default_tidy_config(unit): + unit_path = _common.get_norm_unit_path(unit) + tidy_default_config_map = get_tidy_config_map(unit, DEFAULT_TIDY_CONFIG_MAP_PATH) + for project_prefix, config_path in tidy_default_config_map.items(): + if unit_path.startswith(project_prefix): + return config_path + return DEFAULT_TIDY_CONFIG + + +ordered_tidy_map = None + + +def get_project_tidy_config(unit): + global ordered_tidy_map + if ordered_tidy_map is None: + ordered_tidy_map = list(reversed(sorted(get_tidy_config_map(unit, PROJECT_TIDY_CONFIG_MAP_PATH).items()))) + unit_path = _common.get_norm_unit_path(unit) + + for project_prefix, config_path in 
ordered_tidy_map: + if unit_path.startswith(project_prefix): + return config_path + else: + return get_default_tidy_config(unit) + + +def onadd_ytest(unit, *args): + keywords = { + "DEPENDS": -1, + "DATA": -1, + "TIMEOUT": 1, + "FORK_MODE": 1, + "SPLIT_FACTOR": 1, + "FORK_SUBTESTS": 0, + "FORK_TESTS": 0, + } + flat_args, spec_args = _common.sort_by_keywords(keywords, args) + if unit.get('ADD_SRCDIR_TO_TEST_DATA') == "yes": + unit.ondata_files(_common.get_norm_unit_path(unit)) + + test_data = sorted( + _common.filter_out_by_keyword( + spec_args.get('DATA', []) + get_norm_paths(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED' + ) + ) + + if flat_args[1] == "fuzz.test": + unit.ondata("arcadia/fuzzing/{}/corpus.json".format(_common.get_norm_unit_path(unit))) + elif flat_args[1] == "go.test": + data, _ = get_canonical_test_resources(unit) + test_data += data + elif flat_args[1] == "coverage.extractor" and not match_coverage_extractor_requirements(unit): + # XXX + # Current ymake implementation doesn't allow to call macro inside the 'when' body + # that's why we add ADD_YTEST(coverage.extractor) to every PROGRAM entry and check requirements later + return + elif flat_args[1] == "clang_tidy" and unit.get("TIDY_ENABLED") != "yes": + # Graph is not prepared + return + elif unit.get("TIDY") == "yes" and unit.get("TIDY_ENABLED") != "yes": + # clang_tidy disabled for module + return + elif flat_args[1] == "no.test": + return + test_size = ''.join(spec_args.get('SIZE', [])) or unit.get('TEST_SIZE_NAME') or '' + test_tags = serialize_list(_get_test_tags(unit, spec_args)) + test_timeout = ''.join(spec_args.get('TIMEOUT', [])) or unit.get('TEST_TIMEOUT') or '' + test_requirements = spec_args.get('REQUIREMENTS', []) + get_values_list(unit, 'TEST_REQUIREMENTS_VALUE') + + if flat_args[1] != "clang_tidy" and unit.get("TIDY_ENABLED") == "yes": + # graph changed for clang_tidy tests + if flat_args[1] in ("unittest.py", "gunittest", "g_benchmark"): + flat_args[1] = "clang_tidy" + test_size = 'SMALL' + test_tags = '' + test_timeout = "60" + test_requirements = [] + unit.set(["TEST_YT_SPEC_VALUE", ""]) + else: + return + + if flat_args[1] == "clang_tidy" and unit.get("TIDY_ENABLED") == "yes": + if unit.get("TIDY_CONFIG"): + default_config_path = unit.get("TIDY_CONFIG") + project_config_path = unit.get("TIDY_CONFIG") + else: + default_config_path = get_default_tidy_config(unit) + project_config_path = get_project_tidy_config(unit) + + unit.set(["DEFAULT_TIDY_CONFIG", default_config_path]) + unit.set(["PROJECT_TIDY_CONFIG", project_config_path]) + + fork_mode = [] + if 'FORK_SUBTESTS' in spec_args: + fork_mode.append('subtests') + if 'FORK_TESTS' in spec_args: + fork_mode.append('tests') + fork_mode = fork_mode or spec_args.get('FORK_MODE', []) or unit.get('TEST_FORK_MODE').split() + fork_mode = ' '.join(fork_mode) if fork_mode else '' + + unit_path = _common.get_norm_unit_path(unit) + + test_record = { + 'TEST-NAME': flat_args[0], + 'SCRIPT-REL-PATH': flat_args[1], + 'TESTED-PROJECT-NAME': unit.name(), + 'TESTED-PROJECT-FILENAME': unit.filename(), + 'SOURCE-FOLDER-PATH': unit_path, + # TODO get rid of BUILD-FOLDER-PATH + 'BUILD-FOLDER-PATH': unit_path, + 'BINARY-PATH': "{}/{}".format(unit_path, unit.filename()), + 'GLOBAL-LIBRARY-PATH': unit.global_filename(), + 'CUSTOM-DEPENDENCIES': ' '.join(spec_args.get('DEPENDS', []) + get_values_list(unit, 'TEST_DEPENDS_VALUE')), + 'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")), + 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")), + # 'TEST-PRESERVE-ENV': 'da', + 
'TEST-DATA': serialize_list(test_data), + 'TEST-TIMEOUT': test_timeout, + 'FORK-MODE': fork_mode, + 'SPLIT-FACTOR': ''.join(spec_args.get('SPLIT_FACTOR', [])) or unit.get('TEST_SPLIT_FACTOR') or '', + 'SIZE': test_size, + 'TAG': test_tags, + 'REQUIREMENTS': serialize_list(test_requirements), + 'TEST-CWD': unit.get('TEST_CWD_VALUE') or '', + 'FUZZ-DICTS': serialize_list( + spec_args.get('FUZZ_DICTS', []) + get_unit_list_variable(unit, 'FUZZ_DICTS_VALUE') + ), + 'FUZZ-OPTS': serialize_list(spec_args.get('FUZZ_OPTS', []) + get_unit_list_variable(unit, 'FUZZ_OPTS_VALUE')), + 'YT-SPEC': serialize_list(spec_args.get('YT_SPEC', []) + get_unit_list_variable(unit, 'TEST_YT_SPEC_VALUE')), + 'BLOB': unit.get('TEST_BLOB_DATA') or '', + 'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '', + 'TEST_IOS_DEVICE_TYPE': unit.get('TEST_IOS_DEVICE_TYPE_VALUE') or '', + 'TEST_IOS_RUNTIME_TYPE': unit.get('TEST_IOS_RUNTIME_TYPE_VALUE') or '', + 'ANDROID_APK_TEST_ACTIVITY': unit.get('ANDROID_APK_TEST_ACTIVITY_VALUE') or '', + 'TEST_PARTITION': unit.get("TEST_PARTITION") or 'SEQUENTIAL', + 'GO_BENCH_TIMEOUT': unit.get('GO_BENCH_TIMEOUT') or '', + } + + if flat_args[1] == "go.bench": + if "ya:run_go_benchmark" not in test_record["TAG"]: + return + else: + test_record["TEST-NAME"] += "_bench" + + if flat_args[1] == 'fuzz.test' and unit.get('FUZZING') == 'yes': + test_record['FUZZING'] = '1' + # use all cores if fuzzing requested + test_record['REQUIREMENTS'] = serialize_list( + filter(None, deserialize_list(test_record['REQUIREMENTS']) + ["cpu:all", "ram:all"]) + ) + + data = dump_test(unit, test_record) + if data: + unit.set_property(["DART_DATA", data]) + + +def java_srcdirs_to_data(unit, var): + extra_data = [] + for srcdir in (unit.get(var) or '').replace('$' + var, '').split(): + if srcdir == '.': + srcdir = unit.get('MODDIR') + if srcdir.startswith('${ARCADIA_ROOT}/') or srcdir.startswith('$ARCADIA_ROOT/'): + srcdir = srcdir.replace('${ARCADIA_ROOT}/', '$S/') + srcdir = srcdir.replace('$ARCADIA_ROOT/', '$S/') + if srcdir.startswith('${CURDIR}') or srcdir.startswith('$CURDIR'): + srcdir = srcdir.replace('${CURDIR}', os.path.join('$S', unit.get('MODDIR'))) + srcdir = srcdir.replace('$CURDIR', os.path.join('$S', unit.get('MODDIR'))) + srcdir = unit.resolve_arc_path(srcdir) + if not srcdir.startswith('$'): + srcdir = os.path.join('$S', unit.get('MODDIR'), srcdir) + if srcdir.startswith('$S'): + extra_data.append(srcdir.replace('$S', 'arcadia')) + return serialize_list(extra_data) + + +def onadd_check(unit, *args): + if unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + return + flat_args, spec_args = _common.sort_by_keywords( + { + "DEPENDS": -1, + "TIMEOUT": 1, + "DATA": -1, + "TAG": -1, + "REQUIREMENTS": -1, + "FORK_MODE": 1, + "SPLIT_FACTOR": 1, + "FORK_SUBTESTS": 0, + "FORK_TESTS": 0, + "SIZE": 1, + }, + args, + ) + check_type = flat_args[0] + + if check_type in ("check.data", "check.resource") and unit.get('VALIDATE_DATA') == "no": + return + + test_dir = _common.get_norm_unit_path(unit) + + test_timeout = '' + fork_mode = '' + extra_test_data = '' + extra_test_dart_data = {} + ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes' + use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') + uid_ext = '' + script_rel_path = check_type + test_files = flat_args[1:] + + if check_type in ["check.data", "check.resource"]: + uid_ext = unit.get("SBR_UID_EXT").split(" ", 1)[-1] # strip variable name + + if check_type in ["flake8.py2", "flake8.py3", "black"]: + fork_mode = unit.get('TEST_FORK_MODE') or '' + elif 
check_type == "JAVA_STYLE": + if ymake_java_test and not unit.get('ALL_SRCDIRS') or '': + return + if len(flat_args) < 2: + raise Exception("Not enough arguments for JAVA_STYLE check") + check_level = flat_args[1] + allowed_levels = { + 'base': '/yandex_checks.xml', + 'strict': '/yandex_checks_strict.xml', + 'extended': '/yandex_checks_extended.xml', + 'library': '/yandex_checks_library.xml', + } + if check_level not in allowed_levels: + raise Exception("'{}' is not allowed in LINT(), use one of {}".format(check_level, allowed_levels.keys())) + test_files[0] = allowed_levels[check_level] # replace check_level with path to config file + script_rel_path = "java.style" + test_timeout = '240' + fork_mode = unit.get('TEST_FORK_MODE') or '' + if ymake_java_test: + extra_test_data = java_srcdirs_to_data(unit, 'ALL_SRCDIRS') + + # jstyle should use the latest jdk + unit.onpeerdir([unit.get('JDK_LATEST_PEERDIR')]) + extra_test_dart_data['JDK_LATEST_VERSION'] = unit.get('JDK_LATEST_VERSION') + # TODO remove when ya-bin will be released (https://st.yandex-team.ru/DEVTOOLS-9611) + extra_test_dart_data['JDK_RESOURCE'] = 'JDK' + ( + unit.get('JDK_VERSION') or unit.get('JDK_REAL_VERSION') or '_DEFAULT' + ) + elif check_type == "gofmt": + if test_files: + test_dir = os.path.dirname(test_files[0]).lstrip("$S/") + elif check_type == "check.data": + data_re = re.compile(r"sbr:/?/?(\d+)=?.*") + data = flat_args[1:] + resources = [] + for f in data: + matched = re.match(data_re, f) + if matched: + resources.append(matched.group(1)) + if resources: + test_files = resources + else: + return + + serialized_test_files = serialize_list(test_files) + + test_record = { + 'TEST-NAME': check_type.lower(), + 'TEST-TIMEOUT': test_timeout, + 'SCRIPT-REL-PATH': script_rel_path, + 'TESTED-PROJECT-NAME': os.path.basename(test_dir), + 'SOURCE-FOLDER-PATH': test_dir, + 'CUSTOM-DEPENDENCIES': " ".join(spec_args.get('DEPENDS', [])), + 'TEST-DATA': extra_test_data, + 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")), + 'SBR-UID-EXT': uid_ext, + 'SPLIT-FACTOR': '', + 'TEST_PARTITION': 'SEQUENTIAL', + 'FORK-MODE': fork_mode, + 'FORK-TEST-FILES': '', + 'SIZE': 'SMALL', + 'TAG': '', + 'REQUIREMENTS': " ".join(spec_args.get('REQUIREMENTS', [])), + 'USE_ARCADIA_PYTHON': use_arcadia_python or '', + 'OLD_PYTEST': 'no', + 'PYTHON-PATHS': '', + # TODO remove FILES, see DEVTOOLS-7052 + 'FILES': serialized_test_files, + 'TEST-FILES': serialized_test_files, + } + test_record.update(extra_test_dart_data) + + data = dump_test(unit, test_record) + if data: + unit.set_property(["DART_DATA", data]) + + +def on_register_no_check_imports(unit): + s = unit.get('NO_CHECK_IMPORTS_FOR_VALUE') + if s not in ('', 'None'): + unit.onresource(['-', 'py/no_check_imports/{}="{}"'.format(_common.pathid(s), s)]) + + +def onadd_check_py_imports(unit, *args): + if unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + return + if unit.get('NO_CHECK_IMPORTS_FOR_VALUE').strip() == "": + return + unit.onpeerdir(['library/python/testing/import_test']) + check_type = "py.imports" + test_dir = _common.get_norm_unit_path(unit) + + use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') + test_files = serialize_list([_common.get_norm_unit_path(unit, unit.filename())]) + test_record = { + 'TEST-NAME': "pyimports", + 'TEST-TIMEOUT': '', + 'SCRIPT-REL-PATH': check_type, + 'TESTED-PROJECT-NAME': os.path.basename(test_dir), + 'SOURCE-FOLDER-PATH': test_dir, + 'CUSTOM-DEPENDENCIES': '', + 'TEST-DATA': '', + 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")), + 
'SPLIT-FACTOR': '', + 'TEST_PARTITION': 'SEQUENTIAL', + 'FORK-MODE': '', + 'FORK-TEST-FILES': '', + 'SIZE': 'SMALL', + 'TAG': '', + 'USE_ARCADIA_PYTHON': use_arcadia_python or '', + 'OLD_PYTEST': 'no', + 'PYTHON-PATHS': '', + # TODO remove FILES, see DEVTOOLS-7052 + 'FILES': test_files, + 'TEST-FILES': test_files, + } + if unit.get('NO_CHECK_IMPORTS_FOR_VALUE') != "None": + test_record["NO-CHECK"] = serialize_list(get_values_list(unit, 'NO_CHECK_IMPORTS_FOR_VALUE') or ["*"]) + else: + test_record["NO-CHECK"] = '' + data = dump_test(unit, test_record) + if data: + unit.set_property(["DART_DATA", data]) + + +def onadd_pytest_script(unit, *args): + if unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + return + unit.set(["PYTEST_BIN", "no"]) + custom_deps = get_values_list(unit, 'TEST_DEPENDS_VALUE') + timeout = filter(None, [unit.get(["TEST_TIMEOUT"])]) + if unit.get('ADD_SRCDIR_TO_TEST_DATA') == "yes": + unit.ondata_files(_common.get_norm_unit_path(unit)) + + if timeout: + timeout = timeout[0] + else: + timeout = '0' + test_type = args[0] + fork_mode = unit.get('TEST_FORK_MODE').split() or '' + split_factor = unit.get('TEST_SPLIT_FACTOR') or '' + test_size = unit.get('TEST_SIZE_NAME') or '' + + test_files = get_values_list(unit, 'TEST_SRCS_VALUE') + tags = _get_test_tags(unit) + requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE') + test_data = get_norm_paths(unit, 'TEST_DATA_VALUE') + data, data_files = get_canonical_test_resources(unit) + test_data += data + python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE') + binary_path = os.path.join(_common.get_norm_unit_path(unit), unit.filename()) + test_cwd = unit.get('TEST_CWD_VALUE') or '' + _dump_test( + unit, + test_type, + test_files, + timeout, + _common.get_norm_unit_path(unit), + custom_deps, + test_data, + python_paths, + split_factor, + fork_mode, + test_size, + tags, + requirements, + binary_path, + test_cwd=test_cwd, + data_files=data_files, + ) + + +def onadd_pytest_bin(unit, *args): + if unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + return + flat, kws = _common.sort_by_keywords({'RUNNER_BIN': 1}, args) + if flat: + ymake.report_configure_error('Unknown arguments found while processing add_pytest_bin macro: {!r}'.format(flat)) + + runner_bin = kws.get('RUNNER_BIN', [None])[0] + test_type = 'py3test.bin' if (unit.get("PYTHON3") == 'yes') else "pytest.bin" + + add_test_to_dart(unit, test_type, runner_bin=runner_bin) + + +def add_test_to_dart(unit, test_type, binary_path=None, runner_bin=None): + if unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + return + if unit.get('ADD_SRCDIR_TO_TEST_DATA') == "yes": + unit.ondata_files(_common.get_norm_unit_path(unit)) + custom_deps = get_values_list(unit, 'TEST_DEPENDS_VALUE') + timeout = filter(None, [unit.get(["TEST_TIMEOUT"])]) + if timeout: + timeout = timeout[0] + else: + timeout = '0' + fork_mode = unit.get('TEST_FORK_MODE').split() or '' + split_factor = unit.get('TEST_SPLIT_FACTOR') or '' + test_size = unit.get('TEST_SIZE_NAME') or '' + test_cwd = unit.get('TEST_CWD_VALUE') or '' + + unit_path = unit.path() + test_files = get_values_list(unit, 'TEST_SRCS_VALUE') + tags = _get_test_tags(unit) + requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE') + test_data = get_norm_paths(unit, 'TEST_DATA_VALUE') + data, data_files = get_canonical_test_resources(unit) + test_data += data + python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE') + yt_spec = get_values_list(unit, 'TEST_YT_SPEC_VALUE') + if not 
binary_path: + binary_path = os.path.join(unit_path, unit.filename()) + _dump_test( + unit, + test_type, + test_files, + timeout, + _common.get_norm_unit_path(unit), + custom_deps, + test_data, + python_paths, + split_factor, + fork_mode, + test_size, + tags, + requirements, + binary_path, + test_cwd=test_cwd, + runner_bin=runner_bin, + yt_spec=yt_spec, + data_files=data_files, + ) + + +def extract_java_system_properties(unit, args): + if len(args) % 2: + return [], 'Wrong use of SYSTEM_PROPERTIES in {}: odd number of arguments'.format(unit.path()) + + props = [] + for x, y in zip(args[::2], args[1::2]): + if x == 'FILE': + if y.startswith('${BINDIR}') or y.startswith('${ARCADIA_BUILD_ROOT}') or y.startswith('/'): + return [], 'Wrong use of SYSTEM_PROPERTIES in {}: absolute/build file path {}'.format(unit.path(), y) + + y = _common.rootrel_arc_src(y, unit) + if not os.path.exists(unit.resolve('$S/' + y)): + return [], 'Wrong use of SYSTEM_PROPERTIES in {}: can\'t resolve {}'.format(unit.path(), y) + + y = '${ARCADIA_ROOT}/' + y + props.append({'type': 'file', 'path': y}) + else: + props.append({'type': 'inline', 'key': x, 'value': y}) + + return props, None + + +def onjava_test(unit, *args): + if unit.get("TIDY") == "yes": + # graph changed for clang_tidy tests + return + + assert unit.get('MODULE_TYPE') is not None + + if unit.get('MODULE_TYPE') == 'JTEST_FOR': + if not unit.get('UNITTEST_DIR'): + ymake.report_configure_error('skip JTEST_FOR in {}: no args provided'.format(unit.path())) + return + + java_cp_arg_type = unit.get('JAVA_CLASSPATH_CMD_TYPE_VALUE') or 'MANIFEST' + if java_cp_arg_type not in ('MANIFEST', 'COMMAND_FILE', 'LIST'): + ymake.report_configure_error( + '{}: TEST_JAVA_CLASSPATH_CMD_TYPE({}) are invalid. Choose argument from MANIFEST, COMMAND_FILE or LIST)'.format( + unit.path(), java_cp_arg_type + ) + ) + return + + unit_path = unit.path() + path = _common.strip_roots(unit_path) + if unit.get('ADD_SRCDIR_TO_TEST_DATA') == "yes": + unit.ondata_files(_common.get_norm_unit_path(unit)) + + test_data = get_norm_paths(unit, 'TEST_DATA_VALUE') + test_data.append('arcadia/build/scripts/run_junit.py') + test_data.append('arcadia/build/scripts/unpacking_jtest_runner.py') + + data, data_files = get_canonical_test_resources(unit) + test_data += data + + props, error_mgs = extract_java_system_properties(unit, get_values_list(unit, 'SYSTEM_PROPERTIES_VALUE')) + if error_mgs: + ymake.report_configure_error(error_mgs) + return + for prop in props: + if prop['type'] == 'file': + test_data.append(prop['path'].replace('${ARCADIA_ROOT}', 'arcadia')) + + props = base64.b64encode(json.dumps(props, encoding='utf-8')) + + test_cwd = unit.get('TEST_CWD_VALUE') or '' # TODO: validate test_cwd value + + if unit.get('MODULE_TYPE') == 'JUNIT5': + script_rel_path = 'junit5.test' + else: + script_rel_path = 'junit.test' + + ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes' + test_record = { + 'SOURCE-FOLDER-PATH': path, + 'TEST-NAME': '-'.join([os.path.basename(os.path.dirname(path)), os.path.basename(path)]), + 'SCRIPT-REL-PATH': script_rel_path, + 'TEST-TIMEOUT': unit.get('TEST_TIMEOUT') or '', + 'TESTED-PROJECT-NAME': path, + 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")), + # 'TEST-PRESERVE-ENV': 'da', + 'TEST-DATA': serialize_list(sorted(_common.filter_out_by_keyword(test_data, 'AUTOUPDATED'))), + 'FORK-MODE': unit.get('TEST_FORK_MODE') or '', + 'SPLIT-FACTOR': unit.get('TEST_SPLIT_FACTOR') or '', + 'CUSTOM-DEPENDENCIES': ' '.join(get_values_list(unit, 'TEST_DEPENDS_VALUE')), + 'TAG': 
serialize_list(_get_test_tags(unit)),
+        'SIZE': unit.get('TEST_SIZE_NAME') or '',
+        'REQUIREMENTS': serialize_list(get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')),
+        'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
+        # JTEST/JTEST_FOR only
+        'MODULE_TYPE': unit.get('MODULE_TYPE'),
+        'UNITTEST_DIR': unit.get('UNITTEST_DIR') or '',
+        'JVM_ARGS': serialize_list(get_values_list(unit, 'JVM_ARGS_VALUE')),
+        'SYSTEM_PROPERTIES': props,
+        'TEST-CWD': test_cwd,
+        'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
+        'JAVA_CLASSPATH_CMD_TYPE': java_cp_arg_type,
+        'JDK_RESOURCE': 'JDK' + (unit.get('JDK_VERSION') or unit.get('JDK_REAL_VERSION') or '_DEFAULT'),
+        'JDK_FOR_TESTS': 'JDK' + (unit.get('JDK_VERSION') or unit.get('JDK_REAL_VERSION') or '_DEFAULT') + '_FOR_TESTS',
+        'YT-SPEC': serialize_list(get_unit_list_variable(unit, 'TEST_YT_SPEC_VALUE')),
+    }
+    test_classpath_origins = unit.get('TEST_CLASSPATH_VALUE')
+    if test_classpath_origins:
+        test_record['TEST_CLASSPATH_ORIGINS'] = test_classpath_origins
+        test_record['TEST_CLASSPATH'] = '${TEST_CLASSPATH_MANAGED}'
+    elif ymake_java_test:
+        test_record['TEST_CLASSPATH'] = '${DART_CLASSPATH}'
+        test_record['TEST_CLASSPATH_DEPS'] = '${DART_CLASSPATH_DEPS}'
+        if unit.get('UNITTEST_DIR'):
+            test_record['TEST_JAR'] = '${UNITTEST_MOD}'
+        else:
+            test_record['TEST_JAR'] = '{}/{}.jar'.format(unit.get('MODDIR'), unit.get('REALPRJNAME'))
+
+    data = dump_test(unit, test_record)
+    if data:
+        unit.set_property(['DART_DATA', data])
+
+
+def onjava_test_deps(unit, *args):
+    if unit.get("TIDY") == "yes":
+        # graph changed for clang_tidy tests
+        return
+
+    assert unit.get('MODULE_TYPE') is not None
+    assert len(args) == 1
+    mode = args[0]
+
+    path = _common.get_norm_unit_path(unit)
+    ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes'
+
+    test_record = {
+        'SOURCE-FOLDER-PATH': path,
+        'TEST-NAME': '-'.join([os.path.basename(os.path.dirname(path)), os.path.basename(path), 'dependencies']).strip(
+            '-'
+        ),
+        'SCRIPT-REL-PATH': 'java.dependency.test',
+        'TEST-TIMEOUT': '',
+        'TESTED-PROJECT-NAME': path,
+        'TEST-DATA': '',
+        'TEST_PARTITION': 'SEQUENTIAL',
+        'FORK-MODE': '',
+        'SPLIT-FACTOR': '',
+        'CUSTOM-DEPENDENCIES': ' '.join(get_values_list(unit, 'TEST_DEPENDS_VALUE')),
+        'TAG': '',
+        'SIZE': 'SMALL',
+        'IGNORE_CLASSPATH_CLASH': ' '.join(get_values_list(unit, 'JAVA_IGNORE_CLASSPATH_CLASH_VALUE')),
+        # JTEST/JTEST_FOR only
+        'MODULE_TYPE': unit.get('MODULE_TYPE'),
+        'UNITTEST_DIR': '',
+        'SYSTEM_PROPERTIES': '',
+        'TEST-CWD': '',
+    }
+    if mode == 'strict':
+        test_record['STRICT_CLASSPATH_CLASH'] = 'yes'
+
+    if ymake_java_test:
+        test_record['CLASSPATH'] = '$B/{}/{}.jar ${{DART_CLASSPATH}}'.format(
+            unit.get('MODDIR'), unit.get('REALPRJNAME')
+        )
+
+    data = dump_test(unit, test_record)
+    unit.set_property(['DART_DATA', data])
+
+
+def _get_test_tags(unit, spec_args=None):
+    if spec_args is None:
+        spec_args = {}
+    tags = spec_args.get('TAG', []) + get_values_list(unit, 'TEST_TAGS_VALUE')
+    tags = set(tags)
+    if unit.get('EXPORT_SEM') == 'yes':
+        filter_only_tags = sorted(t for t in tags if ':' not in t)
+        unit.set(['FILTER_ONLY_TEST_TAGS', ' '.join(filter_only_tags)])
+    # DEVTOOLS-7571
+    if unit.get('SKIP_TEST_VALUE') and consts.YaTestTags.Fat in tags:
+        tags.add(consts.YaTestTags.NotAutocheck)
+
+    return tags
+
+
+def _dump_test(
+    unit,
+    test_type,
+    test_files,
+    timeout,
+    test_dir,
+    custom_deps,
+    test_data,
+    python_paths,
+    split_factor,
+    fork_mode,
+    test_size,
+    tags,
+    requirements,
+    binary_path='',
+    old_pytest=False,
+    test_cwd=None,
+    runner_bin=None,
+    yt_spec=None,
+    data_files=None,
+):
+
+    if test_type == "PY_TEST":
+        script_rel_path = "py.test"
+    else:
+        script_rel_path = test_type
+
+    unit_path = unit.path()
+    fork_test_files = unit.get('FORK_TEST_FILES_MODE')
+    fork_mode = ' '.join(fork_mode) if fork_mode else ''
+    use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
+    if test_cwd:
+        test_cwd = test_cwd.replace("$TEST_CWD_VALUE", "").replace('"MACRO_CALLS_DELIM"', "").strip()
+    test_name = os.path.basename(binary_path)
+    test_record = {
+        'TEST-NAME': os.path.splitext(test_name)[0],
+        'TEST-TIMEOUT': timeout,
+        'SCRIPT-REL-PATH': script_rel_path,
+        'TESTED-PROJECT-NAME': test_name,
+        'SOURCE-FOLDER-PATH': test_dir,
+        'CUSTOM-DEPENDENCIES': " ".join(custom_deps),
+        'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
+        # 'TEST-PRESERVE-ENV': 'da',
+        'TEST-DATA': serialize_list(sorted(_common.filter_out_by_keyword(test_data, 'AUTOUPDATED'))),
+        'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
+        'SPLIT-FACTOR': split_factor,
+        'TEST_PARTITION': unit.get('TEST_PARTITION') or 'SEQUENTIAL',
+        'FORK-MODE': fork_mode,
+        'FORK-TEST-FILES': fork_test_files,
+        'TEST-FILES': serialize_list(test_files),
+        'SIZE': test_size,
+        'TAG': serialize_list(tags),
+        'REQUIREMENTS': serialize_list(requirements),
+        'USE_ARCADIA_PYTHON': use_arcadia_python or '',
+        'OLD_PYTEST': 'yes' if old_pytest else 'no',
+        'PYTHON-PATHS': serialize_list(python_paths),
+        'TEST-CWD': test_cwd or '',
+        'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
+        'BUILD-FOLDER-PATH': _common.strip_roots(unit_path),
+        'BLOB': unit.get('TEST_BLOB_DATA') or '',
+        'CANONIZE_SUB_PATH': unit.get('CANONIZE_SUB_PATH') or '',
+    }
+    if binary_path:
+        test_record['BINARY-PATH'] = _common.strip_roots(binary_path)
+    if runner_bin:
+        test_record['TEST-RUNNER-BIN'] = runner_bin
+    if yt_spec:
+        test_record['YT-SPEC'] = serialize_list(yt_spec)
+    data = dump_test(unit, test_record)
+    if data:
+        unit.set_property(["DART_DATA", data])
+
+
+def onsetup_pytest_bin(unit, *args):
+    use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') == "yes"
+    if use_arcadia_python:
+        unit.onresource(['-', 'PY_MAIN={}'.format("library.python.pytest.main:main")])  # XXX
+        unit.onadd_pytest_bin(list(args))
+    else:
+        unit.onno_platform()
+        unit.onadd_pytest_script(["PY_TEST"])
+
+
+def onrun(unit, *args):
+    exectest_cmd = unit.get(["EXECTEST_COMMAND_VALUE"]) or ''
+    exectest_cmd += "\n" + subprocess.list2cmdline(args)
+    unit.set(["EXECTEST_COMMAND_VALUE", exectest_cmd])
+
+
+def onsetup_exectest(unit, *args):
+    command = unit.get(["EXECTEST_COMMAND_VALUE"])
+    if command is None:
+        ymake.report_configure_error("EXECTEST must have at least one RUN macro")
+        return
+    command = command.replace("$EXECTEST_COMMAND_VALUE", "")
+    if "PYTHON_BIN" in command:
+        unit.ondepends('contrib/tools/python')
+    unit.set(["TEST_BLOB_DATA", base64.b64encode(command)])
+    add_test_to_dart(unit, "exectest", binary_path=os.path.join(unit.path(), unit.filename()).replace(".pkg", ""))
+
+
+def onsetup_run_python(unit):
+    if unit.get("USE_ARCADIA_PYTHON") == "yes":
+        unit.ondepends('contrib/tools/python')
+
+
+def get_canonical_test_resources(unit):
+    unit_path = unit.path()
+    canon_data_dir = os.path.join(unit.resolve(unit_path), CANON_DATA_DIR_NAME, unit.get('CANONIZE_SUB_PATH') or '')
+
+    try:
+        _, dirs, files = next(os.walk(canon_data_dir))
+    except StopIteration:
+        # path doesn't exist
+        return [], []
+
+    if CANON_RESULT_FILE_NAME in files:
+        return _get_canonical_data_resources_v2(os.path.join(canon_data_dir, CANON_RESULT_FILE_NAME), unit_path)
+    return [], []
+
+
+def _load_canonical_file(filename, unit_path):
+    try:
+        with open(filename) as results_file:
+            return json.load(results_file)
+    except Exception as e:
+        print >> sys.stderr, "malformed canonical data in {}: {} ({})".format(unit_path, e, filename)
+        return {}
+
+
+def _get_resource_from_uri(uri):
+    m = CANON_MDS_RESOURCE_REGEX.match(uri)
+    if m:
+        res_id = m.group(1)
+        return "{}:{}".format(MDS_SCHEME, res_id)
+
+    m = CANON_SBR_RESOURCE_REGEX.match(uri)
+    if m:
+        # Resource names may clash because every Sandbox resource is stored under the name 'resource.tar.gz',
+        # so the '=' notation is used to assign each resource an explicit output path.
+        uri = m.group(1)
+        res_id = m.group(2)
+        return "{}={}".format(uri, '/'.join([CANON_OUTPUT_STORAGE, res_id]))
+
+
+def _get_external_resources_from_canon_data(data):
+    # The function must handle every canonization layout in use:
+    #   result.json: {'uri': X, 'checksum': Y}
+    #   result.json: {'testname': {'uri': X, 'checksum': Y}}
+    #   result.json: {'testname': [{'uri': X, 'checksum': Y}]}
+    # A test that returns a plain value such as {'uri': 1} would break the machinery,
+    # so a dict is treated as a resource only when both 'uri' and 'checksum' are present
+    # (still imperfect: a test may legitimately return {'uri': X, 'checksum': Y} as data; the canonization format should be unified).
+    res = set()
+
+    if isinstance(data, dict):
+        if 'uri' in data and 'checksum' in data:
+            resource = _get_resource_from_uri(data['uri'])
+            if resource:
+                res.add(resource)
+        else:
+            for k, v in data.iteritems():
+                res.update(_get_external_resources_from_canon_data(v))
+    elif isinstance(data, list):
+        for e in data:
+            res.update(_get_external_resources_from_canon_data(e))
+
+    return res
+
+
+def _get_canonical_data_resources_v2(filename, unit_path):
+    return (_get_external_resources_from_canon_data(_load_canonical_file(filename, unit_path)), [filename])
+
+
+def on_add_linter_check(unit, *args):
+    if unit.get("TIDY") == "yes":
+        return
+    source_root_from_prefix = '${ARCADIA_ROOT}/'
+    source_root_to_prefix = '$S/'
+    unlimited = -1
+
+    no_lint_value = _common.get_no_lint_value(unit)
+    if no_lint_value in ("none", "none_internal"):
+        return
+
+    if unit.get("OPENSOURCE") == "yes":
+        return
+
+    keywords = {
+        "DEPENDS": unlimited,
+        "FILES": unlimited,
+        "CONFIGS": unlimited,
+        "GLOBAL_RESOURCES": unlimited,
+        "FILE_PROCESSING_TIME": 1,
+        "EXTRA_PARAMS": unlimited,
+    }
+    flat_args, spec_args = _common.sort_by_keywords(keywords, args)
+    if len(flat_args) != 2:
+        unit.message(['ERROR', '_ADD_LINTER_CHECK params: expected 2 free parameters'])
+        return
+
+    configs = []
+    for cfg in spec_args.get('CONFIGS', []):
+        filename = unit.resolve(source_root_to_prefix + cfg)
+        if not os.path.exists(filename):
+            unit.message(['ERROR', 'Configuration file {} is not found'.format(filename)])
+            return
+        configs.append(cfg)
+    deps = []
+
+    lint_name, linter = flat_args
+    deps.append(os.path.dirname(linter))
+
+    test_files = []
+    for path in spec_args.get('FILES', []):
+        if path.startswith(source_root_from_prefix):
+            test_files.append(path.replace(source_root_from_prefix, source_root_to_prefix, 1))
+        elif path.startswith(source_root_to_prefix):
+            test_files.append(path)
+    if not test_files:
+        unit.message(['WARN', 'No files to lint for {}'.format(lint_name)])
+        return
+    for arg in spec_args.get('EXTRA_PARAMS', []):
+        if '=' not in arg:
+            unit.message(['WARN', 'Wrong EXTRA_PARAMS value: "{}". Values must have format "name=value".'.format(arg)])
+            return
+
+    deps += spec_args.get('DEPENDS', [])
+
+    for dep in deps:
+        unit.ondepends(dep)
+
+    for resource in spec_args.get('GLOBAL_RESOURCES', []):
+        unit.onpeerdir(resource)
+
+    test_record = {
+        'TEST-NAME': lint_name,
+        'SCRIPT-REL-PATH': 'custom_lint',
+        'TESTED-PROJECT-NAME': unit.name(),
+        'SOURCE-FOLDER-PATH': _common.get_norm_unit_path(unit),
+        'CUSTOM-DEPENDENCIES': " ".join(deps),
+        'TEST-DATA': '',
+        'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
+        'TEST-TIMEOUT': '',
+        'SPLIT-FACTOR': '',
+        'TEST_PARTITION': 'SEQUENTIAL',
+        'FORK-MODE': '',
+        'FORK-TEST-FILES': '',
+        'SIZE': 'SMALL',
+        'TAG': '',
+        'USE_ARCADIA_PYTHON': unit.get('USE_ARCADIA_PYTHON') or '',
+        'OLD_PYTEST': 'no',
+        'PYTHON-PATHS': '',
+        # TODO remove FILES, see DEVTOOLS-7052
+        'FILES': serialize_list(test_files),
+        'TEST-FILES': serialize_list(test_files),
+        # Linter-specific parameters
+        # TODO Add configs to DATA. See YMAKE-427
+        'LINT-CONFIGS': serialize_list(configs),
+        'LINT-NAME': lint_name,
+        'LINT-FILE-PROCESSING-TIME': spec_args.get('FILE_PROCESSING_TIME', [''])[0],
+        'LINT-EXTRA-PARAMS': serialize_list(spec_args.get('EXTRA_PARAMS', [])),
+        'LINTER': linter,
+    }
+    data = dump_test(unit, test_record)
+    if data:
+        unit.set_property(["DART_DATA", data])
diff --git a/build/plugins/ytest2.py b/build/plugins/ytest2.py
new file mode 100644
index 0000000000..f395ce2475
--- /dev/null
+++ b/build/plugins/ytest2.py
@@ -0,0 +1,58 @@
+import os
+import _common
+
+
+def dir_stmts(unit, dir):
+    unit.onpeerdir(dir)
+    unit.onsrcdir(os.sep.join([dir, 'tests']))
+
+
+def pytest_base(unit, args):
+    related_prj_dir = args[0]
+    related_prj_name = args[1]
+    dir_stmts(unit, related_prj_dir)
+    ytest_base(unit, related_prj_dir, related_prj_name, args[2:])
+    unit.set(['ADDITIONAL_PATH', '--test-related-path ${ARCADIA_ROOT}/test'])
+
+
+def ytest_base(unit, related_prj_dir, related_prj_name, args):
+    keywords = {"DEPENDS": -1, "DATA": -1}
+    flat_args, spec_args = _common.sort_by_keywords(keywords, args)
+    unit.set(['TEST-NAME', os.path.basename(flat_args[0])])
+    unit.set(['SCRIPT-REL-PATH', flat_args[1]])
+    unit.set(['SOURCE-FOLDER-PATH', related_prj_dir])
+    unit.set(['BUILD-FOLDER-PATH', os.path.join('$B', related_prj_dir)])
+    unit.set(['TESTED-BINARY-PATH', flat_args[0]])
+
+    custom_deps = ' '.join(spec_args["DEPENDS"]) if "DEPENDS" in spec_args else ''
+    unit.set(['CUSTOM-DEPENDENCIES', custom_deps])
+    data_lst = spec_args.get('DATA', []) + (unit.get(['__test_data']) or '').split(' ')
+    data_lst.sort()
+    data = '\"' + ';'.join(data_lst) + '\"' if data_lst else ''
+    unit.set(['TEST-DATA', data])
+
+    related_dirs_list = [
+        '{ARCADIA_ROOT}/devtools/${YA_ROOT}',
+        '${ARCADIA_ROOT}/devtools/${YA_ROOT}',
+        '$RELATED_TARGET_SRCDIR',
+    ]
+    related_dirs_value = []
+    for rel in related_dirs_list:
+        related_dirs_value.extend(['--test-related-path', rel])
+    unit.set(['RELATED_DIRS', ' '.join(related_dirs_value)])
+    unit.set(['TEST_KV', '${{kv;hide:"test_related_dirs {}"}}'.format(' '.join(related_dirs_list))])
+
+
+def on_unittest(unit, *args):
+    related_prj_name = args[0]
+    related_prj_dir = args[1][3:]
+    unit.set(['TEST_TYPE', '${kv;hide:"test-type unittest"}'])
+    ytest_base(unit, related_prj_dir, related_prj_name, args)
+
+
+def on_ytest(unit, *args):
+    pytest_base(unit, args)
+
+
+def on_py_test(unit, *args):
+    pytest_base(unit, args)
|
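Note (editorial, not part of the commit): both on_add_linter_check in ytest.py and ytest_base in ytest2.py parse their macro arguments with _common.sort_by_keywords, where -1 means "collect values until the next keyword" and a positive number caps how many values a keyword consumes. The sketch below uses invented argument values purely to illustrate the expected split; it assumes it is run with build/plugins on the Python path so that _common can be imported.

import _common

# A made-up _ADD_LINTER_CHECK-style argument list (names and paths are illustrative only).
args = [
    'py_flake8', 'tools/flake8_runner.py',  # free parameters: lint name and linter path
    'FILES', 'proj/a.py', 'proj/b.py',      # -1: collect values until the next keyword
    'FILE_PROCESSING_TIME', '1.5',          # 1: takes exactly one value
]
flat_args, spec_args = _common.sort_by_keywords({'FILES': -1, 'FILE_PROCESSING_TIME': 1}, args)
# Expected split:
#   flat_args == ['py_flake8', 'tools/flake8_runner.py']
#   spec_args == {'FILES': ['proj/a.py', 'proj/b.py'], 'FILE_PROCESSING_TIME': ['1.5']}

This is why on_add_linter_check indexes spec_args.get('FILE_PROCESSING_TIME', [''])[0]: even single-value keywords come back as one-element lists.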