author     Devtools Arcadia <arcadia-devtools@yandex-team.ru>  2022-02-07 18:08:42 +0300
committer  Devtools Arcadia <arcadia-devtools@mous.vla.yp-c.yandex.net>  2022-02-07 18:08:42 +0300
commit     1110808a9d39d4b808aef724c861a2e1a38d2a69 (patch)
tree       e26c9fed0de5d9873cce7e00bc214573dc2195b7 /build/plugins
download   ydb-1110808a9d39d4b808aef724c861a2e1a38d2a69.tar.gz
intermediate changes
ref:cde9a383711a11544ce7e107a78147fb96cc4029
Diffstat (limited to 'build/plugins')
-rw-r--r--  build/plugins/_common.py | 201
-rw-r--r--  build/plugins/_custom_command.py | 65
-rw-r--r--  build/plugins/_import_wrapper.py | 24
-rw-r--r--  build/plugins/_requirements.py | 49
-rw-r--r--  build/plugins/_test_const.py | 327
-rw-r--r--  build/plugins/_unpickler.py | 42
-rw-r--r--  build/plugins/_xsyn_includes.py | 60
-rw-r--r--  build/plugins/build_mn_files.py | 29
-rw-r--r--  build/plugins/bundle.py | 22
-rw-r--r--  build/plugins/code_generator.py | 45
-rw-r--r--  build/plugins/copy_files_to_build_prefix.py | 36
-rw-r--r--  build/plugins/cp.py | 30
-rw-r--r--  build/plugins/cpp_style.py | 19
-rw-r--r--  build/plugins/create_init_py.py | 15
-rw-r--r--  build/plugins/credits.py | 22
-rw-r--r--  build/plugins/docs.py | 44
-rw-r--r--  build/plugins/files.py | 5
-rw-r--r--  build/plugins/gobuild.py | 309
-rw-r--r--  build/plugins/ios_app_settings.py | 19
-rw-r--r--  build/plugins/ios_assets.py | 30
-rw-r--r--  build/plugins/java.py | 374
-rw-r--r--  build/plugins/large_files.py | 39
-rw-r--r--  build/plugins/lib/__init__.py | 0
-rw-r--r--  build/plugins/lib/_metric_resolvers.py | 11
-rw-r--r--  build/plugins/lib/nots/__init__.py | 0
-rw-r--r--  build/plugins/lib/nots/package_manager/__init__.py | 9
-rw-r--r--  build/plugins/lib/nots/package_manager/base/__init__.py | 11
-rw-r--r--  build/plugins/lib/nots/package_manager/base/constants.py | 5
-rw-r--r--  build/plugins/lib/nots/package_manager/base/lockfile.py | 68
-rw-r--r--  build/plugins/lib/nots/package_manager/base/package_json.py | 113
-rw-r--r--  build/plugins/lib/nots/package_manager/base/package_manager.py | 108
-rw-r--r--  build/plugins/lib/nots/package_manager/base/tests/package_json.py | 114
-rw-r--r--  build/plugins/lib/nots/package_manager/base/tests/ya.make | 13
-rw-r--r--  build/plugins/lib/nots/package_manager/base/ya.make | 21
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/__init__.py | 9
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/lockfile.py | 163
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/package_manager.py | 181
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py | 320
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py | 58
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/tests/ya.make | 15
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/utils.py | 19
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/workspace.py | 69
-rw-r--r--  build/plugins/lib/nots/package_manager/pnpm/ya.make | 23
-rw-r--r--  build/plugins/lib/nots/package_manager/ya.make | 14
-rw-r--r--  build/plugins/lib/nots/typescript/__init__.py | 7
-rw-r--r--  build/plugins/lib/nots/typescript/tests/tsc_wrapper.py | 168
-rw-r--r--  build/plugins/lib/nots/typescript/tests/ya.make | 13
-rw-r--r--  build/plugins/lib/nots/typescript/tsc_wrapper.py | 219
-rw-r--r--  build/plugins/lib/nots/typescript/ya.make | 18
-rw-r--r--  build/plugins/lib/nots/ya.make | 14
-rw-r--r--  build/plugins/lib/ya.make | 7
-rw-r--r--  build/plugins/linker_script.py | 12
-rw-r--r--  build/plugins/lj_archive.py | 44
-rw-r--r--  build/plugins/llvm_bc.py | 33
-rw-r--r--  build/plugins/macros_with_error.py | 29
-rw-r--r--  build/plugins/mx_archive.py | 16
-rw-r--r--  build/plugins/nots.py | 46
-rw-r--r--  build/plugins/print_module_type.py | 5
-rw-r--r--  build/plugins/pybuild.py | 648
-rw-r--r--  build/plugins/res.py | 106
-rw-r--r--  build/plugins/rodata.py | 168
-rw-r--r--  build/plugins/sandbox_registry.py | 21
-rw-r--r--  build/plugins/scarab_cant_clash.py | 66
-rw-r--r--  build/plugins/split_codegen.py | 43
-rw-r--r--  build/plugins/ssqls.py | 40
-rw-r--r--  build/plugins/suppressions.py | 19
-rw-r--r--  build/plugins/swig.py | 164
-rw-r--r--  build/plugins/tests/test_code_generator.py | 16
-rw-r--r--  build/plugins/tests/test_common.py | 49
-rw-r--r--  build/plugins/tests/test_requirements.py | 52
-rw-r--r--  build/plugins/tests/test_ssqls.py | 23
-rw-r--r--  build/plugins/tests/ya.make | 16
-rw-r--r--  build/plugins/xsyn.py | 34
-rw-r--r--  build/plugins/ya.make | 23
-rw-r--r--  build/plugins/yql_python_udf.py | 55
-rw-r--r--  build/plugins/ytest.py | 1113
-rw-r--r--  build/plugins/ytest2.py | 54
77 files changed, 6491 insertions, 0 deletions
diff --git a/build/plugins/_common.py b/build/plugins/_common.py
new file mode 100644
index 0000000000..2f831a94db
--- /dev/null
+++ b/build/plugins/_common.py
@@ -0,0 +1,201 @@
+import sys
+import hashlib
+import base64
+
+
+class Result(object):
+ pass
+
+
+def lazy(func):
+ result = Result()
+
+ def wrapper():
+ try:
+ return result._result
+ except AttributeError:
+ result._result = func()
+
+ return result._result
+
+ return wrapper
+
+
+def pathid(path):
+ return base64.b32encode(hashlib.md5(path).digest()).lower().strip('=')
+
+
+def listid(l):
+ return pathid(str(sorted(l)))
+
+
+def unpair(lst):
+ for x, y in lst:
+ yield x
+ yield y
+
+
+def iterpair(lst):
+ y = None
+
+ for x in lst:
+ if y:
+ yield (y, x)
+
+ y = None
+ else:
+ y = x
+
+
+def stripext(fname):
+ return fname[:fname.rfind('.')]
+
+
+def tobuilddir(fname):
+ if not fname:
+ return '$B'
+ if fname.startswith('$S'):
+ return fname.replace('$S', '$B', 1)
+ else:
+ return fname
+
+
+def before(s, ss):
+ p = s.find(ss)
+
+ if p == -1:
+ return s
+
+ return s[:p]
+
+
+def sort_by_keywords(keywords, args):
+ flat = []
+ res = {}
+
+ cur_key = None
+ limit = -1
+ for arg in args:
+ if arg in keywords:
+ limit = keywords[arg]
+ if limit == 0:
+ res[arg] = True
+ cur_key = None
+ limit = -1
+ else:
+ cur_key = arg
+ continue
+ if limit == 0:
+ cur_key = None
+ limit = -1
+ if cur_key:
+ if cur_key in res:
+ res[cur_key].append(arg)
+ else:
+ res[cur_key] = [arg]
+ limit -= 1
+ else:
+ flat.append(arg)
+ return (flat, res)
+
+
+def resolve_common_const(path):
+ if path.startswith('${ARCADIA_ROOT}'):
+ return path.replace('${ARCADIA_ROOT}', '$S', 1)
+ if path.startswith('${ARCADIA_BUILD_ROOT}'):
+ return path.replace('${ARCADIA_BUILD_ROOT}', '$B', 1)
+ return path
+
+
+def resolve_to_abs_path(path, source_root, build_root):
+ if path.startswith('$S') and source_root is not None:
+ return path.replace('$S', source_root, 1)
+ if path.startswith('$B') and build_root is not None:
+ return path.replace('$B', build_root, 1)
+ return path
+
+
+def resolve_to_ymake_path(path):
+ return resolve_to_abs_path(path, '${ARCADIA_ROOT}', '${ARCADIA_BUILD_ROOT}')
+
+
+def join_intl_paths(*args):
+ return '/'.join(args)
+
+
+def get(fun, num):
+ return fun()[num][0]
+
+
+def make_tuples(arg_list):
+ def tpl():
+ for x in arg_list:
+ yield (x, [])
+
+ return list(tpl())
+
+
+def resolve_includes(unit, src, paths):
+ return unit.resolve_include([src] + paths) if paths else []
+
+
+def rootrel_arc_src(src, unit):
+ if src.startswith('${ARCADIA_ROOT}/'):
+ return src[16:]
+
+ if src.startswith('${ARCADIA_BUILD_ROOT}/'):
+ return src[22:]
+
+ elif src.startswith('${CURDIR}/'):
+ return unit.path()[3:] + '/' + src[10:]
+
+ else:
+ resolved = unit.resolve_arc_path(src)
+
+ if resolved.startswith('$S/'):
+ return resolved[3:]
+
+ return src # leave as is
+
+
+def skip_build_root(x):
+ if x.startswith('${ARCADIA_BUILD_ROOT}'):
+ return x[len('${ARCADIA_BUILD_ROOT}'):].lstrip('/')
+
+ return x
+
+
+def get_interpreter_path():
+ interpreter_path = [sys.executable]
+ if 'ymake' in interpreter_path[0]:
+ interpreter_path.append('--python')
+ return interpreter_path
+
+
+def filter_out_by_keyword(test_data, keyword):
+ def _iterate():
+ i = 0
+ while i < len(test_data):
+ if test_data[i] == keyword:
+ i += 2
+ else:
+ yield test_data[i]
+ i += 1
+
+ return list(_iterate())
+
+
+def generate_chunks(lst, chunk_size):
+ for i in xrange(0, len(lst), chunk_size):
+ yield lst[i:(i + chunk_size)]
+
+
+def strip_roots(path):
+ for prefix in ["$B/", "$S/"]:
+ if path.startswith(prefix):
+ return path[len(prefix):]
+ return path
+
+
+def to_yesno(x):
+ return "yes" if x else "no"
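
Most of the plugins below parse their macro arguments with sort_by_keywords(); a minimal usage sketch with hypothetical values (assumes build/plugins is importable as a plain directory):

    from _common import sort_by_keywords

    # 'PREFIX' consumes exactly one following argument, 'GLOBAL' consumes none and
    # is recorded as a boolean flag; everything else stays in the flat list.
    flat, kv = sort_by_keywords(
        {'PREFIX': 1, 'GLOBAL': 0},
        ('a.txt', 'PREFIX', 'some/dir', 'b.txt', 'GLOBAL'),
    )
    assert flat == ['a.txt', 'b.txt']
    assert kv == {'PREFIX': ['some/dir'], 'GLOBAL': True}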
diff --git a/build/plugins/_custom_command.py b/build/plugins/_custom_command.py
new file mode 100644
index 0000000000..9692214b22
--- /dev/null
+++ b/build/plugins/_custom_command.py
@@ -0,0 +1,65 @@
+import subprocess
+import sys
+import os
+
+import _common as common
+
+
+class CustomCommand(object):
+ def __setstate__(self, sdict):
+ if isinstance(sdict, tuple):
+ for elem in sdict:
+ if isinstance(elem, dict):
+ for key in elem:
+ setattr(self, key, elem[key])
+
+ self._source_root = None
+ self._build_root = None
+
+ def set_source_root(self, path):
+ self._source_root = path
+
+ def set_build_root(self, path):
+ self._build_root = path
+
+ def call(self, args, **kwargs):
+ cwd = self._get_call_specs('cwd', kwargs)
+ stdout_path = self._get_call_specs('stdout', kwargs)
+
+ resolved_args = []
+
+ for arg in args:
+ resolved_args.append(self.resolve_path(arg))
+
+ if stdout_path:
+ stdout = open(stdout_path, 'wb')
+ else:
+ stdout = None
+
+ env = os.environ.copy()
+ env['ASAN_OPTIONS'] = 'detect_leaks=0'
+
+ rc = subprocess.call(resolved_args, cwd=cwd, stdout=stdout, env=env)
+
+ if stdout:
+ stdout.close()
+ if rc:
+ sys.exit(rc)
+
+ def resolve_path(self, path):
+ return common.resolve_to_abs_path(path, self._source_root, self._build_root)
+
+ def _get_call_specs(self, name, kwargs):
+ if isinstance(kwargs, dict):
+ param = kwargs.get(name, None)
+ if param:
+ return self.resolve_path(param)
+ return None
+
+
+def addrule(*unused):
+ pass
+
+
+def addparser(*unused, **kwargs):
+ pass
diff --git a/build/plugins/_import_wrapper.py b/build/plugins/_import_wrapper.py
new file mode 100644
index 0000000000..883f662314
--- /dev/null
+++ b/build/plugins/_import_wrapper.py
@@ -0,0 +1,24 @@
+try:
+ from ymake import CustomCommand as RealCustomCommand
+ from ymake import addrule
+ from ymake import addparser
+ from ymake import subst
+
+ class CustomCommand(RealCustomCommand):
+ def __init__(self, *args, **kwargs):
+ RealCustomCommand.__init__(*args, **kwargs)
+
+ def resolve_path(self, path):
+ return subst(path)
+
+except ImportError:
+ from _custom_command import CustomCommand # noqa
+ from _custom_command import addrule # noqa
+ from _custom_command import addparser # noqa
+
+
+try:
+ from ymake import engine_version
+except ImportError:
+ def engine_version():
+ return -1
diff --git a/build/plugins/_requirements.py b/build/plugins/_requirements.py
new file mode 100644
index 0000000000..c27635e852
--- /dev/null
+++ b/build/plugins/_requirements.py
@@ -0,0 +1,49 @@
+import _test_const as consts
+
+
+def check_cpu(suite_cpu_requirements, test_size, is_kvm=False):
+ min_cpu_requirements = consts.TestRequirementsConstants.MinCpu
+ max_cpu_requirements = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Cpu)
+ if isinstance(suite_cpu_requirements, str):
+ if all(consts.TestRequirementsConstants.is_all_cpu(req) for req in (max_cpu_requirements, suite_cpu_requirements)):
+ return None
+ return "Wrong 'cpu' requirements: {}, should be in [{}..{}] for {}-size tests".format(suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size)
+
+ if not isinstance(suite_cpu_requirements, int):
+ return "Wrong 'cpu' requirements: {}, should be integer".format(suite_cpu_requirements)
+
+ if suite_cpu_requirements < min_cpu_requirements or suite_cpu_requirements > consts.TestRequirementsConstants.get_cpu_value(max_cpu_requirements):
+ return "Wrong 'cpu' requirement: {}, should be in [{}..{}] for {}-size tests".format(suite_cpu_requirements, min_cpu_requirements, max_cpu_requirements, test_size)
+
+ return None
+
+
+# TODO: Remove is_kvm param when there will be guarantees on RAM
+def check_ram(suite_ram_requirements, test_size, is_kvm=False):
+ if not isinstance(suite_ram_requirements, int):
+ return "Wrong 'ram' requirements: {}, should be integer".format(suite_ram_requirements)
+ min_ram_requirements = consts.TestRequirementsConstants.MinRam
+ max_ram_requirements = consts.MAX_RAM_REQUIREMENTS_FOR_KVM if is_kvm else consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Ram)
+ if suite_ram_requirements < min_ram_requirements or suite_ram_requirements > max_ram_requirements:
+ err_msg = "Wrong 'ram' requirements: {}, should be in [{}..{}] for {}-size tests".format(suite_ram_requirements, min_ram_requirements, max_ram_requirements, test_size)
+ if is_kvm:
+ err_msg += ' with kvm requirements'
+ return err_msg
+ return None
+
+
+def check_ram_disk(suite_ram_disk, test_size, is_kvm=False):
+ min_ram_disk = consts.TestRequirementsConstants.MinRamDisk
+ max_ram_disk = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.RamDisk)
+ if isinstance(suite_ram_disk, str):
+ if all(consts.TestRequirementsConstants.is_all_ram_disk(req) for req in (max_ram_disk, suite_ram_disk)):
+ return None
+ return "Wrong 'ram_disk' requirements: {}, should be in [{}..{}] for {}-size tests".format(suite_ram_disk, 0, max_ram_disk, test_size)
+
+ if not isinstance(suite_ram_disk, int):
+ return "Wrong 'ram_disk' requirements: {}, should be integer".format(suite_ram_disk)
+
+ if suite_ram_disk < min_ram_disk or suite_ram_disk > consts.TestRequirementsConstants.get_ram_disk_value(max_ram_disk):
+ return "Wrong 'ram_disk' requirement: {}, should be in [{}..{}] for {}-size tests".format(suite_ram_disk, min_ram_disk, max_ram_disk, test_size)
+
+ return None
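
A minimal sketch of the contract (hypothetical values; Python 2, as the plugins assume): each checker returns None when the requirement fits the per-size limits from _test_const, otherwise a human-readable error string.

    import _requirements as requirements

    assert requirements.check_cpu(2, 'small') is None   # within [1..4] for small tests
    print(requirements.check_cpu(8, 'small'))
    # Wrong 'cpu' requirement: 8, should be in [1..4] for small-size tests
    print(requirements.check_ram(64, 'small', is_kvm=True))
    # Wrong 'ram' requirements: 64, should be in [1..16] for small-size tests with kvm requirements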
diff --git a/build/plugins/_test_const.py b/build/plugins/_test_const.py
new file mode 100644
index 0000000000..0d03cc3d17
--- /dev/null
+++ b/build/plugins/_test_const.py
@@ -0,0 +1,327 @@
+# coding: utf-8
+import re
+import sys
+
+
+RESTART_TEST_INDICATOR = '##restart-test##'
+INFRASTRUCTURE_ERROR_INDICATOR = '##infrastructure-error##'
+
+RESTART_TEST_INDICATORS = [
+ RESTART_TEST_INDICATOR,
+ "network error",
+]
+
+# testing
+BIN_DIRECTORY = 'bin'
+CANONIZATION_RESULT_FILE_NAME = "canonization_res.json"
+CONSOLE_SNIPPET_LIMIT = 5000
+LIST_NODE_LOG_FILE = "test_list.log"
+LIST_NODE_RESULT_FILE = "test_list.json"
+LIST_RESULT_NODE_LOG_FILE = "list_result.log"
+MAX_FILE_SIZE = 1024 * 1024 * 2 # 2 MB
+MAX_TEST_RESTART_COUNT = 3
+REPORT_SNIPPET_LIMIT = 10000
+SANITIZER_ERROR_RC = 100
+TEST_SUBTEST_SEPARATOR = '::'
+TESTING_OUT_DIR_NAME = "testing_out_stuff"
+TESTING_OUT_TAR_NAME = TESTING_OUT_DIR_NAME + ".tar"
+TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
+TRACE_FILE_NAME = "ytest.report.trace"
+TRUNCATING_IGNORE_FILE_LIST = {TRACE_FILE_NAME, "run_test.log"}
+
+# kvm
+DEFAULT_RAM_REQUIREMENTS_FOR_KVM = 4
+MAX_RAM_REQUIREMENTS_FOR_KVM = 16
+
+# distbuild
+TEST_NODE_FINISHING_TIME = 5 * 60
+DEFAULT_TEST_NODE_TIMEOUT = 15 * 60
+
+# coverage
+COVERAGE_TESTS_TIMEOUT_FACTOR = 1.5
+COVERAGE_RESOLVED_FILE_NAME_PATTERN = "coverage_resolved.{}.json"
+CPP_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("cpp")
+JAVA_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("java")
+PYTHON_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("python")
+CLANG_COVERAGE_TEST_TYPES = ("unittest", "coverage_extractor", "pytest", "py3test", "gtest", "boost_test", "exectest")
+COVERAGE_TABLE_CHUNKS = 20
+COVERAGE_YT_PROXY = "hahn.yt.yandex.net"
+COVERAGE_YT_ROOT_PATH = "//home/codecoverage"
+COVERAGE_YT_TABLE_PREFIX = "datatable"
+
+# fuzzing
+CORPUS_DATA_FILE_NAME = 'corpus.json'
+CORPUS_DATA_ROOT_DIR = 'fuzzing'
+CORPUS_DIR_NAME = 'corpus'
+FUZZING_COMPRESSION_COEF = 1.1
+FUZZING_DEFAULT_TIMEOUT = 3600
+FUZZING_FINISHING_TIME = 600
+FUZZING_TIMEOUT_RE = re.compile(r'(^|\s)-max_total_time=(?P<max_time>\d+)')
+GENERATED_CORPUS_DIR_NAME = 'mined_corpus'
+MAX_CORPUS_RESOURCES_ALLOWED = 5
+
+TEST_TOOL_HOST = 'TEST_TOOL_HOST_RESOURCE_GLOBAL'
+TEST_TOOL_TARGET = 'TEST_TOOL_TARGET_RESOURCE_GLOBAL'
+TEST_TOOL_HOST_LOCAL = 'TEST_TOOL_HOST_LOCAL'
+TEST_TOOL_TARGET_LOCAL = 'TEST_TOOL_TARGET_LOCAL'
+XCODE_TOOLS_RESOURCE = 'XCODE_TOOLS_ROOT_RESOURCE_GLOBAL'
+GO_TOOLS_RESOURCE = 'GO_TOOLS_RESOURCE_GLOBAL'
+LLVM_COV9_RESOURCE = 'LLVM_COV9_RESOURCE_GLOBAL'
+PEP8_PY2_RESOURCE = 'PEP8_PY2_RESOURCE_GLOBAL'
+PEP8_PY3_RESOURCE = 'PEP8_PY3_RESOURCE_GLOBAL'
+FLAKES_PY2_RESOURCE = 'FLAKES_PY2_RESOURCE_GLOBAL'
+FLAKES_PY3_RESOURCE = 'FLAKES_PY3_RESOURCE_GLOBAL'
+FLAKE8_PY2_RESOURCE = 'FLAKE8_PY2_RESOURCE_GLOBAL'
+FLAKE8_PY3_RESOURCE = 'FLAKE8_PY3_RESOURCE_GLOBAL'
+
+
+class Enum(object):
+
+ @classmethod
+ def enumerate(cls):
+ return [v for k, v in cls.__dict__.items() if not k.startswith("_")]
+
+
+class TestRequirements(Enum):
+ Container = 'container'
+ Cpu = 'cpu'
+ DiskUsage = 'disk_usage'
+ Ram = 'ram'
+ RamDisk = 'ram_disk'
+ SbVault = 'sb_vault'
+ Network = 'network'
+ Dns = 'dns'
+ Kvm = 'kvm'
+
+
+class TestRequirementsConstants(Enum):
+ All = 'all'
+ AllCpuValue = 50
+ AllRamDiskValue = 50
+ MinCpu = 1
+ MinRam = 1
+ MinRamDisk = 0
+
+ @classmethod
+ def is_all_cpu(cls, value):
+ return value == cls.All
+
+ @classmethod
+ def get_cpu_value(cls, value):
+ return cls.AllCpuValue if cls.is_all_cpu(value) else value
+
+ @classmethod
+ def is_all_ram_disk(cls, value):
+ return value == cls.All
+
+ @classmethod
+ def get_ram_disk_value(cls, value):
+ return cls.AllRamDiskValue if cls.is_all_ram_disk(value) else value
+
+
+class TestSize(Enum):
+ Small = 'small'
+ Medium = 'medium'
+ Large = 'large'
+
+ DefaultTimeouts = {
+ Small: 60,
+ Medium: 600,
+ Large: 3600,
+ }
+
+ DefaultPriorities = {
+ Small: -1,
+ Medium: -2,
+ Large: -3,
+ }
+
+ DefaultRequirements = {
+ Small: {
+ TestRequirements.Cpu: 1,
+ TestRequirements.Ram: 32,
+ # TestRequirements.Ram: 2,
+ TestRequirements.RamDisk: 0,
+ },
+ Medium: {
+ TestRequirements.Cpu: 1,
+ TestRequirements.Ram: 32,
+ # TestRequirements.Ram: 4,
+ TestRequirements.RamDisk: 0,
+ },
+ Large: {
+ TestRequirements.Cpu: 1,
+ TestRequirements.Ram: 32,
+ # TestRequirements.Ram: 8,
+ TestRequirements.RamDisk: 0,
+ },
+ }
+
+ MaxRequirements = {
+ Small: {
+ TestRequirements.Cpu: 4,
+ TestRequirements.Ram: 32,
+ # TestRequirements.Ram: 4,
+ TestRequirements.RamDisk: 4,
+ },
+ Medium: {
+ TestRequirements.Cpu: 4,
+ # TestRequirements.Cpu: 8,
+ TestRequirements.Ram: 32,
+ # TestRequirements.Ram: 16,
+ TestRequirements.RamDisk: 4,
+ },
+ Large: {
+ TestRequirements.Cpu: 4,
+ TestRequirements.Ram: 32,
+ TestRequirements.RamDisk: 4,
+ },
+ }
+
+ @classmethod
+ def sizes(cls):
+ return cls.DefaultTimeouts.keys()
+
+ @classmethod
+ def get_default_timeout(cls, size):
+ if size in cls.DefaultTimeouts:
+ return cls.DefaultTimeouts[size]
+ raise Exception("Unknown test size '{}'".format(size))
+
+ @classmethod
+ def get_default_priorities(cls, size):
+ if size in cls.DefaultPriorities:
+ return cls.DefaultPriorities[size]
+ raise Exception("Unknown test size '{}'".format(size))
+
+ @classmethod
+ def get_default_requirements(cls, size):
+ if size in cls.DefaultRequirements:
+ return cls.DefaultRequirements[size]
+ raise Exception("Unknown test size '{}'".format(size))
+
+ @classmethod
+ def get_max_requirements(cls, size):
+ if size in cls.MaxRequirements:
+ return cls.MaxRequirements[size]
+ raise Exception("Unknown test size '{}'".format(size))
+
+
+class TestRunExitCode(Enum):
+ Skipped = 2
+ Failed = 3
+ TimeOut = 10
+ InfrastructureError = 12
+
+
+class YaTestTags(Enum):
+ Manual = "ya:manual"
+ Notags = "ya:notags"
+ Norestart = "ya:norestart"
+ Dirty = "ya:dirty"
+ Noretries = "ya:noretries"
+ Fat = "ya:fat"
+ RunWithAsserts = "ya:relwithdebinfo"
+ Privileged = "ya:privileged"
+ ExoticPlatform = "ya:exotic_platform"
+ NotAutocheck = "ya:not_autocheck"
+
+
+class Status(object):
+ GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(1, 8)
+ SKIPPED = -100
+ NOT_LAUNCHED = -200
+ CANON_DIFF = -300
+ DESELECTED = -400
+ INTERNAL = -sys.maxint
+ FLAKY = -50
+ BY_NAME = {'good': GOOD, 'fail': FAIL, 'xfail': XFAIL, 'xpass': XPASS, 'missing': MISSING, 'crashed': CRASHED,
+ 'skipped': SKIPPED, 'flaky': FLAKY, 'not_launched': NOT_LAUNCHED, 'timeout': TIMEOUT, 'diff': CANON_DIFF,
+ 'internal': INTERNAL, 'deselected': DESELECTED}
+ TO_STR = {GOOD: 'good', FAIL: 'fail', XFAIL: 'xfail', XPASS: 'xpass', MISSING: 'missing', CRASHED: 'crashed',
+ SKIPPED: 'skipped', FLAKY: 'flaky', NOT_LAUNCHED: 'not_launched', TIMEOUT: 'timeout', CANON_DIFF: 'diff',
+ INTERNAL: 'internal', DESELECTED: 'deselected'}
+
+
+class _Colors(object):
+
+ _NAMES = [
+ "blue",
+ "cyan",
+ "default",
+ "green",
+ "grey",
+ "magenta",
+ "red",
+ "white",
+ "yellow",
+ ]
+ _PREFIXES = ["", "light", "dark"]
+
+ def __init__(self):
+ self._table = {}
+ for prefix in self._PREFIXES:
+ for value in self._NAMES:
+ name = value
+ if prefix:
+ name = "{}_{}".format(prefix, value)
+ value = "{}-{}".format(prefix, value)
+ self.__add_color(name.upper(), value)
+
+ def __add_color(self, name, value):
+ self._table[name] = value
+ self.__setattr__(name, value)
+
+
+Colors = _Colors()
+
+
+class _Highlight(object):
+
+ _MARKERS = {
+ # special
+ "RESET": "rst",
+
+ "IMPORTANT": "imp",
+ "UNIMPORTANT": "unimp",
+ "BAD": "bad",
+ "WARNING": "warn",
+ "GOOD": "good",
+ "PATH": "path",
+ "ALTERNATIVE1": "alt1",
+ "ALTERNATIVE2": "alt2",
+ "ALTERNATIVE3": "alt3",
+ }
+
+ def __init__(self):
+ # setting attributes because __getattr__ is much slower
+ for attr, value in self._MARKERS.items():
+ self.__setattr__(attr, value)
+
+
+Highlight = _Highlight()
+
+
+class _StatusColorMap(object):
+
+ _MAP = {
+ 'good': Highlight.GOOD,
+ 'fail': Highlight.BAD,
+ 'missing': Highlight.ALTERNATIVE1,
+ 'crashed': Highlight.WARNING,
+ 'skipped': Highlight.UNIMPORTANT,
+ 'not_launched': Highlight.BAD,
+ 'timeout': Highlight.BAD,
+ 'flaky': Highlight.ALTERNATIVE3,
+ 'xfail': Highlight.WARNING,
+ 'xpass': Highlight.WARNING,
+ 'diff': Highlight.BAD,
+ 'internal': Highlight.BAD,
+ 'deselected': Highlight.UNIMPORTANT,
+ }
+
+ def __getitem__(self, item):
+ return self._MAP[item]
+
+
+StatusColorMap = _StatusColorMap()
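
A few lookups against these constants, as a minimal sketch (Python 2, because of sys.maxint above):

    import _test_const as consts

    assert consts.TestSize.get_default_timeout(consts.TestSize.Medium) == 600   # seconds
    assert consts.TestSize.get_max_requirements('small')[consts.TestRequirements.Cpu] == 4
    assert consts.Status.TO_STR[consts.Status.TIMEOUT] == 'timeout'
    assert consts.Colors.LIGHT_RED == 'light-red'
    assert consts.StatusColorMap['fail'] == 'bad'   # Highlight.BAD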
diff --git a/build/plugins/_unpickler.py b/build/plugins/_unpickler.py
new file mode 100644
index 0000000000..e01e7b3118
--- /dev/null
+++ b/build/plugins/_unpickler.py
@@ -0,0 +1,42 @@
+import sys
+
+sys.dont_write_bytecode = True
+
+import argparse
+import base64
+try:
+ import cPickle as pickle
+except Exception:
+ import pickle
+
+import _common as common
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--data', help='pickled object of TCustomCommand class', required=True)
+ parser.add_argument('--src-root', help='$S real path', required=True)
+ parser.add_argument('--build-root', help='$B real path', required=True)
+ parser.add_argument('--tools', help='binaries needed by command', required=True, nargs='+')
+ args, unknown_args = parser.parse_known_args()
+
+ encoded_cmd = args.data
+ src_root = args.src_root
+ build_root = args.build_root
+ tools = args.tools
+
+ assert (int(tools[0]) == len(tools[1:])), "tools quantity != tools number!"
+
+ cmd_object = pickle.loads(base64.b64decode(encoded_cmd))
+
+ cmd_object.set_source_root(src_root)
+ cmd_object.set_build_root(build_root)
+
+ if len(tools[1:]) == 0:
+ cmd_object.run(unknown_args, common.get_interpreter_path())
+ else:
+ cmd_object.run(unknown_args, *tools[1:])
+
+
+if __name__ == '__main__':
+ main()
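
A minimal round-trip sketch of what this script expects: the --data payload is a base64-encoded pickle of a command object, --src-root/--build-root become the roots passed to it, and the first value after --tools is the number of tool paths that follow. DemoCommand below is a hypothetical stand-in for the real pickled class.

    import base64
    import pickle


    class DemoCommand(object):  # hypothetical stand-in for the pickled command class
        def set_source_root(self, path):
            self.source_root = path

        def set_build_root(self, path):
            self.build_root = path

        def run(self, extra_args, *tools):
            print(extra_args, tools)


    payload = base64.b64encode(pickle.dumps(DemoCommand()))   # what is passed via --data
    command = pickle.loads(base64.b64decode(payload))         # what _unpickler.py reconstructs
    command.set_source_root('/arcadia')                       # --src-root
    command.set_build_root('/arcadia/build')                  # --build-root
    command.run(['extra-arg'], '/arcadia/build/tools/some_tool')   # unknown_args + tools[1:]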
diff --git a/build/plugins/_xsyn_includes.py b/build/plugins/_xsyn_includes.py
new file mode 100644
index 0000000000..8d33cea2f0
--- /dev/null
+++ b/build/plugins/_xsyn_includes.py
@@ -0,0 +1,60 @@
+def get_include_callback():
+ """
+    .. function: get_include_callback returns a function that extracts xsyn include paths from a single DOM element.
+
+    It takes no arguments; the returned callback is applied to every DOM element during traversal.
+ """
+ def get_include(element):
+ """
+ .. function: get_include returns list of includes from this DOM element.
+
+ :param element DOM element.
+ """
+ res = []
+ if element.nodeType == element.ELEMENT_NODE and element.nodeName == "parse:include":
+ attrs = element.attributes
+ for i in xrange(attrs.length):
+ attr = attrs.item(i)
+ if attr.nodeName == "path":
+ include_filename = attr.nodeValue
+ res.append(include_filename)
+ return res
+
+ return get_include
+
+
+def traverse_xsyn(element, on_element):
+ """
+    .. function: traverse_xsyn traverses the element tree and returns the concatenated results of calling on_element on each element.
+
+ :param element element in DOM.
+ :param on_element callback on element that returns list of values.
+ """
+ res = on_element(element)
+ for child in element.childNodes:
+ child_results = traverse_xsyn(child, on_element)
+ res += child_results
+ return res
+
+
+def process_xsyn(filepath, on_element):
+ """
+    .. function: process_xsyn processes an xsyn file and returns the concatenated results of calling on_element on each DOM element.
+
+ :param filepath path to xsyn file
+ :param on_element callback called on each element in xsyn that returns list of values.
+
+ """
+
+    # keep a stack of file paths in case on_element calls process_xsyn recursively
+ with open(filepath) as xsyn_file:
+ from xml.dom.minidom import parse
+ tree = parse(xsyn_file)
+ tree.normalize()
+ res = traverse_xsyn(tree, on_element)
+ return res
+
+
+def get_all_includes(filepath):
+ callback = get_include_callback()
+ return process_xsyn(filepath, callback)
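
A minimal sketch with a hypothetical xsyn fragment (Python 2, matching the xrange usage above): every parse:include element contributes its path attribute to the result.

    import tempfile

    from _xsyn_includes import get_all_includes

    with tempfile.NamedTemporaryFile(suffix='.xsyn', delete=False) as f:
        f.write('<?xml version="1.0"?>\n'
                '<scheme xmlns:parse="urn:hypothetical-parse-namespace">\n'
                '    <parse:include path="common.xsyn"/>\n'
                '</scheme>\n')

    print(get_all_includes(f.name))   # ['common.xsyn']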
diff --git a/build/plugins/build_mn_files.py b/build/plugins/build_mn_files.py
new file mode 100644
index 0000000000..4da76f1852
--- /dev/null
+++ b/build/plugins/build_mn_files.py
@@ -0,0 +1,29 @@
+from os.path import basename, splitext
+
+
+def on_build_mns_files(unit, *args):
+ files = []
+ name = ''
+ ranking_suffix = ''
+ check = ''
+ index = 0
+ fml_unused_tool = ''
+ while index < len(args):
+ if args[index] == 'NAME':
+ index += 1
+ name = args[index]
+ elif args[index] == 'RANKING_SUFFIX':
+ index += 1
+ ranking_suffix = args[index]
+ elif args[index] == 'CHECK':
+ check = 'CHECK'
+ fml_unused_tool = unit.get('FML_UNUSED_TOOL') or '$FML_UNUSED_TOOL'
+ else:
+ files.append(args[index])
+ index += 1
+
+ for filename in files:
+ file_basename, _ = splitext(basename(filename))
+ asmdataname = "staticMn{0}{1}Ptr".format(ranking_suffix, file_basename)
+ output_name = 'mn.staticMn{0}{1}Ptr.cpp'.format(ranking_suffix, file_basename)
+ unit.on_build_mns_file([filename, name, output_name, ranking_suffix, check, fml_unused_tool, asmdataname])
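
A minimal sketch with a hypothetical unit stub: every model file listed after the keywords turns into one on_build_mns_file() call with a generated mn.staticMn<Suffix><Basename>Ptr.cpp output.

    from build_mn_files import on_build_mns_files


    class FakeUnit(object):  # hypothetical stand-in for the ymake unit object
        def get(self, name):
            return ''

        def on_build_mns_file(self, args):
            print(args)


    on_build_mns_files(FakeUnit(), 'NAME', 'MnModels', 'RANKING_SUFFIX', 'Web', 'CHECK', 'model.info')
    # ['model.info', 'MnModels', 'mn.staticMnWebmodelPtr.cpp', 'Web', 'CHECK', '$FML_UNUSED_TOOL', 'staticMnWebmodelPtr']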
diff --git a/build/plugins/bundle.py b/build/plugins/bundle.py
new file mode 100644
index 0000000000..0bec8254ee
--- /dev/null
+++ b/build/plugins/bundle.py
@@ -0,0 +1,22 @@
+import os
+
+
+def onbundle(unit, *args):
+ """
+ @usage BUNDLE(<Dir [NAME Name]>...)
+
+ Brings build artefact from module Dir under optional Name to the current module (e.g. UNION)
+ If NAME is not specified, the name of the Dir's build artefact will be preserved
+ It makes little sense to specify BUNDLE on non-final targets and so this may stop working without prior notice.
+ Bundle on multimodule will select final target among multimodule variants and will fail if there are none or more than one.
+ """
+ i = 0
+ while i < len(args):
+ if i + 2 < len(args) and args[i + 1] == "NAME":
+ target, name = args[i], args[i + 2]
+ i += 3
+ else:
+ target, name = args[i], os.path.basename(args[i])
+ i += 1
+
+ unit.on_bundle_target([target, name])
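
A minimal sketch with a hypothetical unit stub: an explicit NAME renames the bundled artefact, otherwise the basename of the directory is used.

    from bundle import onbundle


    class FakeUnit(object):  # hypothetical stand-in for the ymake unit object
        def on_bundle_target(self, args):
            print(args)


    onbundle(FakeUnit(), 'devtools/some_tool', 'NAME', 'renamed_tool', 'devtools/another_tool')
    # ['devtools/some_tool', 'renamed_tool']
    # ['devtools/another_tool', 'another_tool']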
diff --git a/build/plugins/code_generator.py b/build/plugins/code_generator.py
new file mode 100644
index 0000000000..ca8bb18c15
--- /dev/null
+++ b/build/plugins/code_generator.py
@@ -0,0 +1,45 @@
+import re
+import os
+
+import _import_wrapper as iw
+
+pattern = re.compile(r"#include\s*[<\"](?P<INDUCED>[^>\"]+)[>\"]|(?:@|{@)\s*(?:import|include|from)\s*[\"'](?P<INCLUDE>[^\"']+)[\"']")
+
+
+class CodeGeneratorTemplateParser(object):
+ def __init__(self, path, unit):
+ self._path = path
+ retargeted = os.path.join(unit.path(), os.path.relpath(path, unit.resolve(unit.path())))
+ with open(path, 'rb') as f:
+ includes, induced = CodeGeneratorTemplateParser.parse_includes(f.readlines())
+ self._includes = unit.resolve_include([retargeted] + includes) if includes else []
+ self._induced = unit.resolve_include([retargeted] + induced) if induced else []
+
+ @staticmethod
+ def parse_includes(lines):
+ includes = []
+ induced = []
+
+ for line in lines:
+ for match in pattern.finditer(line):
+ type = match.lastgroup
+ if type == 'INCLUDE':
+ includes.append(match.group(type))
+ elif type == 'INDUCED':
+ induced.append(match.group(type))
+ else:
+ raise Exception("Unexpected match! Perhaps it is a result of an error in pattern.")
+ return (includes, induced)
+
+ def includes(self):
+ return self._includes
+
+ def induced_deps(self):
+ return {
+ 'h+cpp': self._induced
+ }
+
+
+def init():
+ iw.addparser('markettemplate', CodeGeneratorTemplateParser)
+ iw.addparser('macro', CodeGeneratorTemplateParser)
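
A minimal sketch of the pattern above with hypothetical template lines (assumes build/plugins is importable): #include directives become induced dependencies, while template-level @import/@include/@from directives become plain includes.

    from code_generator import CodeGeneratorTemplateParser

    includes, induced = CodeGeneratorTemplateParser.parse_includes([
        '#include <util/generic/string.h>\n',
        '@import "market/some/template.macro"\n',
    ])
    assert includes == ['market/some/template.macro']
    assert induced == ['util/generic/string.h']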
diff --git a/build/plugins/copy_files_to_build_prefix.py b/build/plugins/copy_files_to_build_prefix.py
new file mode 100644
index 0000000000..c8a6e07511
--- /dev/null
+++ b/build/plugins/copy_files_to_build_prefix.py
@@ -0,0 +1,36 @@
+from _common import sort_by_keywords
+
+
+SOURCE_ROOT = '${ARCADIA_ROOT}/'
+BUILD_ROOT = '${ARCADIA_BUILD_ROOT}/'
+CURDIR = '${CURDIR}/'
+BINDIR = '${BINDIR}/'
+
+
+def oncopy_files_to_build_prefix(unit, *args):
+ keywords = {'PREFIX': 1, 'GLOBAL': 0}
+ # NB! keyword 'GLOBAL' is a way to skip this word from the list of files
+
+ flat_args, spec_args = sort_by_keywords(keywords, args)
+ prefix = spec_args['PREFIX'][0] if 'PREFIX' in spec_args else ''
+
+ if len(prefix) > 0:
+ build_prefix = '/'.join([BUILD_ROOT, prefix])
+ else:
+ build_prefix = BUILD_ROOT
+
+ for arg in flat_args:
+ if arg.startswith(build_prefix):
+ # nothing to do
+ pass
+ elif len(prefix) > 0 and arg.startswith(BUILD_ROOT):
+ unit.oncopy_file([arg, '{}/{}'.format(build_prefix, arg[len(BUILD_ROOT):])])
+ elif arg.startswith(SOURCE_ROOT):
+ unit.oncopy_file([arg, '{}/{}'.format(build_prefix, arg[len(SOURCE_ROOT):])])
+ else:
+ offset = 0
+ if arg.startswith(BINDIR):
+ offset = len(BINDIR)
+ elif arg.startswith(CURDIR):
+ offset = len(CURDIR)
+ unit.oncopy_file([arg, '{}/{}/{}'.format(build_prefix, unit.get(['MODDIR']), arg[offset:])])
diff --git a/build/plugins/cp.py b/build/plugins/cp.py
new file mode 100644
index 0000000000..5c663a3bdd
--- /dev/null
+++ b/build/plugins/cp.py
@@ -0,0 +1,30 @@
+import os
+
+from _common import sort_by_keywords
+
+
+def oncopy(unit, *args):
+ keywords = {'RESULT': 1, 'KEEP_DIR_STRUCT': 0, 'DESTINATION': 1, 'FROM': 1}
+
+ flat_args, spec_args = sort_by_keywords(keywords, args)
+
+ dest_dir = spec_args['DESTINATION'][0] if 'DESTINATION' in spec_args else ''
+ from_dir = spec_args['FROM'][0] if 'FROM' in spec_args else ''
+ keep_struct = 'KEEP_DIR_STRUCT' in spec_args
+ save_in_var = 'RESULT' in spec_args
+ targets = []
+
+ for source in flat_args:
+ rel_path = ''
+ path_list = source.split(os.sep)
+ filename = path_list[-1]
+ if keep_struct:
+ if path_list[:-1]:
+ rel_path = os.path.join(*path_list[:-1])
+ source_path = os.path.join(from_dir, rel_path, filename)
+ target_path = os.path.join(dest_dir, rel_path, filename)
+ if save_in_var:
+ targets.append(target_path)
+ unit.oncopy_file([source_path, target_path])
+ if save_in_var:
+ unit.set([spec_args["RESULT"][0], " ".join(targets)])
diff --git a/build/plugins/cpp_style.py b/build/plugins/cpp_style.py
new file mode 100644
index 0000000000..3ab78b7320
--- /dev/null
+++ b/build/plugins/cpp_style.py
@@ -0,0 +1,19 @@
+import os
+
+from _common import sort_by_keywords
+
+
+def on_style(unit, *args):
+ def it():
+ yield 'DONT_PARSE'
+
+ for f in args:
+ f = f[len('${ARCADIA_ROOT}') + 1:]
+
+ if '/generated/' in f:
+ continue
+
+ yield f
+ yield '/cpp_style/files/' + f
+
+ unit.onresource(list(it()))
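
A minimal sketch with a hypothetical unit stub: each styled source is registered twice, under its arcadia-relative path and under /cpp_style/files/, with DONT_PARSE prepended.

    from cpp_style import on_style


    class FakeUnit(object):  # hypothetical stand-in for the ymake unit object
        def onresource(self, args):
            print(args)


    on_style(FakeUnit(), '${ARCADIA_ROOT}/library/cpp/foo/bar.cpp')
    # ['DONT_PARSE', 'library/cpp/foo/bar.cpp', '/cpp_style/files/library/cpp/foo/bar.cpp']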
diff --git a/build/plugins/create_init_py.py b/build/plugins/create_init_py.py
new file mode 100644
index 0000000000..e41a4d22df
--- /dev/null
+++ b/build/plugins/create_init_py.py
@@ -0,0 +1,15 @@
+import os
+
+from _common import sort_by_keywords
+
+
+def oncreate_init_py_structure(unit, *args):
+ if unit.get('DISTBUILD'):
+ return
+ target_dir = unit.get('PY_PROTOS_FOR_DIR')
+ path_list = target_dir.split(os.path.sep)[1:]
+ inits = [os.path.join("${ARCADIA_BUILD_ROOT}", '__init__.py')]
+ for i in range(1, len(path_list) + 1):
+ inits.append(os.path.join("${ARCADIA_BUILD_ROOT}", os.path.join(*path_list[0:i]), '__init__.py'))
+ unit.ontouch(inits)
+
diff --git a/build/plugins/credits.py b/build/plugins/credits.py
new file mode 100644
index 0000000000..0ce1659326
--- /dev/null
+++ b/build/plugins/credits.py
@@ -0,0 +1,22 @@
+from _common import rootrel_arc_src
+
+
+def oncredits_disclaimer(unit, *args):
+ if unit.get('WITH_CREDITS'):
+ unit.message(["warn", "CREDITS WARNING: {}".format(' '.join(args))])
+
+def oncheck_contrib_credits(unit, *args):
+ module_path = rootrel_arc_src(unit.path(), unit)
+ excepts = set()
+ if 'EXCEPT' in args:
+ args = list(args)
+ except_pos = args.index('EXCEPT')
+ excepts = set(args[except_pos + 1:])
+ args = args[:except_pos]
+ for arg in args:
+ if module_path.startswith(arg) and not unit.get('CREDITS_TEXTS_FILE') and not unit.get('NO_CREDITS_TEXTS_FILE'):
+ for ex in excepts:
+ if module_path.startswith(ex):
+ break
+ else:
+ unit.message(["error", "License texts not found. See https://st.yandex-team.ru/DTCC-324"])
diff --git a/build/plugins/docs.py b/build/plugins/docs.py
new file mode 100644
index 0000000000..760fe3af7f
--- /dev/null
+++ b/build/plugins/docs.py
@@ -0,0 +1,44 @@
+import json
+
+
+def extract_macro_calls(unit, macro_value_name):
+ if not unit.get(macro_value_name):
+ return []
+
+ return filter(None, unit.get(macro_value_name).replace('$' + macro_value_name, '').split())
+
+
+def macro_calls_to_dict(unit, calls):
+ def split_args(arg):
+ if arg is None:
+ return None
+
+ kv = filter(None, arg.split('='))
+ if len(kv) != 2:
+ unit.message(['error', 'Invalid variables specification "{}": value expected to be in form %name%=%value% (with no spaces)'.format(arg)])
+ return None
+
+ return kv
+
+ return dict(filter(None, map(split_args, calls)))
+
+
+def onprocess_docs(unit, *args):
+ build_tool = unit.get('_DOCS_BUILDER_VALUE')
+ if build_tool:
+ if build_tool not in ['mkdocs', 'yfm']:
+ unit.message(['error', 'Unsupported build tool {}'.format(build_tool)])
+ else:
+ build_tool = 'yfm'
+ unit.ondocs_builder([build_tool])
+ if build_tool == 'yfm' and unit.enabled('_DOCS_USE_PLANTUML'):
+ unit.on_docs_yfm_use_plantuml([])
+ orig_variables = macro_calls_to_dict(unit, extract_macro_calls(unit, '_DOCS_VARS_VALUE'))
+ variables = {k: unit.get(k) or v for k, v in orig_variables.items()}
+ if variables:
+ if build_tool == 'mkdocs':
+ unit.set(['_DOCS_VARS_FLAG', ' '.join(['--var {}={}'.format(k, v) for k, v in variables.items()])])
+ elif build_tool == 'yfm':
+ unit.set(['_DOCS_VARS_FLAG', '--vars {}'.format(json.dumps(json.dumps(variables, sort_keys=True)))])
+ else:
+ assert False, 'Unexpected build_tool value: [{}]'.format(build_tool)
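
A minimal sketch of the yfm branch above, with hypothetical variables: the mapping is JSON-encoded twice so that the whole --vars argument survives later command-line splitting as a single token.

    import json

    variables = {'audience': 'external', 'lang': 'ru'}
    flag = '--vars {}'.format(json.dumps(json.dumps(variables, sort_keys=True)))
    # flag: --vars "{\"audience\": \"external\", \"lang\": \"ru\"}"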
diff --git a/build/plugins/files.py b/build/plugins/files.py
new file mode 100644
index 0000000000..78a6fe6169
--- /dev/null
+++ b/build/plugins/files.py
@@ -0,0 +1,5 @@
+def onfiles(unit, *args):
+ args = list(args)
+ for arg in args:
+ if not arg.startswith('${ARCADIA_BUILD_ROOT}'):
+ unit.oncopy_file([arg, arg])
diff --git a/build/plugins/gobuild.py b/build/plugins/gobuild.py
new file mode 100644
index 0000000000..8df96ebc55
--- /dev/null
+++ b/build/plugins/gobuild.py
@@ -0,0 +1,309 @@
+import base64
+import itertools
+import md5
+import os
+from _common import rootrel_arc_src, tobuilddir
+import ymake
+
+
+runtime_cgo_path = os.path.join('runtime', 'cgo')
+runtime_msan_path = os.path.join('runtime', 'msan')
+runtime_race_path = os.path.join('runtime', 'race')
+arc_project_prefix = 'a.yandex-team.ru/'
+import_runtime_cgo_false = {
+ 'norace': (runtime_cgo_path, runtime_msan_path, runtime_race_path),
+ 'race': (runtime_cgo_path, runtime_msan_path),
+}
+import_syscall_false = {
+ 'norace': (runtime_cgo_path),
+ 'race': (runtime_cgo_path, runtime_race_path),
+}
+
+
+def get_import_path(unit):
+ # std_lib_prefix = unit.get('GO_STD_LIB_PREFIX')
+    # unit.get() doesn't evaluate the value of the variable, so the line above doesn't really work
+ std_lib_prefix = unit.get('GOSTD') + '/'
+ arc_project_prefix = unit.get('GO_ARCADIA_PROJECT_PREFIX')
+ vendor_prefix = unit.get('GO_CONTRIB_PROJECT_PREFIX')
+
+ module_path = rootrel_arc_src(unit.path(), unit)
+ assert len(module_path) > 0
+ import_path = module_path.replace('\\', '/')
+ if import_path.startswith(std_lib_prefix):
+ import_path = import_path[len(std_lib_prefix):]
+ elif import_path.startswith(vendor_prefix):
+ import_path = import_path[len(vendor_prefix):]
+ else:
+ import_path = arc_project_prefix + import_path
+ assert len(import_path) > 0
+ return import_path
+
+
+def get_appended_values(unit, key):
+ value = []
+ raw_value = unit.get(key)
+ if raw_value:
+ value = filter(lambda x: len(x) > 0, raw_value.split(' '))
+ assert len(value) == 0 or value[0] == '$' + key
+ return value[1:] if len(value) > 0 else value
+
+
+def compare_versions(version1, version2):
+ def last_index(version):
+ index = version.find('beta')
+ return len(version) if index < 0 else index
+
+ v1 = tuple(x.zfill(8) for x in version1[:last_index(version1)].split('.'))
+ v2 = tuple(x.zfill(8) for x in version2[:last_index(version2)].split('.'))
+ if v1 == v2:
+ return 0
+ return 1 if v1 < v2 else -1
+
+
+def need_compiling_runtime(import_path, gostd_version):
+ return import_path in ('runtime', 'reflect', 'syscall') or \
+ import_path.startswith('runtime/internal/') or \
+ compare_versions('1.17', gostd_version) >= 0 and import_path == 'internal/bytealg'
+
+
+def go_package_name(unit):
+ name = unit.get('GO_PACKAGE_VALUE')
+ if not name:
+ name = unit.get('GO_TEST_IMPORT_PATH')
+ if name:
+ name = os.path.basename(os.path.normpath(name))
+ elif unit.get('MODULE_TYPE') == 'PROGRAM':
+ name = 'main'
+ else:
+ name = unit.get('REALPRJNAME')
+ return name
+
+
+def need_lint(path):
+ return not path.startswith('$S/vendor/') and not path.startswith('$S/contrib/')
+
+
+def on_go_process_srcs(unit):
+ """
+ _GO_PROCESS_SRCS() macro processes only 'CGO' files. All remaining *.go files
+ and other input files are currently processed by a link command of the
+ GO module (GO_LIBRARY, GO_PROGRAM)
+ """
+
+ srcs_files = get_appended_values(unit, '_GO_SRCS_VALUE')
+
+ asm_files = []
+ c_files = []
+ cxx_files = []
+ ev_files = []
+ fbs_files = []
+ go_files = []
+ in_files = []
+ proto_files = []
+ s_files = []
+ syso_files = []
+
+ classifed_files = {
+ '.c': c_files,
+ '.cc': cxx_files,
+ '.cpp': cxx_files,
+ '.cxx': cxx_files,
+ '.ev': ev_files,
+ '.fbs': fbs_files,
+ '.go': go_files,
+ '.in': in_files,
+ '.proto': proto_files,
+ '.s': asm_files,
+ '.syso': syso_files,
+ '.C': cxx_files,
+ '.S': s_files,
+ }
+
+    # Classify files specified in the _GO_SRCS() macro by extension and process the CGO_EXPORT keyword,
+    # which may precede C/C++ files only
+ is_cgo_export = False
+ for f in srcs_files:
+ _, ext = os.path.splitext(f)
+ ext_files = classifed_files.get(ext)
+ if ext_files is not None:
+ if is_cgo_export:
+ is_cgo_export = False
+ if ext in ('.c', '.cc', '.cpp', '.cxx', '.C'):
+ unit.oncopy_file_with_context([f, f, 'OUTPUT_INCLUDES', '${BINDIR}/_cgo_export.h'])
+ f = '${BINDIR}/' + f
+ else:
+ ymake.report_configure_error('Unmatched CGO_EXPORT keyword in SRCS() macro')
+ ext_files.append(f)
+ elif f == 'CGO_EXPORT':
+ is_cgo_export = True
+ else:
+            # FIXME(snermolaev): we can report unsupported files for _GO_SRCS here
+ pass
+ if is_cgo_export:
+ ymake.report_configure_error('Unmatched CGO_EXPORT keyword in SRCS() macro')
+
+ for f in go_files:
+ if f.endswith('_test.go'):
+ ymake.report_configure_error('file {} must be listed in GO_TEST_SRCS() or GO_XTEST_SRCS() macros'.format(f))
+ go_test_files = get_appended_values(unit, '_GO_TEST_SRCS_VALUE')
+ go_xtest_files = get_appended_values(unit, '_GO_XTEST_SRCS_VALUE')
+ for f in go_test_files + go_xtest_files:
+ if not f.endswith('_test.go'):
+ ymake.report_configure_error('file {} should not be listed in GO_TEST_SRCS() or GO_XTEST_SRCS() macros'.format(f))
+
+ is_test_module = unit.enabled('GO_TEST_MODULE')
+
+ # Add gofmt style checks
+ if unit.enabled('_GO_FMT_ADD_CHECK'):
+ resolved_go_files = []
+ go_source_files = [] if is_test_module and unit.get(['GO_TEST_FOR_DIR']) else go_files
+ for path in itertools.chain(go_source_files, go_test_files, go_xtest_files):
+ if path.endswith('.go'):
+ resolved = unit.resolve_arc_path([path])
+ if resolved != path and need_lint(resolved):
+ resolved_go_files.append(resolved)
+ if resolved_go_files:
+ basedirs = {}
+ for f in resolved_go_files:
+ basedir = os.path.dirname(f)
+ if basedir not in basedirs:
+ basedirs[basedir] = []
+ basedirs[basedir].append(f)
+ for basedir in basedirs:
+ unit.onadd_check(['gofmt'] + basedirs[basedir])
+
+ # Go coverage instrumentation (NOTE! go_files list is modified here)
+ if is_test_module and unit.enabled('GO_TEST_COVER'):
+ cover_info = []
+
+ for f in go_files:
+ if f.endswith('_test.go'):
+ continue
+ cover_var = 'GoCover' + base64.b32encode(f).rstrip('=')
+ cover_file = unit.resolve_arc_path(f)
+ unit.on_go_gen_cover_go([cover_file, cover_var])
+ if cover_file.startswith('$S/'):
+ cover_file = arc_project_prefix + cover_file[3:]
+ cover_info.append('{}:{}'.format(cover_var, cover_file))
+
+ # go_files should be empty now since the initial list shouldn't contain
+ # any non-go or go test file. The value of go_files list will be used later
+ # to update the value of _GO_SRCS_VALUE
+ go_files = []
+ unit.set(['GO_COVER_INFO_VALUE', ' '.join(cover_info)])
+
+ # We have cleaned up the list of files from _GO_SRCS_VALUE var and we have to update
+ # the value since it is used in module command line
+ unit.set(['_GO_SRCS_VALUE', ' '.join(itertools.chain(go_files, asm_files, syso_files))])
+
+ unit_path = unit.path()
+
+ # Add go vet check
+ if unit.enabled('_GO_VET_ADD_CHECK') and need_lint(unit_path):
+ vet_report_file_name = os.path.join(unit_path, '{}{}'.format(unit.filename(), unit.get('GO_VET_REPORT_EXT')))
+ unit.onadd_check(["govet", '$(BUILD_ROOT)/' + tobuilddir(vet_report_file_name)[3:]])
+
+ for f in ev_files:
+ ev_proto_file = '{}.proto'.format(f)
+ unit.oncopy_file_with_context([f, ev_proto_file])
+ proto_files.append(ev_proto_file)
+
+ # Process .proto files
+ for f in proto_files:
+ unit.on_go_proto_cmd(f)
+
+ # Process .fbs files
+ for f in fbs_files:
+ unit.on_go_flatc_cmd([f, go_package_name(unit)])
+
+ # Process .in files
+ for f in in_files:
+ unit.onsrc(f)
+
+ # Generate .symabis for .s files (starting from 1.12 version)
+ if len(asm_files) > 0:
+ symabis_flags = []
+ gostd_version = unit.get('GOSTD_VERSION')
+ if compare_versions('1.16', gostd_version) >= 0:
+ import_path = get_import_path(unit)
+ symabis_flags.extend(['FLAGS', '-p', import_path])
+ if need_compiling_runtime(import_path, gostd_version):
+ symabis_flags.append('-compiling-runtime')
+ unit.on_go_compile_symabis(asm_files + symabis_flags)
+
+ # Process cgo files
+ cgo_files = get_appended_values(unit, '_CGO_SRCS_VALUE')
+
+ cgo_cflags = []
+ if len(c_files) + len(cxx_files) + len(s_files) + len(cgo_files) > 0:
+ if is_test_module:
+ go_test_for_dir = unit.get('GO_TEST_FOR_DIR')
+ if go_test_for_dir and go_test_for_dir.startswith('$S/'):
+ unit.onaddincl(['FOR', 'c', go_test_for_dir[3:]])
+ unit.onaddincl(['FOR', 'c', unit.get('MODDIR')])
+ cgo_cflags = get_appended_values(unit, 'CGO_CFLAGS_VALUE')
+
+ for f in itertools.chain(c_files, cxx_files, s_files):
+ unit.onsrc([f] + cgo_cflags)
+
+ if len(cgo_files) > 0:
+ if not unit.enabled('CGO_ENABLED'):
+ ymake.report_configure_error('trying to build with CGO (CGO_SRCS is non-empty) when CGO is disabled')
+ import_path = get_import_path(unit)
+ if import_path != runtime_cgo_path:
+ go_std_root = unit.get('GOSTD')
+ unit.onpeerdir(os.path.join(go_std_root, runtime_cgo_path))
+ race_mode = 'race' if unit.enabled('RACE') else 'norace'
+ import_runtime_cgo = 'false' if import_path in import_runtime_cgo_false[race_mode] else 'true'
+ import_syscall = 'false' if import_path in import_syscall_false[race_mode] else 'true'
+ args = [import_path] + cgo_files + ['FLAGS', '-import_runtime_cgo=' + import_runtime_cgo, '-import_syscall=' + import_syscall]
+ unit.on_go_compile_cgo1(args)
+ cgo2_cflags = get_appended_values(unit, 'CGO2_CFLAGS_VALUE')
+ for f in cgo_files:
+ if f.endswith('.go'):
+ unit.onsrc([f[:-2] + 'cgo2.c'] + cgo_cflags + cgo2_cflags)
+ else:
+ ymake.report_configure_error('file {} should not be listed in CGO_SRCS() macros'.format(f))
+ args = [go_package_name(unit)] + cgo_files
+ if len(c_files) > 0:
+ args += ['C_FILES'] + c_files
+ if len(s_files) > 0:
+ args += ['S_FILES'] + s_files
+ if len(syso_files) > 0:
+ args += ['OBJ_FILES'] + syso_files
+ unit.on_go_compile_cgo2(args)
+
+
+def on_go_resource(unit, *args):
+ args = list(args)
+ files = args[::2]
+ keys = args[1::2]
+ suffix_md5 = md5.new('@'.join(args)).hexdigest()
+ resource_go = os.path.join("resource.{}.res.go".format(suffix_md5))
+
+ unit.onpeerdir(["library/go/core/resource"])
+
+ if len(files) != len(keys):
+ ymake.report_configure_error("last file {} is missing resource key".format(files[-1]))
+
+ for i, (key, filename) in enumerate(zip(keys, files)):
+ if not key:
+ ymake.report_configure_error("file key must be non empty")
+ return
+
+ if filename == "-" and "=" not in key:
+ ymake.report_configure_error("key \"{}\" must contain = sign".format(key))
+ return
+
+ # quote key, to avoid automatic substitution of filename by absolute
+ # path in RUN_PROGRAM
+ args[2*i+1] = "notafile" + args[2*i+1]
+
+ files = [file for file in files if file != "-"]
+ unit.onrun_program([
+ "library/go/core/resource/cc",
+ "-package", go_package_name(unit),
+ "-o", resource_go] + list(args) + [
+ "IN"] + files + [
+ "OUT", resource_go])
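
The symabis and need_compiling_runtime() branches above hinge on compare_versions(); a standalone restatement of the ordering it implements (a sketch, since gobuild itself imports the ymake module): a positive result means the first version is older, so "compare_versions('1.16', gostd_version) >= 0" reads as "GOSTD is 1.16 or newer".

    def version_key(version):          # restates the comparison key used by compare_versions()
        cut = version.find('beta')     # 'beta' suffixes are ignored
        version = version if cut < 0 else version[:cut]
        return tuple(component.zfill(8) for component in version.split('.'))

    assert version_key('1.16') < version_key('1.17')
    assert version_key('1.17') == version_key('1.17beta1')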
diff --git a/build/plugins/ios_app_settings.py b/build/plugins/ios_app_settings.py
new file mode 100644
index 0000000000..60ec0b4b52
--- /dev/null
+++ b/build/plugins/ios_app_settings.py
@@ -0,0 +1,19 @@
+import _common as common
+import ymake
+import os
+
+def onios_app_settings(unit, *args):
+ tail, kv = common.sort_by_keywords(
+ {'OS_VERSION': 1, 'DEVICES': -1},
+ args
+ )
+ if tail:
+ ymake.report_configure_error('Bad IOS_COMMON_SETTINGS usage - unknown data: ' + str(tail))
+ if kv.get('OS_VERSION', []):
+ unit.onios_app_common_flags(['--minimum-deployment-target', kv.get('OS_VERSION', [])[0]])
+ unit.onios_app_assets_flags(['--filter-for-device-os-version', kv.get('OS_VERSION', [])[0]])
+ devices_flags = []
+ for device in kv.get('DEVICES', []):
+ devices_flags += ['--target-device', device]
+ if devices_flags:
+ unit.onios_app_common_flags(devices_flags)
diff --git a/build/plugins/ios_assets.py b/build/plugins/ios_assets.py
new file mode 100644
index 0000000000..5f0ccb9467
--- /dev/null
+++ b/build/plugins/ios_assets.py
@@ -0,0 +1,30 @@
+import _common as common
+import ymake
+import os
+
+
+def onios_assets(unit, *args):
+ _, kv = common.sort_by_keywords(
+ {'ROOT': 1, 'CONTENTS': -1, 'FLAGS': -1},
+ args
+ )
+ if not kv.get('ROOT', []) and kv.get('CONTENTS', []):
+ ymake.report_configure_error('Please specify ROOT directory for assets')
+ origin_root = kv.get('ROOT')[0]
+ destination_root = os.path.normpath(os.path.join('$BINDIR', os.path.basename(origin_root)))
+ rel_list = []
+ for cont in kv.get('CONTENTS', []):
+ rel = os.path.relpath(cont, origin_root)
+ if rel.startswith('..'):
+ ymake.report_configure_error('{} is not subpath of {}'.format(cont, origin_root))
+ rel_list.append(rel)
+ if not rel_list:
+ return
+ results_list = [os.path.join('$B', unit.path()[3:], os.path.basename(origin_root), i) for i in rel_list]
+ if len(kv.get('CONTENTS', [])) != len(results_list):
+        ymake.report_configure_error('IOS_ASSETS CONTENTS length does not match the results list')
+ for s, d in zip(kv.get('CONTENTS', []), results_list):
+ unit.oncopy_file([s, d])
+ if kv.get('FLAGS', []):
+ unit.onios_app_assets_flags(kv.get('FLAGS', []))
+ unit.on_ios_assets([destination_root] + results_list)
diff --git a/build/plugins/java.py b/build/plugins/java.py
new file mode 100644
index 0000000000..16fc126734
--- /dev/null
+++ b/build/plugins/java.py
@@ -0,0 +1,374 @@
+import _common as common
+import ymake
+import json
+import os
+import base64
+
+
+DELIM = '================================'
+
+
+def split_args(s): # TODO quotes, escapes
+ return filter(None, s.split())
+
+
+def extract_macro_calls(unit, macro_value_name, macro_calls_delim):
+ if not unit.get(macro_value_name):
+ return []
+
+ return filter(None, map(split_args, unit.get(macro_value_name).replace('$' + macro_value_name, '').split(macro_calls_delim)))
+
+
+def extract_macro_calls2(unit, macro_value_name):
+ if not unit.get(macro_value_name):
+ return []
+
+ calls = []
+ for call_encoded_args in unit.get(macro_value_name).strip().split():
+ call_args = json.loads(base64.b64decode(call_encoded_args), encoding='utf-8')
+ calls.append(call_args)
+
+ return calls
+
+
+def on_run_jbuild_program(unit, *args):
+    """
+    Custom code generation
+    @link: https://wiki.yandex-team.ru/yatool/java/#kodogeneracijarunjavaprogram
+    """
+    args = list(args)
+
+ flat, kv = common.sort_by_keywords({'IN': -1, 'IN_DIR': -1, 'OUT': -1, 'OUT_DIR': -1, 'CWD': 1, 'CLASSPATH': -1, 'CP_USE_COMMAND_FILE': 1, 'ADD_SRCS_TO_CLASSPATH': 0}, args)
+ depends = kv.get('CLASSPATH', []) + kv.get('JAR', [])
+ fake_out = None
+ if depends:
+ # XXX: hack to force ymake to build dependencies
+ fake_out = "fake.out.{}".format(hash(tuple(args)))
+ unit.on_run_java(['TOOL'] + depends + ["OUT", fake_out])
+
+ if not kv.get('CP_USE_COMMAND_FILE'):
+ args += ['CP_USE_COMMAND_FILE', unit.get(['JAVA_PROGRAM_CP_USE_COMMAND_FILE']) or 'yes']
+
+ if fake_out is not None:
+ args += ['FAKE_OUT', fake_out]
+
+ prev = unit.get(['RUN_JAVA_PROGRAM_VALUE']) or ''
+ new_val = (prev + ' ' + base64.b64encode(json.dumps(list(args), encoding='utf-8'))).strip()
+ unit.set(['RUN_JAVA_PROGRAM_VALUE', new_val])
+
+
+def ongenerate_script(unit, *args):
+ """
+ heretic@ promised to make tutorial here
+ Don't forget
+ Feel free to remind
+ """
+ flat, kv = common.sort_by_keywords(
+ {'OUT': -1, 'TEMPLATE': -1, 'CUSTOM_PROPERTY': -1},
+ args
+ )
+ if len(kv.get('TEMPLATE', [])) > len(kv.get('OUT', [])):
+        ymake.report_configure_error('Too many arguments for TEMPLATE parameter')
+ prev = unit.get(['GENERATE_SCRIPT_VALUE']) or ''
+ new_val = (prev + ' ' + base64.b64encode(json.dumps(list(args), encoding='utf-8'))).strip()
+ unit.set(['GENERATE_SCRIPT_VALUE', new_val])
+
+
+def onjava_module(unit, *args):
+ args_delim = unit.get('ARGS_DELIM')
+ idea_only = True if 'IDEA_ONLY' in args else False
+
+ if idea_only:
+ if unit.get('YA_IDE_IDEA') != 'yes':
+ return
+ if unit.get('YMAKE_JAVA_MODULES') != 'yes':
+ return
+
+ data = {
+ 'BUNDLE_NAME': unit.name(),
+ 'PATH': unit.path(),
+ 'IDEA_ONLY': 'yes' if idea_only else 'no',
+ 'MODULE_TYPE': unit.get('MODULE_TYPE'),
+ 'MODULE_ARGS': unit.get('MODULE_ARGS'),
+ 'MANAGED_PEERS': '${MANAGED_PEERS}',
+ 'MANAGED_PEERS_CLOSURE': '${MANAGED_PEERS_CLOSURE}',
+ 'NON_NAMAGEABLE_PEERS': '${NON_NAMAGEABLE_PEERS}',
+ 'TEST_CLASSPATH_MANAGED': '${TEST_CLASSPATH_MANAGED}',
+ 'EXCLUDE': extract_macro_calls(unit, 'EXCLUDE_VALUE', args_delim),
+ 'JAVA_SRCS': extract_macro_calls(unit, 'JAVA_SRCS_VALUE', args_delim),
+ 'JAVAC_FLAGS': extract_macro_calls(unit, 'JAVAC_FLAGS_VALUE', args_delim),
+ 'ANNOTATION_PROCESSOR': extract_macro_calls(unit, 'ANNOTATION_PROCESSOR_VALUE', args_delim),
+ 'EXTERNAL_JAR': extract_macro_calls(unit, 'EXTERNAL_JAR_VALUE', args_delim),
+ 'RUN_JAVA_PROGRAM': extract_macro_calls2(unit, 'RUN_JAVA_PROGRAM_VALUE'),
+ 'RUN_JAVA_PROGRAM_MANAGED': '${RUN_JAVA_PROGRAM_MANAGED}',
+ 'MAVEN_GROUP_ID': extract_macro_calls(unit, 'MAVEN_GROUP_ID_VALUE', args_delim),
+ 'JAR_INCLUDE_FILTER': extract_macro_calls(unit, 'JAR_INCLUDE_FILTER_VALUE', args_delim),
+ 'JAR_EXCLUDE_FILTER': extract_macro_calls(unit, 'JAR_EXCLUDE_FILTER_VALUE', args_delim),
+
+ # TODO remove when java test dart is in prod
+ 'UNITTEST_DIR': unit.get('UNITTEST_DIR'),
+ 'SYSTEM_PROPERTIES': extract_macro_calls(unit, 'SYSTEM_PROPERTIES_VALUE', args_delim),
+ 'JVM_ARGS': extract_macro_calls(unit, 'JVM_ARGS_VALUE', args_delim),
+ 'TEST_CWD': extract_macro_calls(unit, 'TEST_CWD_VALUE', args_delim),
+ 'TEST_DATA': extract_macro_calls(unit, '__test_data', args_delim),
+ 'TEST_FORK_MODE': extract_macro_calls(unit, 'TEST_FORK_MODE', args_delim),
+ 'SPLIT_FACTOR': extract_macro_calls(unit, 'TEST_SPLIT_FACTOR', args_delim),
+ 'TIMEOUT': extract_macro_calls(unit, 'TEST_TIMEOUT', args_delim),
+ 'TAG': extract_macro_calls(unit, 'TEST_TAGS_VALUE', args_delim),
+ 'SIZE': extract_macro_calls(unit, 'TEST_SIZE_NAME', args_delim),
+ 'DEPENDS': extract_macro_calls(unit, 'TEST_DEPENDS_VALUE', args_delim),
+ 'IDEA_EXCLUDE': extract_macro_calls(unit, 'IDEA_EXCLUDE_DIRS_VALUE', args_delim),
+ 'IDEA_RESOURCE': extract_macro_calls(unit, 'IDEA_RESOURCE_DIRS_VALUE', args_delim),
+ 'IDEA_MODULE_NAME': extract_macro_calls(unit, 'IDEA_MODULE_NAME_VALUE', args_delim),
+ 'GENERATE_SCRIPT': extract_macro_calls2(unit, 'GENERATE_SCRIPT_VALUE'),
+ 'FAKEID': extract_macro_calls(unit, 'FAKEID', args_delim),
+ 'TEST_DATA': extract_macro_calls(unit, 'TEST_DATA_VALUE', args_delim),
+ 'JAVA_FORBIDDEN_LIBRARIES': extract_macro_calls(unit, 'JAVA_FORBIDDEN_LIBRARIES_VALUE', args_delim),
+ 'JDK_RESOURCE': 'JDK' + (unit.get('JDK_VERSION') or '_DEFAULT')
+ }
+ if unit.get('ENABLE_PREVIEW_VALUE') == 'yes' and unit.get('JDK_VERSION') in ('15', '16', '17'):
+ data['ENABLE_PREVIEW'] = extract_macro_calls(unit, 'ENABLE_PREVIEW_VALUE', args_delim)
+
+ if unit.get('SAVE_JAVAC_GENERATED_SRCS_DIR') and unit.get('SAVE_JAVAC_GENERATED_SRCS_TAR'):
+ data['SAVE_JAVAC_GENERATED_SRCS_DIR'] = extract_macro_calls(unit, 'SAVE_JAVAC_GENERATED_SRCS_DIR', args_delim)
+ data['SAVE_JAVAC_GENERATED_SRCS_TAR'] = extract_macro_calls(unit, 'SAVE_JAVAC_GENERATED_SRCS_TAR', args_delim)
+
+ if unit.get('JAVA_ADD_DLLS_VALUE') == 'yes':
+ data['ADD_DLLS_FROM_DEPENDS'] = extract_macro_calls(unit, 'JAVA_ADD_DLLS_VALUE', args_delim)
+
+ if unit.get('ERROR_PRONE_VALUE') == 'yes':
+ data['ERROR_PRONE'] = extract_macro_calls(unit, 'ERROR_PRONE_VALUE', args_delim)
+
+ if unit.get('WITH_KOTLIN_VALUE') == 'yes':
+ data['WITH_KOTLIN'] = extract_macro_calls(unit, 'WITH_KOTLIN_VALUE', args_delim)
+ if unit.get('KOTLIN_JVM_TARGET'):
+ data['KOTLIN_JVM_TARGET'] = extract_macro_calls(unit, 'KOTLIN_JVM_TARGET', args_delim)
+ if unit.get('KOTLINC_FLAGS_VALUE'):
+ data['KOTLINC_FLAGS'] = extract_macro_calls(unit, 'KOTLINC_FLAGS_VALUE', args_delim)
+ if unit.get('KOTLINC_OPTS_VALUE'):
+ data['KOTLINC_OPTS'] = extract_macro_calls(unit, 'KOTLINC_OPTS_VALUE', args_delim)
+
+ if unit.get('DIRECT_DEPS_ONLY_VALUE') == 'yes':
+ data['DIRECT_DEPS_ONLY'] = extract_macro_calls(unit, 'DIRECT_DEPS_ONLY_VALUE', args_delim)
+
+ if unit.get('JAVA_EXTERNAL_DEPENDENCIES_VALUE'):
+ valid = []
+ for dep in sum(extract_macro_calls(unit, 'JAVA_EXTERNAL_DEPENDENCIES_VALUE', args_delim), []):
+ if os.path.normpath(dep).startswith('..'):
+ ymake.report_configure_error('{}: {} - relative paths in JAVA_EXTERNAL_DEPENDENCIES is not allowed'.format(unit.path(), dep))
+ elif os.path.isabs(dep):
+ ymake.report_configure_error('{}: {} absolute paths in JAVA_EXTERNAL_DEPENDENCIES is not allowed'.format(unit.path(), dep))
+ else:
+ valid.append(dep)
+ if valid:
+ data['EXTERNAL_DEPENDENCIES'] = [valid]
+
+ if unit.get('MAKE_UBERJAR_VALUE') == 'yes':
+ if unit.get('MODULE_TYPE') != 'JAVA_PROGRAM':
+ ymake.report_configure_error('{}: UBERJAR supported only for JAVA_PROGRAM module type'.format(unit.path()))
+ data['UBERJAR'] = extract_macro_calls(unit, 'MAKE_UBERJAR_VALUE', args_delim)
+ data['UBERJAR_PREFIX'] = extract_macro_calls(unit, 'UBERJAR_PREFIX_VALUE', args_delim)
+ data['UBERJAR_HIDE_EXCLUDE'] = extract_macro_calls(unit, 'UBERJAR_HIDE_EXCLUDE_VALUE', args_delim)
+ data['UBERJAR_PATH_EXCLUDE'] = extract_macro_calls(unit, 'UBERJAR_PATH_EXCLUDE_VALUE', args_delim)
+ data['UBERJAR_MANIFEST_TRANSFORMER_MAIN'] = extract_macro_calls(unit, 'UBERJAR_MANIFEST_TRANSFORMER_MAIN_VALUE', args_delim)
+ data['UBERJAR_MANIFEST_TRANSFORMER_ATTRIBUTE'] = extract_macro_calls(unit, 'UBERJAR_MANIFEST_TRANSFORMER_ATTRIBUTE_VALUE', args_delim)
+ data['UBERJAR_APPENDING_TRANSFORMER'] = extract_macro_calls(unit, 'UBERJAR_APPENDING_TRANSFORMER_VALUE', args_delim)
+ data['UBERJAR_SERVICES_RESOURCE_TRANSFORMER'] = extract_macro_calls(unit, 'UBERJAR_SERVICES_RESOURCE_TRANSFORMER_VALUE', args_delim)
+
+ if unit.get('WITH_JDK_VALUE') == 'yes':
+ if unit.get('MODULE_TYPE') != 'JAVA_PROGRAM':
+ ymake.report_configure_error('{}: JDK export supported only for JAVA_PROGRAM module type'.format(unit.path()))
+ data['WITH_JDK'] = extract_macro_calls(unit, 'WITH_JDK_VALUE', args_delim)
+
+ if not data['EXTERNAL_JAR']:
+ has_processor = extract_macro_calls(unit, 'GENERATE_VCS_JAVA_INFO_NODEP', args_delim)
+ data['EMBED_VCS'] = [[str(has_processor and has_processor[0] and has_processor[0][0])]]
+ # FORCE_VCS_INFO_UPDATE is responsible for setting special value of VCS_INFO_DISABLE_CACHE__NO_UID__
+ macro_val = extract_macro_calls(unit, 'FORCE_VCS_INFO_UPDATE', args_delim)
+ macro_str = macro_val[0][0] if macro_val and macro_val[0] and macro_val[0][0] else ''
+ if macro_str and macro_str == 'yes':
+ data['VCS_INFO_DISABLE_CACHE__NO_UID__'] = macro_val
+
+ for java_srcs_args in data['JAVA_SRCS']:
+ external = None
+
+ for i in xrange(len(java_srcs_args)):
+ arg = java_srcs_args[i]
+
+ if arg == 'EXTERNAL':
+ if not i + 1 < len(java_srcs_args):
+ continue # TODO configure error
+
+ ex = java_srcs_args[i + 1]
+
+ if ex in ('EXTERNAL', 'SRCDIR', 'PACKAGE_PREFIX', 'EXCLUDE'):
+ continue # TODO configure error
+
+ if external is not None:
+ continue # TODO configure error
+
+ external = ex
+
+ if external:
+ unit.onpeerdir(external)
+
+ for k, v in data.items():
+ if not v:
+ data.pop(k)
+
+ dart = 'JAVA_DART: ' + base64.b64encode(json.dumps(data)) + '\n' + DELIM + '\n'
+
+ unit.set_property(['JAVA_DART_DATA', dart])
+ if not idea_only and unit.get('MODULE_TYPE') in ('JAVA_PROGRAM', 'JAVA_LIBRARY', 'JTEST', 'TESTNG', 'JUNIT5') and not unit.path().startswith('$S/contrib/java'):
+ unit.on_add_classpath_clash_check()
+ if unit.get('LINT_LEVEL_VALUE') != "none":
+ unit.onadd_check(['JAVA_STYLE', unit.get('LINT_LEVEL_VALUE')])
+
+
+def on_add_java_style_checks(unit, *args):
+ if unit.get('LINT_LEVEL_VALUE') != "none":
+ unit.onadd_check(['JAVA_STYLE', unit.get('LINT_LEVEL_VALUE')] + list(args))
+
+
+def on_add_classpath_clash_check(unit, *args):
+ jdeps_val = (unit.get('CHECK_JAVA_DEPS_VALUE') or '').lower()
+ if jdeps_val and jdeps_val not in ('yes', 'no', 'strict'):
+ ymake.report_configure_error('CHECK_JAVA_DEPS: "yes", "no" or "strict" required')
+ if jdeps_val and jdeps_val != 'no':
+ unit.onjava_test_deps(jdeps_val)
+
+
+# Ymake macros related to Java modules
+
+
+def onexternal_jar(unit, *args):
+ args = list(args)
+ flat, kv = common.sort_by_keywords({'SOURCES': 1}, args)
+ if not flat:
+        ymake.report_configure_error('EXTERNAL_JAR requires exactly one resource URL of a compiled jar library')
+        return
+ res = flat[0]
+ resid = res[4:] if res.startswith('sbr:') else res
+ unit.set(['JAR_LIB_RESOURCE', resid])
+ unit.set(['JAR_LIB_RESOURCE_URL', res])
+
+
+def on_check_java_srcdir(unit, *args):
+ args = list(args)
+ for arg in args:
+        if '$' not in arg:
+ arc_srcdir = os.path.join(unit.get('MODDIR'), arg)
+ abs_srcdir = unit.resolve(os.path.join("$S/", arc_srcdir))
+ if not os.path.exists(abs_srcdir) or not os.path.isdir(abs_srcdir):
+ ymake.report_configure_error(
+ 'Trying to set a [[alt1]]JAVA_SRCS[[rst]] for a missing directory: [[imp]]$S/{}[[rst]]',
+ missing_dir=arc_srcdir
+ )
+ return
+ srcdir = unit.resolve_arc_path(arg)
+ if srcdir and not srcdir.startswith('$S'):
+ continue
+ abs_srcdir = unit.resolve(srcdir) if srcdir else unit.resolve(arg)
+ if not os.path.exists(abs_srcdir) or not os.path.isdir(abs_srcdir):
+ ymake.report_configure_error(
+ 'Trying to set a [[alt1]]JAVA_SRCS[[rst]] for a missing directory: [[imp]]{}[[rst]]',
+ missing_dir=srcdir
+ )
+
+
+def on_fill_jar_copy_resources_cmd(unit, *args):
+ if len(args) == 4:
+ varname, srcdir, base_classes_dir, reslist = tuple(args)
+ package = ''
+ else:
+ varname, srcdir, base_classes_dir, package, reslist = tuple(args)
+ dest_dir = os.path.join(base_classes_dir, *package.split('.')) if package else base_classes_dir
+ var = unit.get(varname)
+ var += ' && $FS_TOOLS copy_files {} {} {}'.format(srcdir if srcdir.startswith('"$') else '${CURDIR}/' + srcdir, dest_dir, reslist)
+ unit.set([varname, var])
+
+def on_fill_jar_gen_srcs(unit, *args):
+ varname, jar_type, srcdir, base_classes_dir, java_list, kt_list, groovy_list, res_list = tuple(args[0:8])
+ resolved_srcdir = unit.resolve_arc_path(srcdir)
+ if not resolved_srcdir.startswith('$') or resolved_srcdir.startswith('$S'):
+ return
+
+ exclude_pos = args.index('EXCLUDE')
+ globs = args[7:exclude_pos]
+ excludes = args[exclude_pos + 1:]
+ var = unit.get(varname)
+ var += ' && ${{cwd:BINDIR}} $YMAKE_PYTHON ${{input:"build/scripts/resolve_java_srcs.py"}} --append -d {} -s {} -k {} -g {} -r {} --include-patterns {}'.format(srcdir, java_list, kt_list, groovy_list, res_list, ' '.join(globs))
+ if jar_type == 'SRC_JAR':
+ var += ' --all-resources'
+ if len(excludes) > 0:
+ var += ' --exclude-patterns {}'.format(' '.join(excludes))
+ if unit.get('WITH_KOTLIN_VALUE') == 'yes':
+ var += ' --resolve-kotlin'
+ unit.set([varname, var])
+
+
+def on_check_run_java_prog_classpath(unit, *args):
+ if len(args) != 1:
+        ymake.report_configure_error('multiple CLASSPATH elements in RUN_JAVA_PROGRAM invocation are no longer supported. Use JAVA_RUNTIME_PEERDIR on the JAVA_PROGRAM module instead')
+
+
+def extract_words(words, keys):
+ kv = {}
+ k = None
+
+ for w in words:
+ if w in keys:
+ k = w
+ else:
+            if k not in kv:
+ kv[k] = []
+ kv[k].append(w)
+
+ return kv
+
+
+def parse_words(words):
+ kv = extract_words(words, {'OUT', 'TEMPLATE'})
+ ws = []
+ for item in ('OUT', 'TEMPLATE'):
+ for i, word in list(enumerate(kv[item])):
+ if word == 'CUSTOM_PROPERTY':
+ ws += kv[item][i:]
+ kv[item] = kv[item][:i]
+    templates = kv['TEMPLATE']
+    outputs = kv['OUT']
+    if len(outputs) < len(templates):
+        ymake.report_configure_error('Too many arguments for TEMPLATE parameter')
+ return
+ if ws and ws[0] != 'CUSTOM_PROPERTY':
+ ymake.report_configure_error('''Can't parse {}'''.format(ws))
+ custom_props = []
+ for item in ws:
+ if item == 'CUSTOM_PROPERTY':
+ custom_props.append([])
+ else:
+ custom_props[-1].append(item)
+ props = []
+ for p in custom_props:
+ if not p:
+ ymake.report_configure_error('Empty CUSTOM_PROPERTY')
+ continue
+ props.append('-B')
+ if len(p) > 1:
+ props.append(base64.b64encode("{}={}".format(p[0], ' '.join(p[1:]))))
+ else:
+ ymake.report_configure_error('CUSTOM_PROPERTY "{}" value is not specified'.format(p[0]))
+ for i, o in enumerate(outputs):
+        yield o, templates[min(i, len(templates) - 1)], props
+
+
+def on_ymake_generate_script(unit, *args):
+ for out, tmpl, props in parse_words(list(args)):
+ unit.on_add_gen_java_script([out, tmpl] + list(props))
+
+def on_jdk_version_macro_check(unit, *args):
+    if len(args) != 1:
+        unit.message(["error", "Invalid syntax. Single argument required."])
+        return
+    jdk_version = args[0]
+    available_versions = ('10', '11', '12', '13', '14', '15', '16', '17',)
+    if jdk_version not in available_versions:
+        unit.message(["error", "Invalid jdk version: {}. {} are available".format(jdk_version, available_versions)])
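For illustration, a minimal sketch of how parse_words pairs each OUT target with a TEMPLATE and base64-encodes CUSTOM_PROPERTY values; the argument list is hypothetical and not taken from any real ya.make:

    words = ['TEMPLATE', 'run.tmpl', 'OUT', 'run.sh',
             'CUSTOM_PROPERTY', 'MAIN', 'com.example.Main']
    for out, tmpl, props in parse_words(words):
        # out == 'run.sh', tmpl == 'run.tmpl',
        # props == ['-B', base64.b64encode('MAIN=com.example.Main')]
        unit.on_add_gen_java_script([out, tmpl] + list(props))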
diff --git a/build/plugins/large_files.py b/build/plugins/large_files.py
new file mode 100644
index 0000000000..33a78d7110
--- /dev/null
+++ b/build/plugins/large_files.py
@@ -0,0 +1,39 @@
+import os
+import ymake
+from _common import strip_roots
+
+PLACEHOLDER_EXT = "external"
+
+
+def onlarge_files(unit, *args):
+ """
+ @usage LARGE_FILES([AUTOUPDATED] Files...)
+
+    Use a large file either from the working copy or from remote storage via the placeholder <File>.external.
+    If <File> is present locally (and not a symlink!) it will be copied to the build directory.
+    Otherwise the macro will try to locate <File>.external, parse it and retrieve the file during the build phase.
+ """
+ args = list(args)
+
+ if args and args[0] == 'AUTOUPDATED':
+ args = args[1:]
+
+ for arg in args:
+ if arg == 'AUTOUPDATED':
+ unit.message(["warn", "Please set AUTOUPDATED argument before other file names"])
+ continue
+
+ src = unit.resolve_arc_path(arg)
+ if src.startswith("$S"):
+ msg = "Used local large file {}. Don't forget to run 'ya upload --update-external' and commit {}.{}".format(src, src, PLACEHOLDER_EXT)
+ unit.message(["warn", msg])
+ unit.oncopy_file([arg, arg])
+ else:
+ out_file = strip_roots(os.path.join(unit.path(), arg))
+ external = "{}.{}".format(arg, PLACEHOLDER_EXT)
+ from_external_cmd = [external, out_file, 'OUT_NOAUTO', arg]
+ if os.path.dirname(arg):
+ from_external_cmd.extend(("RENAME", os.path.basename(arg)))
+ unit.on_from_external(from_external_cmd)
+ unit.onadd_check(['check.external', external])
+
diff --git a/build/plugins/lib/__init__.py b/build/plugins/lib/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/build/plugins/lib/__init__.py
diff --git a/build/plugins/lib/_metric_resolvers.py b/build/plugins/lib/_metric_resolvers.py
new file mode 100644
index 0000000000..270eb78345
--- /dev/null
+++ b/build/plugins/lib/_metric_resolvers.py
@@ -0,0 +1,11 @@
+import re
+
+VALUE_PATTERN = re.compile(r"^\s*(?P<value>\d+)\s*$")
+
+
+def resolve_value(val):
+ match = VALUE_PATTERN.match(val)
+ if not match:
+ return None
+ val = match.group('value')
+ return int(val)
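A quick usage sketch with illustrative inputs: resolve_value() accepts only a bare integer, optionally surrounded by whitespace, and returns None for anything else.

    assert resolve_value(' 42 ') == 42
    assert resolve_value('42kb') is None
    assert resolve_value('not-a-number') is None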
diff --git a/build/plugins/lib/nots/__init__.py b/build/plugins/lib/nots/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/build/plugins/lib/nots/__init__.py
diff --git a/build/plugins/lib/nots/package_manager/__init__.py b/build/plugins/lib/nots/package_manager/__init__.py
new file mode 100644
index 0000000000..52bf62644c
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/__init__.py
@@ -0,0 +1,9 @@
+from .pnpm import PnpmPackageManager
+from .base import constants
+
+
+manager = PnpmPackageManager
+
+__all__ = [
+ "constants",
+]
diff --git a/build/plugins/lib/nots/package_manager/base/__init__.py b/build/plugins/lib/nots/package_manager/base/__init__.py
new file mode 100644
index 0000000000..1b55fe3f56
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/__init__.py
@@ -0,0 +1,11 @@
+from . import constants
+from .lockfile import BaseLockfile, LockfilePackageMeta, LockfilePackageMetaInvalidError
+from .package_json import PackageJson
+from .package_manager import BasePackageManager, PackageManagerError, PackageManagerCommandError
+
+__all__ = [
+ "constants",
+ "BaseLockfile", "LockfilePackageMeta", "LockfilePackageMetaInvalidError",
+ "BasePackageManager", "PackageManagerError", "PackageManagerCommandError",
+ "PackageJson",
+]
diff --git a/build/plugins/lib/nots/package_manager/base/constants.py b/build/plugins/lib/nots/package_manager/base/constants.py
new file mode 100644
index 0000000000..0b9fcb76af
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/constants.py
@@ -0,0 +1,5 @@
+PACKAGE_JSON_FILENAME = "package.json"
+NODE_MODULES_BUNDLE_FILENAME = "node_modules.tar"
+NPM_REGISTRY_URL = "http://npm.yandex-team.ru"
+PNPM_WS_FILENAME = "pnpm-workspace.yaml"
+PNPM_LOCKFILE_FILENAME = "pnpm-lock.yaml"
diff --git a/build/plugins/lib/nots/package_manager/base/lockfile.py b/build/plugins/lib/nots/package_manager/base/lockfile.py
new file mode 100644
index 0000000000..9b9c0be954
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/lockfile.py
@@ -0,0 +1,68 @@
+import os
+
+from abc import ABCMeta, abstractmethod
+from six import add_metaclass
+
+
+class LockfilePackageMeta(object):
+ """
+ Basic struct representing package meta from lockfile.
+ """
+ __slots__ = ("name", "version", "sky_id", "integrity", "integrity_algorithm", "tarball_path")
+
+ @staticmethod
+ def from_str(s):
+ return LockfilePackageMeta(*s.strip().split(" "))
+
+ def __init__(self, name, version, sky_id, integrity, integrity_algorithm):
+ self.name = name
+ self.version = version
+ self.sky_id = sky_id
+ self.integrity = integrity
+ self.integrity_algorithm = integrity_algorithm
+ self.tarball_path = "{}-{}.tgz".format(name, version)
+
+ def to_str(self):
+ return " ".join([self.name, self.version, self.sky_id, self.integrity, self.integrity_algorithm])
+
+
+class LockfilePackageMetaInvalidError(RuntimeError):
+ pass
+
+
+@add_metaclass(ABCMeta)
+class BaseLockfile(object):
+ @classmethod
+ def load(cls, path):
+ """
+ :param path: lockfile path
+ :type path: str
+ :rtype: BaseLockfile
+ """
+ pj = cls(path)
+ pj.read()
+
+ return pj
+
+ def __init__(self, path):
+ if not os.path.isabs(path):
+ raise TypeError("Absolute path required, given: {}".format(path))
+
+ self.path = path
+ self.data = None
+
+ @abstractmethod
+ def read(self):
+ pass
+
+ @abstractmethod
+ def write(self, path=None):
+ pass
+
+ @abstractmethod
+ def get_packages_meta(self):
+ pass
+
+ @abstractmethod
+ def update_tarball_resolutions(self, fn):
+ pass
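As a reference for the serialized form, a small sketch with made-up values: LockfilePackageMeta round-trips five space-separated fields and derives the tarball name from the package name and version.

    meta = LockfilePackageMeta.from_str('@scope/pkg 1.2.3 rbtorrent:deadbeef abc123 sha512')
    assert meta.tarball_path == '@scope/pkg-1.2.3.tgz'
    assert meta.to_str() == '@scope/pkg 1.2.3 rbtorrent:deadbeef abc123 sha512'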
diff --git a/build/plugins/lib/nots/package_manager/base/package_json.py b/build/plugins/lib/nots/package_manager/base/package_json.py
new file mode 100644
index 0000000000..3d0bf3238e
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/package_json.py
@@ -0,0 +1,113 @@
+import os
+import json
+
+from six import iteritems
+
+from . import constants
+
+
+class PackageJsonWorkspaceError(RuntimeError):
+ pass
+
+
+class PackageJson(object):
+ DEP_KEY = "dependencies"
+ DEV_DEP_KEY = "devDependencies"
+ PEER_DEP_KEY = "peerDependencies"
+ OPT_DEP_KEY = "optionalDependencies"
+ DEP_KEYS = (DEP_KEY, DEV_DEP_KEY, PEER_DEP_KEY, OPT_DEP_KEY)
+
+ WORKSPACE_SCHEMA = "workspace:"
+
+ @classmethod
+ def load(cls, path):
+ """
+ :param path: package.json path
+ :type path: str
+ :rtype: PackageJson
+ """
+ pj = cls(path)
+ pj.read()
+
+ return pj
+
+ def __init__(self, path):
+ if not os.path.isabs(path):
+ raise TypeError("Absolute path required, given: {}".format(path))
+
+ self.path = path
+ self.data = None
+
+ def read(self):
+ with open(self.path) as f:
+ self.data = json.load(f)
+
+ def get_name(self):
+ return self.data.get("name")
+
+ def get_workspace_dep_paths(self):
+ """
+ :return: Workspace dependencies.
+ :rtype: list of (str, str)
+ """
+ dep_paths = []
+ schema = self.WORKSPACE_SCHEMA
+ schema_len = len(schema)
+
+ for deps in map(lambda x: self.data.get(x), self.DEP_KEYS):
+ if not deps:
+ continue
+
+ for name, spec in iteritems(deps):
+ if not spec.startswith(schema):
+ continue
+
+ spec_path = spec[schema_len:]
+ if not (spec_path.startswith(".") or spec_path.startswith("..")):
+ raise PackageJsonWorkspaceError(
+ "Expected relative path specifier for workspace dependency, but got '{}' for {} in {}".format(spec, name, self.path))
+
+ dep_paths.append((name, spec_path))
+
+ return dep_paths
+
+ def get_workspace_deps(self):
+ """
+ :rtype: list of PackageJson
+ """
+ ws_deps = []
+ pj_dir = os.path.dirname(self.path)
+
+ for (name, rel_path) in self.get_workspace_dep_paths():
+ dep_path = os.path.normpath(os.path.join(pj_dir, rel_path))
+ dep_pj = PackageJson.load(os.path.join(dep_path, constants.PACKAGE_JSON_FILENAME))
+
+ if name != dep_pj.get_name():
+ raise PackageJsonWorkspaceError(
+ "Workspace dependency name mismatch, found '{}' instead of '{}' in {}".format(name, dep_pj.get_name(), self.path))
+
+ ws_deps.append(dep_pj)
+
+ return ws_deps
+
+ def get_workspace_map(self):
+ """
+ :return: Absolute paths of workspace dependencies (including transitive) mapped to package.json and depth.
+ :rtype: dict of (PackageJson, int)
+ """
+ ws_deps = {}
+ # list of (pj, depth)
+ pj_queue = [(self, 0)]
+
+ while len(pj_queue):
+ (pj, depth) = pj_queue.pop()
+ pj_dir = os.path.dirname(pj.path)
+ if pj_dir in ws_deps:
+ continue
+
+ ws_deps[pj_dir] = (pj, depth)
+
+ for dep_pj in pj.get_workspace_deps():
+ pj_queue.append((dep_pj, depth + 1))
+
+ return ws_deps
diff --git a/build/plugins/lib/nots/package_manager/base/package_manager.py b/build/plugins/lib/nots/package_manager/base/package_manager.py
new file mode 100644
index 0000000000..0de9d8acc3
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/package_manager.py
@@ -0,0 +1,108 @@
+import os
+import sys
+import subprocess
+import tarfile
+
+from abc import ABCMeta, abstractmethod
+from six import add_metaclass
+
+from . import constants
+
+
+class PackageManagerError(RuntimeError):
+ pass
+
+
+class PackageManagerCommandError(PackageManagerError):
+ def __init__(self, cmd, code, stdout, stderr):
+ self.cmd = cmd
+ self.code = code
+ self.stdout = stdout
+ self.stderr = stderr
+
+ msg = "package manager exited with code {} while running {}:\n{}\n{}".format(code, cmd, stdout, stderr)
+ super(PackageManagerCommandError, self).__init__(msg)
+
+
+@add_metaclass(ABCMeta)
+class BasePackageManager(object):
+ def __init__(self, build_root, build_path, sources_path, nodejs_bin_path, script_path, contribs_path):
+ self.module_path = build_path[len(build_root) + 1:]
+ self.build_path = build_path
+ self.sources_path = sources_path
+ self.build_root = build_root
+ self.sources_root = sources_path[:-len(self.module_path) - 1]
+ self.nodejs_bin_path = nodejs_bin_path
+ self.script_path = script_path
+ self.contribs_path = contribs_path
+
+ @abstractmethod
+ def install(self):
+ pass
+
+ @abstractmethod
+ def get_peer_paths_from_package_json(self):
+ pass
+
+ @abstractmethod
+ def calc_node_modules_inouts(self):
+ pass
+
+ @abstractmethod
+ def extract_packages_meta_from_lockfiles(self, lf_paths):
+ pass
+
+ def create_node_modules_bundle(self, path):
+ """
+ Creates tarball from the node_modules directory contents.
+ :param path: tarball path
+ :type path: str
+ """
+ with tarfile.open(path, "w") as tf:
+ tf.add(self._nm_path(), arcname=".")
+
+ def _exec_command(self, args, include_defaults=True):
+ if not self.nodejs_bin_path:
+ raise PackageManagerError("Unable to execute command: nodejs_bin_path is not configured")
+
+ cmd = [self.nodejs_bin_path, self.script_path] + args + (self._get_default_options() if include_defaults else [])
+ p = subprocess.Popen(
+ cmd,
+ cwd=self.build_path,
+ stdin=None,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+ stdout, stderr = p.communicate()
+
+ if p.returncode != 0:
+ self._dump_debug_log()
+
+ raise PackageManagerCommandError(cmd, p.returncode, stdout.decode("utf-8"), stderr.decode("utf-8"))
+
+ def _nm_path(self, *parts):
+ return os.path.join(self.build_path, "node_modules", *parts)
+
+ def _contrib_tarball_path(self, pkg):
+ return os.path.join(self.contribs_path, pkg.tarball_path)
+
+ def _contrib_tarball_url(self, pkg):
+ return "file:" + self._contrib_tarball_path(pkg)
+
+ def _get_default_options(self):
+ return ["--registry", constants.NPM_REGISTRY_URL]
+
+ def _get_debug_log_path(self):
+ return None
+
+ def _dump_debug_log(self):
+ log_path = self._get_debug_log_path()
+
+ if not log_path:
+ return
+
+ try:
+ with open(log_path) as f:
+ sys.stderr.write("Package manager log {}:\n{}\n".format(log_path, f.read()))
+        except Exception:
+ sys.stderr.write("Failed to dump package manager log {}.\n".format(log_path))
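A concrete package manager only needs to implement the four abstract methods; a rough, non-functional sketch follows (the class name and method bodies are placeholders, the real pnpm implementation is added below):

    class DummyPackageManager(BasePackageManager):
        def install(self):
            self._exec_command(['install', '--offline'])

        def get_peer_paths_from_package_json(self):
            return []

        def calc_node_modules_inouts(self):
            return ([], [])

        def extract_packages_meta_from_lockfiles(self, lf_paths):
            return []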
diff --git a/build/plugins/lib/nots/package_manager/base/tests/package_json.py b/build/plugins/lib/nots/package_manager/base/tests/package_json.py
new file mode 100644
index 0000000000..3657e581bc
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/package_json.py
@@ -0,0 +1,114 @@
+import os
+import pytest
+
+from build.plugins.lib.nots.package_manager.base.package_json import PackageJson, PackageJsonWorkspaceError
+
+
+def test_get_workspace_dep_paths_ok():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@yandex-int/baz": "workspace:../baz",
+ },
+ }
+
+ ws_dep_paths = pj.get_workspace_dep_paths()
+
+ assert ws_dep_paths == [
+ ("@yandex-int/bar", "../bar"),
+ ("@yandex-int/baz", "../baz"),
+ ]
+
+
+def test_get_workspace_dep_paths_invalid_path():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:*",
+ },
+ }
+
+ with pytest.raises(PackageJsonWorkspaceError) as e:
+ pj.get_workspace_dep_paths()
+
+ assert str(e.value) == "Expected relative path specifier for workspace dependency, but got 'workspace:*' for @yandex-int/bar in /packages/foo/package.json"
+
+
+def test_get_workspace_deps_ok():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@yandex-int/baz": "workspace:../baz",
+ },
+ }
+
+ def load_mock(cls, path):
+ p = PackageJson(path)
+ p.data = {
+ "name": "@yandex-int/{}".format(os.path.basename(os.path.dirname(path))),
+ }
+ return p
+ PackageJson.load = classmethod(load_mock)
+
+ ws_deps = pj.get_workspace_deps()
+
+ assert len(ws_deps) == 2
+ assert ws_deps[0].path == "/packages/bar/package.json"
+ assert ws_deps[1].path == "/packages/baz/package.json"
+
+
+def test_get_workspace_deps_with_wrong_name():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ }
+
+ def load_mock(cls, path):
+ p = PackageJson(path)
+ p.data = {
+ "name": "@shouldbe/{}".format(os.path.basename(os.path.dirname(path))),
+ }
+ return p
+ PackageJson.load = classmethod(load_mock)
+
+ with pytest.raises(PackageJsonWorkspaceError) as e:
+ pj.get_workspace_deps()
+
+ assert str(e.value) == "Workspace dependency name mismatch, found '@yandex-int/bar' instead of '@shouldbe/bar' in /packages/foo/package.json"
+
+
+def test_get_workspace_map_ok():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ }
+
+ def load_mock(cls, path):
+ name = os.path.basename(os.path.dirname(path))
+ p = PackageJson(path)
+ p.data = {
+ "name": "@yandex-int/{}".format(name),
+ "dependencies": ({"@yandex-int/qux": "workspace:../qux"} if name == "bar" else {}),
+ }
+ return p
+ PackageJson.load = classmethod(load_mock)
+
+ ws_map = pj.get_workspace_map()
+
+ assert len(ws_map) == 3
+ assert ws_map["/packages/foo"][0].path == "/packages/foo/package.json"
+ assert ws_map["/packages/foo"][1] == 0
+ assert ws_map["/packages/bar"][0].path == "/packages/bar/package.json"
+ assert ws_map["/packages/bar"][1] == 1
+ assert ws_map["/packages/qux"][0].path == "/packages/qux/package.json"
+ assert ws_map["/packages/qux"][1] == 2
diff --git a/build/plugins/lib/nots/package_manager/base/tests/ya.make b/build/plugins/lib/nots/package_manager/base/tests/ya.make
new file mode 100644
index 0000000000..1968fac42e
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/ya.make
@@ -0,0 +1,13 @@
+PY23_TEST()
+
+OWNER(dankolesnikov)
+
+TEST_SRCS(
+ package_json.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+)
+
+END()
diff --git a/build/plugins/lib/nots/package_manager/base/ya.make b/build/plugins/lib/nots/package_manager/base/ya.make
new file mode 100644
index 0000000000..aa73cfbe25
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/ya.make
@@ -0,0 +1,21 @@
+PY23_LIBRARY()
+
+OWNER(dankolesnikov)
+
+PY_SRCS(
+ __init__.py
+ constants.py
+ lockfile.py
+ package_json.py
+ package_manager.py
+)
+
+PEERDIR(
+ contrib/python/six
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/package_manager/pnpm/__init__.py b/build/plugins/lib/nots/package_manager/pnpm/__init__.py
new file mode 100644
index 0000000000..4b8f0d0e92
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/__init__.py
@@ -0,0 +1,9 @@
+from .lockfile import PnpmLockfile
+from .package_manager import PnpmPackageManager
+from .workspace import PnpmWorkspace
+
+__all__ = [
+ "PnpmLockfile",
+ "PnpmPackageManager",
+ "PnpmWorkspace",
+]
diff --git a/build/plugins/lib/nots/package_manager/pnpm/lockfile.py b/build/plugins/lib/nots/package_manager/pnpm/lockfile.py
new file mode 100644
index 0000000000..1c09f96432
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/lockfile.py
@@ -0,0 +1,163 @@
+import base64
+import binascii
+import yaml
+import os
+
+from six.moves.urllib import parse as urlparse
+from six import iteritems
+
+from ..base import PackageJson, BaseLockfile, LockfilePackageMeta, LockfilePackageMetaInvalidError
+
+
+class PnpmLockfile(BaseLockfile):
+ IMPORTER_KEYS = PackageJson.DEP_KEYS + ("specifiers",)
+
+ def read(self):
+ with open(self.path, "r") as f:
+ self.data = yaml.load(f, Loader=yaml.CSafeLoader)
+
+ def write(self, path=None):
+ """
+ :param path: path to store lockfile, defaults to original path
+ :type path: str
+ """
+ if path is None:
+ path = self.path
+
+ with open(path, "w") as f:
+ yaml.dump(self.data, f, Dumper=yaml.CSafeDumper)
+
+ def get_packages_meta(self):
+ """
+ Extracts packages meta from lockfile.
+ :rtype: list of LockfilePackageMeta
+ """
+ packages = self.data.get("packages", {})
+
+ return map(lambda x: _parse_package_meta(*x), iteritems(packages))
+
+ def update_tarball_resolutions(self, fn):
+ """
+ :param fn: maps `LockfilePackageMeta` instance to new `resolution.tarball` value
+ :type fn: lambda
+ """
+ packages = self.data.get("packages", {})
+
+ for key, meta in iteritems(packages):
+ meta["resolution"]["tarball"] = fn(_parse_package_meta(key, meta))
+ packages[key] = meta
+
+ def get_importers(self):
+ """
+        Returns the "importers" section from the lockfile or creates a similar structure from "dependencies" and "specifiers".
+ :rtype: dict of dict of dict of str
+ """
+ importers = self.data.get("importers")
+ if importers is not None:
+ return importers
+
+ importer = {k: self.data[k] for k in self.IMPORTER_KEYS if k in self.data}
+
+ return ({".": importer} if importer else {})
+
+ def merge(self, lf):
+ """
+ Merges two lockfiles:
+ 1. Converts the lockfile to monorepo-like lockfile with "importers" section instead of "dependencies" and "specifiers".
+ 2. Merges `lf`'s dependencies and specifiers to importers.
+ 3. Merges `lf`'s packages to the lockfile.
+ :param lf: lockfile to merge
+ :type lf: PnpmLockfile
+ """
+ importers = self.get_importers()
+ build_path = os.path.dirname(self.path)
+
+ for [importer, imports] in iteritems(lf.get_importers()):
+ importer_path = os.path.normpath(os.path.join(os.path.dirname(lf.path), importer))
+ importer_rel_path = os.path.relpath(importer_path, build_path)
+ importers[importer_rel_path] = imports
+
+ self.data["importers"] = importers
+
+ for k in self.IMPORTER_KEYS:
+ self.data.pop(k, None)
+
+ packages = self.data.get("packages", {})
+ for k, v in iteritems(lf.data.get("packages", {})):
+ if k not in packages:
+ packages[k] = v
+ self.data["packages"] = packages
+
+
+def _parse_package_meta(key, meta):
+ """
+    :param key: unique package key from the lockfile
+    :type key: string
+    :param meta: package meta dict from the lockfile
+    :type meta: dict
+    :rtype: LockfilePackageMeta
+ """
+ try:
+ name, version = _parse_package_key(key)
+ sky_id = _parse_sky_id_from_tarball_url(meta["resolution"]["tarball"])
+ integrity_algorithm, integrity = _parse_package_integrity(meta["resolution"]["integrity"])
+ except KeyError as e:
+ raise TypeError("Invalid package meta for key {}, missing {} key".format(key, e))
+ except LockfilePackageMetaInvalidError as e:
+ raise TypeError("Invalid package meta for key {}, parse error: {}".format(key, e))
+
+ return LockfilePackageMeta(name, version, sky_id, integrity, integrity_algorithm)
+
+
+def _parse_package_key(key):
+ """
+ :param key: package key in format "/({scope}/)?{package_name}/{package_version}(_{peer_dependencies})?"
+ :type key: string
+ :return: tuple of scoped package name and version
+ :rtype: (str, str)
+ """
+ try:
+ tokens = key.split("/")[1:]
+ version = tokens.pop().split("_", 1)[0]
+
+ if len(tokens) < 1 or len(tokens) > 2:
+ raise TypeError()
+ except (IndexError, TypeError):
+ raise LockfilePackageMetaInvalidError("Invalid package key")
+
+ return ("/".join(tokens), version)
+
+
+def _parse_sky_id_from_tarball_url(tarball_url):
+ """
+ :param tarball_url: tarball url
+ :type tarball_url: string
+ :return: sky id
+ :rtype: string
+ """
+ if tarball_url.startswith("file:"):
+ return ""
+
+ rbtorrent_param = urlparse.parse_qs(urlparse.urlparse(tarball_url).query).get("rbtorrent")
+
+ if rbtorrent_param is None:
+ raise LockfilePackageMetaInvalidError("Missing rbtorrent param in tarball url {}".format(tarball_url))
+
+ return "rbtorrent:{}".format(rbtorrent_param[0])
+
+
+def _parse_package_integrity(integrity):
+ """
+ :param integrity: package integrity in format "{algo}-{base64_of_hash}"
+ :type integrity: string
+ :return: tuple of algorithm and hash (hex)
+ :rtype: (str, str)
+ """
+ algo, hash_b64 = integrity.split("-", 1)
+
+ try:
+ hash_hex = binascii.hexlify(base64.b64decode(hash_b64))
+ except TypeError as e:
+ raise LockfilePackageMetaInvalidError("Invalid package integrity encoding, integrity: {}, error: {}".format(integrity, e))
+
+ return (algo, hash_hex)
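A short sketch of the parsing helpers on typical, made-up lockfile values:

    name, version = _parse_package_key('/@babel/cli/7.6.2_@babel+core@7.6.2')
    # name == '@babel/cli', version == '7.6.2' (the peer-dependency suffix after '_' is dropped)

    algo, hex_hash = _parse_package_integrity('sha512-JDZ+Tw==')
    # algo == 'sha512'; hex_hash is the hex form of the base64-decoded digest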
diff --git a/build/plugins/lib/nots/package_manager/pnpm/package_manager.py b/build/plugins/lib/nots/package_manager/pnpm/package_manager.py
new file mode 100644
index 0000000000..1a48675834
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/package_manager.py
@@ -0,0 +1,181 @@
+import os
+import shutil
+import yaml
+
+from six import iteritems
+
+from ..base import PackageJson, BasePackageManager, PackageManagerError
+from .lockfile import PnpmLockfile
+from .workspace import PnpmWorkspace
+from .utils import build_pj_path, build_lockfile_path, build_ws_config_path, build_nm_bundle_path
+
+
+class PnpmPackageManager(BasePackageManager):
+ _STORE_NM_PATH = os.path.join(".pnpm", "store")
+ _VSTORE_NM_PATH = os.path.join(".pnpm", "virtual-store")
+ _STORE_VER = "v3"
+
+ def install(self):
+ """
+ Creates node_modules directory according to the lockfile.
+ """
+ self._prepare_workspace()
+ self._exec_command([
+ "install",
+ "--offline",
+ "--frozen-lockfile",
+ "--store-dir", self._nm_path(self._STORE_NM_PATH),
+ "--virtual-store-dir", self._nm_path(self._VSTORE_NM_PATH),
+ "--no-verify-store-integrity",
+ "--package-import-method", "hardlink",
+ "--ignore-pnpmfile",
+ "--ignore-scripts",
+ "--strict-peer-dependencies",
+ ])
+ self._fix_stores_in_modules_yaml()
+
+ def get_peer_paths_from_package_json(self):
+ """
+        Returns paths of direct workspace dependencies (relative to the source root).
+ :rtype: list of str
+ """
+ pj = PackageJson.load(build_pj_path(self.sources_path))
+
+ return map(lambda x: os.path.normpath(os.path.join(self.module_path, x[1])), pj.get_workspace_dep_paths())
+
+ def calc_node_modules_inouts(self):
+ """
+ Returns input and output paths for command that creates `node_modules` bundle.
+ :return: Pair of input and output paths with correct roots ($S or $B).
+ :rtype: (list of str, list of str)
+ """
+ # Inputs: source package.json and lockfile, built package.jsons, lockfiles and workspace configs of deps, tarballs.
+ ins = []
+ # Source lockfiles are used only to get tarballs info.
+ src_lf_paths = [build_lockfile_path(self.sources_path)]
+ pj = PackageJson.load(build_pj_path(self.sources_path))
+
+ for [dep_src_path, (dep_pj, depth)] in iteritems(pj.get_workspace_map()):
+ if dep_src_path == self.sources_path:
+ continue
+ dep_mod_path = dep_src_path[len(self.sources_root) + 1:]
+ # pnpm requires all package.jsons.
+ ins.append(build_pj_path(dep_mod_path))
+ dep_lf_src_path = build_lockfile_path(dep_src_path)
+ if not os.path.isfile(dep_lf_src_path):
+ continue
+ src_lf_paths.append(dep_lf_src_path)
+ # Merged workspace configs and lockfiles of direct deps.
+ if depth == 1:
+ ins.append(build_ws_config_path(dep_mod_path))
+ ins.append(build_lockfile_path(dep_mod_path))
+
+ for pkg in self.extract_packages_meta_from_lockfiles(src_lf_paths):
+ ins.append(self._contrib_tarball_path(pkg))
+
+ s_root = lambda x: os.path.join("$S", x)
+ b_root = lambda x: os.path.join("$B", x)
+
+        ins = list(map(b_root, ins)) + [
+ s_root(build_pj_path(self.module_path)),
+ s_root(build_lockfile_path(self.module_path)),
+ ]
+
+ # Outputs: patched lockfile, generated workspace config, created node_modules bundle.
+ outs = [b_root(f(self.module_path)) for f in (build_lockfile_path, build_ws_config_path, build_nm_bundle_path)]
+
+ return (ins, outs)
+
+ def extract_packages_meta_from_lockfiles(self, lf_paths):
+ """
+        :type lf_paths: iterable of str
+ :rtype: iterable of LockfilePackageMeta
+ """
+ tarballs = set()
+
+ for lf_path in lf_paths:
+ try:
+ for pkg in PnpmLockfile.load(lf_path).get_packages_meta():
+ if pkg.tarball_path not in tarballs:
+ tarballs.add(pkg.tarball_path)
+ yield pkg
+ except Exception as e:
+ raise PackageManagerError("Unable to process lockfile {}: {}".format(lf_path, e))
+
+ def _prepare_workspace(self):
+ pj = self._build_package_json()
+ ws = PnpmWorkspace(build_ws_config_path(self.build_path))
+ ws.set_from_package_json(pj)
+ dep_paths = ws.get_paths()
+ self._build_merged_workspace_config(ws, dep_paths)
+ self._build_merged_lockfile(dep_paths)
+
+ def _build_package_json(self):
+ """
+ :rtype: PackageJson
+ """
+ in_pj_path = build_pj_path(self.sources_path)
+ out_pj_path = build_pj_path(self.build_path)
+ shutil.copyfile(in_pj_path, out_pj_path)
+
+ return PackageJson.load(out_pj_path)
+
+ def _build_merged_lockfile(self, dep_paths):
+ """
+ :type dep_paths: list of str
+ :rtype: PnpmLockfile
+ """
+ in_lf_path = build_lockfile_path(self.sources_path)
+ out_lf_path = build_lockfile_path(self.build_path)
+
+ lf = PnpmLockfile.load(in_lf_path)
+ # Change to the output path for correct path calcs on merging.
+ lf.path = out_lf_path
+
+ for dep_path in dep_paths:
+            if dep_path == self.build_path:
+ continue
+ lf_path = build_lockfile_path(dep_path)
+ if os.path.isfile(lf_path):
+ lf.merge(PnpmLockfile.load(lf_path))
+
+ lf.update_tarball_resolutions(lambda p: self._contrib_tarball_url(p))
+ lf.write()
+
+ def _build_merged_workspace_config(self, ws, dep_paths):
+ """
+        :type ws: PnpmWorkspace
+ :type dep_paths: list of str
+ """
+ for dep_path in dep_paths:
+            if dep_path == self.build_path:
+ continue
+ ws_config_path = build_ws_config_path(dep_path)
+ if os.path.isfile(ws_config_path):
+ ws.merge(PnpmWorkspace.load(ws_config_path))
+
+ ws.write()
+
+ def _fix_stores_in_modules_yaml(self):
+ """
+ Ensures that store paths are the same as would be after installing deps in the source dir.
+ This is required to reuse `node_modules` after build.
+ """
+ with open(self._nm_path(".modules.yaml"), "r+") as f:
+ data = yaml.load(f, Loader=yaml.CSafeLoader)
+ # NOTE: pnpm requires absolute store path here.
+ data["storeDir"] = os.path.join(self.sources_path, "node_modules", self._STORE_NM_PATH, self._STORE_VER)
+ data["virtualStoreDir"] = self._VSTORE_NM_PATH
+ f.seek(0)
+ yaml.dump(data, f, Dumper=yaml.CSafeDumper)
+ f.truncate()
+
+ def _get_default_options(self):
+ return super(PnpmPackageManager, self)._get_default_options() + [
+ "--stream",
+ "--reporter", "append-only",
+ "--no-color",
+ ]
+
+ def _get_debug_log_path(self):
+ return self._nm_path(".pnpm-debug.log")
diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py b/build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py
new file mode 100644
index 0000000000..06315a4992
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py
@@ -0,0 +1,320 @@
+import pytest
+
+from build.plugins.lib.nots.package_manager.pnpm.lockfile import PnpmLockfile
+
+
+def test_lockfile_get_packages_meta_ok():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/@babel/cli/7.6.2_@babel+core@7.6.2": {
+ "resolution": {
+ "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==",
+ "tarball": "@babel%2fcli/-/cli-7.6.2.tgz?rbtorrent=cb1849da3e4947e56a8f6bde6a1ec42703ddd187",
+ },
+ },
+ },
+ }
+
+ packages = list(lf.get_packages_meta())
+ pkg = packages[0]
+
+ assert len(packages) == 1
+ assert pkg.name == "@babel/cli"
+ assert pkg.version == "7.6.2"
+ assert pkg.sky_id == "rbtorrent:cb1849da3e4947e56a8f6bde6a1ec42703ddd187"
+ assert pkg.integrity == b"24367e4ff6ebf693df4f696600c272a490d34d31ccf5e3c3fc40f5d13463473255744572f89077891961cd8993b796243601efc561a55159cbb5dbfaaee883ad"
+ assert pkg.integrity_algorithm == "sha512"
+
+
+def test_lockfile_get_packages_empty():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {}
+
+ assert len(list(lf.get_packages_meta())) == 0
+
+
+def test_package_meta_invalid_key():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "in/valid": {},
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key in/valid, parse error: Invalid package key"
+
+
+def test_package_meta_missing_resolution():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/valid/1.2.3": {},
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key /valid/1.2.3, missing 'resolution' key"
+
+
+def test_package_meta_missing_tarball():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/valid/1.2.3": {
+ "resolution": {},
+ },
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key /valid/1.2.3, missing 'tarball' key"
+
+
+def test_package_meta_missing_rbtorrent():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/valid/1.2.3": {
+ "resolution": {
+ "tarball": "valid-1.2.3.tgz",
+ },
+ },
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key /valid/1.2.3, parse error: Missing rbtorrent param in tarball url valid-1.2.3.tgz"
+
+
+def test_lockfile_meta_file_tarball():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/@babel/cli/7.6.2": {
+ "resolution": {
+ "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==",
+ "tarball": "file:/some/abs/path.tgz",
+ },
+ },
+ },
+ }
+
+ packages = list(lf.get_packages_meta())
+ pkg = packages[0]
+
+ assert len(packages) == 1
+ assert pkg.name == "@babel/cli"
+ assert pkg.version == "7.6.2"
+ assert pkg.sky_id == ""
+
+
+def test_lockfile_update_tarball_resolutions_ok():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/@babel/cli/7.6.2_@babel+core@7.6.2": {
+ "resolution": {
+ "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==",
+ "tarball": "@babel%2fcli/-/cli-7.6.2.tgz?rbtorrent=cb1849da3e4947e56a8f6bde6a1ec42703ddd187",
+ },
+ },
+ },
+ }
+
+ lf.update_tarball_resolutions(lambda p: p.name)
+
+ assert lf.data["packages"]["/@babel/cli/7.6.2_@babel+core@7.6.2"]["resolution"]["tarball"] == "@babel/cli"
+
+
+def test_lockfile_merge():
+ lf1 = PnpmLockfile(path="/foo/pnpm-lock.yaml")
+ lf1.data = {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ },
+ }
+
+ lf2 = PnpmLockfile(path="/bar/pnpm-lock.yaml")
+ lf2.data = {
+ "dependencies": {
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "b": "1.0.0",
+ },
+ "packages": {
+ "/b/1.0.0": {},
+ },
+ }
+
+ lf3 = PnpmLockfile(path="/another/baz/pnpm-lock.yaml")
+ lf3.data = {
+ "importers": {
+ ".": {
+ "dependencies": {
+ "@a/qux": "link:../qux",
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "@a/qux": "workspace:../qux",
+ "a": "1.0.0",
+ },
+ },
+ "../qux": {
+ "dependencies": {
+ "b": "1.0.1",
+ },
+ "specifiers": {
+ "b": "1.0.1",
+ },
+ },
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ "/b/1.0.1": {},
+ },
+ }
+
+ lf4 = PnpmLockfile(path="/another/quux/pnpm-lock.yaml")
+ lf4.data = {
+ "dependencies": {
+ "@a/bar": "link:../../bar",
+ },
+ "specifiers": {
+ "@a/bar": "workspace:../../bar",
+ },
+ }
+
+ lf1.merge(lf2)
+ lf1.merge(lf3)
+ lf1.merge(lf4)
+
+ assert lf1.data == {
+ "importers": {
+ ".": {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ },
+ "../bar": {
+ "dependencies": {
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "b": "1.0.0",
+ },
+ },
+ "../another/baz": {
+ "dependencies": {
+ "@a/qux": "link:../qux",
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "@a/qux": "workspace:../qux",
+ "a": "1.0.0",
+ },
+ },
+ "../another/qux": {
+ "dependencies": {
+ "b": "1.0.1",
+ },
+ "specifiers": {
+ "b": "1.0.1",
+ },
+ },
+ "../another/quux": {
+ "dependencies": {
+ "@a/bar": "link:../../bar",
+ },
+ "specifiers": {
+ "@a/bar": "workspace:../../bar",
+ },
+ },
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ "/b/1.0.0": {},
+ "/b/1.0.1": {},
+ },
+ }
+
+
+def test_lockfile_merge_does_not_override_packages():
+ lf1 = PnpmLockfile(path="/foo/pnpm-lock.yaml")
+ lf1.data = {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ },
+ }
+
+ lf2 = PnpmLockfile(path="/bar/pnpm-lock.yaml")
+ lf2.data = {
+ "dependencies": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ "packages": {
+ "/a/1.0.0": {
+ "overriden": True,
+ },
+ "/b/1.0.0": {},
+ },
+ }
+
+ lf1.merge(lf2)
+
+ assert lf1.data == {
+ "importers": {
+ ".": {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ },
+ "../bar": {
+ "dependencies": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ },
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ "/b/1.0.0": {},
+ },
+ }
diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py b/build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py
new file mode 100644
index 0000000000..f6a73e0d4c
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py
@@ -0,0 +1,58 @@
+from build.plugins.lib.nots.package_manager.base import PackageJson
+from build.plugins.lib.nots.package_manager.pnpm.workspace import PnpmWorkspace
+
+
+def test_workspace_get_paths():
+ ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ ws.packages = set([".", "../bar", "../../another/baz"])
+
+ assert sorted(ws.get_paths()) == [
+ "/another/baz",
+ "/packages/bar",
+ "/packages/foo",
+ ]
+
+
+def test_workspace_set_from_package_json():
+ ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ pj = PackageJson(path="/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@a/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@a/baz": "workspace:../../another/baz",
+ },
+ "peerDependencies": {
+ "@a/qux": "workspace:../../another/qux",
+ },
+ "optionalDependencies": {
+ "@a/quux": "workspace:../../another/quux",
+ }
+ }
+
+ ws.set_from_package_json(pj)
+
+ assert sorted(ws.get_paths()) == [
+ "/another/baz",
+ "/another/quux",
+ "/another/qux",
+ "/packages/bar",
+ "/packages/foo",
+ ]
+
+
+def test_workspace_merge():
+ ws1 = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ ws1.packages = set([".", "../bar", "../../another/baz"])
+ ws2 = PnpmWorkspace(path="/another/baz/pnpm-workspace.yaml")
+ ws2.packages = set([".", "../qux"])
+
+ ws1.merge(ws2)
+
+ assert sorted(ws1.get_paths()) == [
+ "/another/baz",
+ "/another/qux",
+ "/packages/bar",
+ "/packages/foo",
+ ]
diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make b/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make
new file mode 100644
index 0000000000..94712f1db9
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make
@@ -0,0 +1,15 @@
+PY23_TEST()
+
+OWNER(dankolesnikov)
+
+TEST_SRCS(
+ lockfile.py
+ workspace.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+ build/plugins/lib/nots/package_manager/pnpm
+)
+
+END()
diff --git a/build/plugins/lib/nots/package_manager/pnpm/utils.py b/build/plugins/lib/nots/package_manager/pnpm/utils.py
new file mode 100644
index 0000000000..d8e99e3ab8
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/utils.py
@@ -0,0 +1,19 @@
+import os
+
+from ..base.constants import PACKAGE_JSON_FILENAME, PNPM_LOCKFILE_FILENAME, PNPM_WS_FILENAME, NODE_MODULES_BUNDLE_FILENAME
+
+
+def build_pj_path(p):
+ return os.path.join(p, PACKAGE_JSON_FILENAME)
+
+
+def build_lockfile_path(p):
+ return os.path.join(p, PNPM_LOCKFILE_FILENAME)
+
+
+def build_ws_config_path(p):
+ return os.path.join(p, PNPM_WS_FILENAME)
+
+
+def build_nm_bundle_path(p):
+ return os.path.join(p, NODE_MODULES_BUNDLE_FILENAME)
diff --git a/build/plugins/lib/nots/package_manager/pnpm/workspace.py b/build/plugins/lib/nots/package_manager/pnpm/workspace.py
new file mode 100644
index 0000000000..635b77dcb2
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/workspace.py
@@ -0,0 +1,69 @@
+import os
+import yaml
+
+
+class PnpmWorkspace(object):
+ @classmethod
+ def load(cls, path):
+ ws = cls(path)
+ ws.read()
+
+ return ws
+
+ def __init__(self, path):
+ if not os.path.isabs(path):
+ raise TypeError("Absolute path required, given: {}".format(path))
+
+ self.path = path
+ # NOTE: pnpm requires relative workspace paths.
+ self.packages = set()
+
+ def read(self):
+ with open(self.path) as f:
+ self.packages = set(yaml.load(f, Loader=yaml.CSafeLoader).get("packages", []))
+
+ def write(self, path=None):
+ if not path:
+ path = self.path
+
+ with open(path, "w") as f:
+ data = {
+ "packages": list(self.packages),
+ }
+ yaml.dump(data, f, Dumper=yaml.CSafeDumper)
+
+ def get_paths(self):
+ """
+ Returns absolute paths of workspace packages.
+ :rtype: list of str
+ """
+ dir_path = os.path.dirname(self.path)
+
+ return [os.path.normpath(os.path.join(dir_path, pkg_path)) for pkg_path in self.packages]
+
+ def set_from_package_json(self, package_json):
+ """
+ Sets packages to "workspace" deps from given package.json.
+ :param package_json: package.json of workspace
+ :type package_json: PackageJson
+ """
+ if os.path.dirname(package_json.path) != os.path.dirname(self.path):
+ raise TypeError(
+ "package.json should be in workspace directory {}, given: {}".format(os.path.dirname(self.path), package_json.path))
+
+ self.packages = set(path for name, path in package_json.get_workspace_dep_paths())
+ # Add relative path to self.
+ self.packages.add(".")
+
+ def merge(self, ws):
+ """
+ Adds `ws`'s packages to the workspace.
+ :param ws: workspace to merge
+ :type ws: PnpmWorkspace
+ """
+ dir_path = os.path.dirname(self.path)
+ ws_dir_path = os.path.dirname(ws.path)
+
+ for p_rel_path in ws.packages:
+ p_path = os.path.normpath(os.path.join(ws_dir_path, p_rel_path))
+ self.packages.add(os.path.relpath(p_path, dir_path))
diff --git a/build/plugins/lib/nots/package_manager/pnpm/ya.make b/build/plugins/lib/nots/package_manager/pnpm/ya.make
new file mode 100644
index 0000000000..b2f2727c3f
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/ya.make
@@ -0,0 +1,23 @@
+PY23_LIBRARY()
+
+OWNER(dankolesnikov)
+
+PY_SRCS(
+ __init__.py
+ lockfile.py
+ package_manager.py
+ workspace.py
+ utils.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+ contrib/python/PyYAML
+ contrib/python/six
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/package_manager/ya.make b/build/plugins/lib/nots/package_manager/ya.make
new file mode 100644
index 0000000000..79ee0ea175
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/ya.make
@@ -0,0 +1,14 @@
+PY23_LIBRARY()
+
+OWNER(dankolesnikov)
+
+PY_SRCS(
+ __init__.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+ build/plugins/lib/nots/package_manager/pnpm
+)
+
+END()
diff --git a/build/plugins/lib/nots/typescript/__init__.py b/build/plugins/lib/nots/typescript/__init__.py
new file mode 100644
index 0000000000..4684004183
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/__init__.py
@@ -0,0 +1,7 @@
+from .tsc_wrapper import TscWrapper, TsConfig, TsValidationError
+
+__all__ = [
+ "TscWrapper",
+ "TsConfig",
+ "TsValidationError",
+]
diff --git a/build/plugins/lib/nots/typescript/tests/tsc_wrapper.py b/build/plugins/lib/nots/typescript/tests/tsc_wrapper.py
new file mode 100644
index 0000000000..b6c2845f79
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/tests/tsc_wrapper.py
@@ -0,0 +1,168 @@
+import pytest
+
+from build.plugins.lib.nots.typescript import TsConfig, TsValidationError
+
+
+def test_ts_config_validate_valid():
+ cfg = TsConfig(path="/tsconfig.json")
+ cfg.data = {
+ "compilerOptions": {
+ "rootDir": "./src",
+ "outDir": "./build",
+ },
+ }
+
+ cfg.validate()
+
+
+def test_ts_config_validate_empty():
+ cfg = TsConfig(path="/tsconfig.json")
+
+ with pytest.raises(TsValidationError) as e:
+ cfg.validate()
+
+ assert e.value.errors == [
+ "'rootDir' option is required",
+ "'outDir' option is required",
+ ]
+
+
+def test_ts_config_validate_invalid_common():
+ cfg = TsConfig(path="/tsconfig.json")
+ cfg.data = {
+ "compilerOptions": {
+ "preserveSymlinks": True,
+ "rootDirs": [],
+ "outFile": "./foo.js",
+ },
+ "references": [],
+ "files": [],
+ "include": [],
+ "exclude": [],
+ }
+
+ with pytest.raises(TsValidationError) as e:
+ cfg.validate()
+
+ assert e.value.errors == [
+ "'rootDir' option is required",
+ "'outDir' option is required",
+ "'outFile' option is not supported",
+ "'preserveSymlinks' option is not supported due to pnpm limitations",
+ "'rootDirs' option is not supported, relative imports should have single root",
+ "'files' option is not supported, use 'include'",
+ "composite builds are not supported, use peerdirs in ya.make instead of 'references' option",
+ ]
+
+
+def test_ts_config_validate_invalid_subdirs():
+ cfg = TsConfig(path="/foo/tsconfig.json")
+ cfg.data = {
+ "compilerOptions": {
+ "rootDir": "/bar/src",
+ "outDir": "../bar/build",
+ },
+ }
+
+ with pytest.raises(TsValidationError) as e:
+ cfg.validate()
+
+ assert e.value.errors == [
+ "'rootDir' should be a subdirectory of the module",
+ "'outDir' should be a subdirectory of the module",
+ ]
+
+
+def test_ts_config_transform():
+ cfg = TsConfig(path="/tsconfig.json")
+ cfg.data = {
+ "compilerOptions": {
+ "rootDir": "./src",
+ "outDir": "./build",
+ "typeRoots": ["./node_modules/foo", "bar"],
+ },
+ "include": ["src/**/*"],
+ }
+
+ cfg.transform_paths(
+ build_path="bindir",
+ sources_path="srcdir",
+ )
+
+ assert cfg.data == {
+ "compilerOptions": {
+ "outDir": "bindir/build",
+ "rootDir": "srcdir/src",
+ "baseUrl": "bindir/node_modules",
+ "typeRoots": ["srcdir/node_modules/foo", "srcdir/bar", "bindir/node_modules/foo", "bindir/bar"]
+ },
+ "include": ["srcdir/src/**/*"],
+ "exclude": [],
+ }
+
+
+def test_ts_config_transform_when_root_eq_out():
+ cfg = TsConfig(path="/tsconfig.json")
+ cfg.data = {
+ "compilerOptions": {
+ "rootDir": ".",
+ "outDir": ".",
+ },
+ }
+
+ cfg.transform_paths(
+ build_path="bindir",
+ sources_path="srcdir",
+ )
+
+ assert cfg.data == {
+ "compilerOptions": {
+ "rootDir": "srcdir",
+ "outDir": "bindir",
+ "baseUrl": "bindir/node_modules",
+ },
+ "include": [],
+ "exclude": [],
+ }
+
+
+def test_ts_config_transform_sets_correct_source_root():
+ cfg = TsConfig(path="/tsconfig.json")
+ cfg.data = {
+ "compilerOptions": {
+ "rootDir": "src",
+ "outDir": "build",
+ "sourceMap": True,
+ },
+ }
+
+ cfg.transform_paths(
+ build_path="bindir",
+ sources_path="srcdir",
+ )
+
+ assert cfg.data == {
+ "compilerOptions": {
+ "rootDir": "srcdir/src",
+ "outDir": "bindir/build",
+ "baseUrl": "bindir/node_modules",
+ "sourceMap": True,
+ "sourceRoot": "../src",
+ },
+ "include": [],
+ "exclude": [],
+ }
+
+
+def test_ts_config_compiler_options():
+ cfg = TsConfig(path="/tsconfig.json")
+
+ assert cfg.compiler_option("invalid") is None
+
+ cfg.data = {
+ "compilerOptions": {
+ "rootDir": "src",
+ },
+ }
+
+ assert cfg.compiler_option("rootDir") == "src"
diff --git a/build/plugins/lib/nots/typescript/tests/ya.make b/build/plugins/lib/nots/typescript/tests/ya.make
new file mode 100644
index 0000000000..f6a8e40ea1
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/tests/ya.make
@@ -0,0 +1,13 @@
+PY23_TEST()
+
+OWNER(dankolesnikov)
+
+TEST_SRCS(
+ tsc_wrapper.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/typescript
+)
+
+END()
diff --git a/build/plugins/lib/nots/typescript/tsc_wrapper.py b/build/plugins/lib/nots/typescript/tsc_wrapper.py
new file mode 100644
index 0000000000..9fddf6707f
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/tsc_wrapper.py
@@ -0,0 +1,219 @@
+import os
+import json
+import shutil
+import subprocess
+import tarfile
+
+from ..package_manager import constants
+
+
+class TsError(RuntimeError):
+ pass
+
+
+class TsValidationError(TsError):
+ def __init__(self, path, errors):
+ self.path = path
+ self.errors = errors
+
+ super(TsValidationError, self).__init__("Invalid tsconfig {}:\n{}".format(path, "\n".join(errors)))
+
+
+class TsCompilationError(TsError):
+ def __init__(self, code, stdout, stderr):
+ self.code = code
+ self.stdout = stdout
+ self.stderr = stderr
+
+ super(TsCompilationError, self).__init__("tsc exited with code {}:\n{}\n{}".format(code, stdout, stderr))
+
+
+class TsConfig(object):
+ @classmethod
+ def load(cls, path):
+ """
+ :param path: tsconfig.json path
+ :type path: str
+ :rtype: TsConfig
+ """
+ tsconfig = cls(path)
+ tsconfig.read()
+
+ return tsconfig
+
+ def __init__(self, path):
+ if not os.path.isabs(path):
+ raise TypeError("Absolute path required, given: {}".format(path))
+
+ self.path = path
+ self.data = {}
+
+ def read(self):
+ try:
+ with open(self.path) as f:
+ self.data = json.load(f)
+ except Exception as e:
+ raise TsError("Failed to read tsconfig {}: {}".format(self.path, e))
+
+ def get_or_create_compiler_options(self):
+ """
+ Returns ref to the "compilerOptions" dict.
+ :rtype: dict
+ """
+ opts = self.data.get("compilerOptions")
+ if opts is None:
+ opts = {}
+ self.data["compilerOptions"] = opts
+
+ return opts
+
+ def compiler_option(self, name, default=None):
+ """
+ :param name: option key
+ :type name: str
+ :param default: default value
+ :type default: mixed
+ :rtype: mixed
+ """
+ return self.get_or_create_compiler_options().get(name, default)
+
+ def validate(self):
+ """
+ Checks whether the config is compatible with current toolchain.
+ """
+ opts = self.get_or_create_compiler_options()
+ errors = []
+ root_dir = opts.get("rootDir")
+ out_dir = opts.get("outDir")
+ config_dir = os.path.dirname(self.path)
+ is_mod_subdir = lambda p: not os.path.isabs(p) and os.path.normpath(os.path.join(config_dir, p)).startswith(config_dir)
+
+ if root_dir is None:
+ errors.append("'rootDir' option is required")
+ elif not is_mod_subdir(root_dir):
+ errors.append("'rootDir' should be a subdirectory of the module")
+
+ if out_dir is None:
+ errors.append("'outDir' option is required")
+ elif not is_mod_subdir(out_dir):
+ errors.append("'outDir' should be a subdirectory of the module")
+
+ if opts.get("outFile") is not None:
+ errors.append("'outFile' option is not supported")
+
+ if opts.get("preserveSymlinks"):
+ errors.append("'preserveSymlinks' option is not supported due to pnpm limitations")
+
+ if opts.get("rootDirs") is not None:
+ errors.append("'rootDirs' option is not supported, relative imports should have single root")
+
+ if self.data.get("files") is not None:
+ errors.append("'files' option is not supported, use 'include'")
+
+ if self.data.get("references") is not None:
+ errors.append("composite builds are not supported, use peerdirs in ya.make instead of 'references' option")
+
+ if len(errors):
+ raise TsValidationError(self.path, errors)
+
+ def transform_paths(self, build_path, sources_path):
+ """
+ Updates config with correct abs paths.
+ All source files/dirs will be mapped to `sources_path`, output files/dirs will be mapped to `build_path`.
+ :param build_path: module's build root
+ :type build_path: str
+ :param sources_path: module's source root
+ :type sources_path: str
+ """
+ opts = self.get_or_create_compiler_options()
+
+ sources_path_rel = lambda x: os.path.normpath(os.path.join(sources_path, x))
+ build_path_rel = lambda x: os.path.normpath(os.path.join(build_path, x))
+
+ root_dir = opts["rootDir"]
+ out_dir = opts["outDir"]
+
+ opts["rootDir"] = sources_path_rel(root_dir)
+ opts["outDir"] = build_path_rel(out_dir)
+
+ if opts.get("typeRoots"):
+ opts["typeRoots"] = list(map(sources_path_rel, opts["typeRoots"])) + list(map(build_path_rel, opts["typeRoots"]))
+
+ opts["baseUrl"] = build_path_rel("node_modules")
+
+ self.data["include"] = list(map(sources_path_rel, self.data.get("include", [])))
+ self.data["exclude"] = list(map(sources_path_rel, self.data.get("exclude", [])))
+
+ if opts.get("sourceMap"):
+ opts["sourceRoot"] = os.path.relpath(root_dir, out_dir)
+
+ def write(self, path=None):
+ """
+ :param path: tsconfig path, defaults to original path
+ :type path: str
+ """
+ if path is None:
+ path = self.path
+
+ with open(path, "w") as f:
+ json.dump(self.data, f)
+
+
+class TscWrapper(object):
+ _TSCONFIG_FILENAME = "tsconfig.json"
+
+ def __init__(self, build_root, build_path, sources_path, nodejs_bin_path, script_path, config_path):
+ self.build_root = build_root
+ self.build_path = build_path
+ self.sources_path = sources_path
+ self.nodejs_bin_path = nodejs_bin_path
+ self.script_path = script_path
+ self.config_path = config_path
+
+ def compile(self):
+ self._prepare_dependencies()
+ config = self._build_config()
+ self._exec_tsc(["--build", config.path])
+
+ def _prepare_dependencies(self):
+ self._copy_package_json()
+ self._unpack_node_modules()
+
+ def _copy_package_json(self):
+ # TODO: Validate "main" and "files" - they should include files from the output directory.
+ shutil.copyfile(
+ os.path.join(self.sources_path, constants.PACKAGE_JSON_FILENAME),
+ os.path.join(self.build_path, constants.PACKAGE_JSON_FILENAME),
+ )
+
+ def _unpack_node_modules(self):
+ nm_bundle_path = os.path.join(self.build_path, constants.NODE_MODULES_BUNDLE_FILENAME)
+ if os.path.isfile(nm_bundle_path):
+ with tarfile.open(nm_bundle_path) as tf:
+ tf.extractall(os.path.join(self.build_path, "node_modules"))
+
+ def _build_config(self):
+ config = TsConfig.load(self.config_path)
+ config.validate()
+ config.transform_paths(
+ build_path=self.build_path,
+ sources_path=self.sources_path,
+ )
+
+ config.path = os.path.join(self.build_path, self._TSCONFIG_FILENAME)
+ config.write()
+
+ return config
+
+ def _exec_tsc(self, args):
+ p = subprocess.Popen(
+ [self.nodejs_bin_path, self.script_path] + args,
+ cwd=self.build_path,
+ stdin=None,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+ stdout, stderr = p.communicate()
+
+ if p.returncode != 0:
+ raise TsCompilationError(p.returncode, stdout.decode("utf-8"), stderr.decode("utf-8"))
diff --git a/build/plugins/lib/nots/typescript/ya.make b/build/plugins/lib/nots/typescript/ya.make
new file mode 100644
index 0000000000..e83ce75e7a
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/ya.make
@@ -0,0 +1,18 @@
+PY23_LIBRARY()
+
+OWNER(dankolesnikov)
+
+PY_SRCS(
+ __init__.py
+ tsc_wrapper.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/ya.make b/build/plugins/lib/nots/ya.make
new file mode 100644
index 0000000000..681a1dea1a
--- /dev/null
+++ b/build/plugins/lib/nots/ya.make
@@ -0,0 +1,14 @@
+PY23_LIBRARY()
+
+OWNER(dankolesnikov)
+
+PY_SRCS(
+ __init__.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager
+ build/plugins/lib/nots/typescript
+)
+
+END()
diff --git a/build/plugins/lib/ya.make b/build/plugins/lib/ya.make
new file mode 100644
index 0000000000..7e61d12080
--- /dev/null
+++ b/build/plugins/lib/ya.make
@@ -0,0 +1,7 @@
+OWNER(g:ymake)
+
+PY23_LIBRARY()
+ PY_SRCS(
+ _metric_resolvers.py
+ )
+END()
diff --git a/build/plugins/linker_script.py b/build/plugins/linker_script.py
new file mode 100644
index 0000000000..bee9777a4e
--- /dev/null
+++ b/build/plugins/linker_script.py
@@ -0,0 +1,12 @@
+def onlinker_script(unit, *args):
+ """
+ @usage: LINKER_SCRIPT(Files...)
+
+ Specify files to be used as a linker script
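+
+    Example (hypothetical file name): LINKER_SCRIPT(custom_sections.ld)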
+ """
+ for arg in args:
+ if not arg.endswith(".ld") and not arg.endswith(".ld.in"):
+ unit.message(['error', "Invalid linker script extension: {}".format(arg)])
+ return
+
+ unit.onglobal_srcs(list(args))
diff --git a/build/plugins/lj_archive.py b/build/plugins/lj_archive.py
new file mode 100644
index 0000000000..1d80bb98f3
--- /dev/null
+++ b/build/plugins/lj_archive.py
@@ -0,0 +1,44 @@
+def onlj_archive(unit, *args):
+ """
+ @usage: LJ_ARCHIVE(NAME Name LuaFiles...)
+    Precompile .lua files using LuaJIT and archive both the sources and the results, using the source names as keys
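+
+    Example (hypothetical file names): LJ_ARCHIVE(NAME common scripts/init.lua scripts/util.lua)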
+ """
+ def iter_luas(l):
+ for a in l:
+ if a.endswith('.lua'):
+ yield a
+
+ def iter_objs(l):
+ for a in l:
+ s = a[:-3] + 'raw'
+ unit.on_luajit_objdump(['OUT', s, a])
+ yield s
+
+ luas = list(iter_luas(args))
+ objs = list(iter_objs(luas))
+
+ unit.onarchive_by_keys(['DONTCOMPRESS', 'NAME', 'LuaScripts.inc', 'KEYS', ':'.join(luas)] + objs)
+ unit.onarchive_by_keys(['DONTCOMPRESS', 'NAME', 'LuaSources.inc', 'KEYS', ':'.join(luas)] + luas)
+
+def onlj_21_archive(unit, *args):
+ """
+ @usage: LJ_21_ARCHIVE(NAME Name LuaFiles...) # deprecated
+    Precompile .lua files using LuaJIT 2.1 and archive both the sources and the results, using the source names as keys
+ """
+ def iter_luas(l):
+ for a in l:
+ if a.endswith('.lua'):
+ yield a
+
+ def iter_objs(l):
+ for a in l:
+ s = a[:-3] + 'raw'
+ unit.on_luajit_21_objdump(['OUT', s, a])
+ yield s
+
+ luas = list(iter_luas(args))
+ objs = list(iter_objs(luas))
+
+ unit.onarchive_by_keys(['DONTCOMPRESS', 'NAME', 'LuaScripts.inc', 'KEYS', ':'.join(luas)] + objs)
+ unit.onarchive_by_keys(['DONTCOMPRESS', 'NAME', 'LuaSources.inc', 'KEYS', ':'.join(luas)] + luas)
+
diff --git a/build/plugins/llvm_bc.py b/build/plugins/llvm_bc.py
new file mode 100644
index 0000000000..2cfe43884c
--- /dev/null
+++ b/build/plugins/llvm_bc.py
@@ -0,0 +1,33 @@
+import sys
+
+from _common import rootrel_arc_src, sort_by_keywords, skip_build_root, stripext
+
+
+def onllvm_bc(unit, *args):
+ free_args, kwds = sort_by_keywords({'SYMBOLS': -1, 'NAME': 1, 'NO_COMPILE': 0}, args)
+ name = kwds['NAME'][0]
+ symbols = kwds.get('SYMBOLS')
+ obj_suf = unit.get('OBJ_SUF')
+ skip_compile_step = 'NO_COMPILE' in kwds
+ merged_bc = name + '_merged' + obj_suf + '.bc'
+ out_bc = name + '_optimized' + obj_suf + '.bc'
+ bcs = []
+ for x in free_args:
+ rel_path = rootrel_arc_src(x, unit)
+ bc_path = '${ARCADIA_BUILD_ROOT}/' + skip_build_root(rel_path) + obj_suf + '.bc'
+ if not skip_compile_step:
+ if x.endswith('.c'):
+ llvm_compile = unit.onllvm_compile_c
+ elif x.endswith('.ll'):
+ llvm_compile = unit.onllvm_compile_ll
+ else:
+ llvm_compile = unit.onllvm_compile_cxx
+ llvm_compile([rel_path, bc_path])
+ bcs.append(bc_path)
+ unit.onllvm_link([merged_bc] + bcs)
+ opt_opts = ['-O2', '-globalopt', '-globaldce']
+ if symbols:
+ # XXX: '#' used instead of ',' to overcome ymake tendency to split everything by comma
+ opt_opts += ['-internalize', '-internalize-public-api-list=' + '#'.join(symbols)]
+ unit.onllvm_opt([merged_bc, out_bc] + opt_opts)
+ unit.onresource([out_bc, '/llvm_bc/' + name])
diff --git a/build/plugins/macros_with_error.py b/build/plugins/macros_with_error.py
new file mode 100644
index 0000000000..e82fb56d2c
--- /dev/null
+++ b/build/plugins/macros_with_error.py
@@ -0,0 +1,29 @@
+import sys
+
+import _common
+
+import ymake
+
+
+def onmacros_with_error(unit, *args):
+    print >> sys.stderr, 'This macro will fail'
+ raise Exception('Expected fail in MACROS_WITH_ERROR')
+
+
+def onrestrict_path(unit, *args):
+ if args:
+ if 'MSG' in args:
+ pos = args.index('MSG')
+ paths, msg = args[:pos], args[pos + 1:]
+ msg = ' '.join(msg)
+ else:
+ paths, msg = args, 'forbidden'
+ if not _common.strip_roots(unit.path()).startswith(paths):
+ error_msg = "Path '[[imp]]{}[[rst]]' is restricted - [[bad]]{}[[rst]]. Valid path prefixes are: [[unimp]]{}[[rst]]".format(unit.path(), msg, ', '.join(paths))
+ ymake.report_configure_error(error_msg)
+
+def onassert(unit, *args):
+ val = unit.get(args[0])
+ if val and val.lower() == "no":
+ msg = ' '.join(args[1:])
+ ymake.report_configure_error(msg)
diff --git a/build/plugins/mx_archive.py b/build/plugins/mx_archive.py
new file mode 100644
index 0000000000..56b0d4d16e
--- /dev/null
+++ b/build/plugins/mx_archive.py
@@ -0,0 +1,16 @@
+def onmx_formulas(unit, *args):
+ """
+ @usage: MX_FORMULAS(BinFiles...) # deprecated, matrixnet
+ Create MatrixNet formulas archive
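+
+    Example (hypothetical file names): MX_FORMULAS(relevance.bin fresh.info)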
+ """
+ def iter_infos():
+ for a in args:
+ if a.endswith('.bin'):
+ unit.on_mx_bin_to_info([a])
+ yield a[:-3] + 'info'
+ else:
+ yield a
+
+ infos = list(iter_infos())
+ unit.onarchive_asm(['NAME', 'MxFormulas'] + infos)
+ unit.on_mx_gen_table(infos)
diff --git a/build/plugins/nots.py b/build/plugins/nots.py
new file mode 100644
index 0000000000..5018256ddc
--- /dev/null
+++ b/build/plugins/nots.py
@@ -0,0 +1,46 @@
+import os
+
+from _common import to_yesno
+from lib.nots.package_manager import manager
+from lib.nots.typescript import TsConfig
+
+
+def _create_pm(unit):
+ return manager(
+ sources_path=unit.resolve(unit.path()),
+ build_root="$B",
+ build_path=unit.path().replace("$S", "$B", 1),
+ contribs_path=unit.get("NPM_CONTRIBS_PATH"),
+ nodejs_bin_path=None,
+ script_path=None,
+ )
+
+
+def on_from_npm_lockfiles(unit, *args):
+ lf_paths = map(lambda p: unit.resolve(unit.resolve_arc_path(p)), args)
+
+ for pkg in _create_pm(unit).extract_packages_meta_from_lockfiles(lf_paths):
+ unit.onfrom_npm([pkg.name, pkg.version, pkg.sky_id, pkg.integrity, pkg.integrity_algorithm, pkg.tarball_path])
+
+
+def onnode_modules(unit):
+ pm = _create_pm(unit)
+ unit.onpeerdir(pm.get_peer_paths_from_package_json())
+ ins, outs = pm.calc_node_modules_inouts()
+ unit.on_node_modules(["IN"] + sorted(ins) + ["OUT"] + sorted(outs))
+
+
+def on_ts_configure(unit, tsconfig_path):
+ abs_tsconfig_path = unit.resolve(unit.resolve_arc_path(tsconfig_path))
+ if not abs_tsconfig_path:
+ raise Exception("tsconfig not found: {}".format(tsconfig_path))
+
+ tsconfig = TsConfig.load(abs_tsconfig_path)
+ tsconfig.validate()
+
+ unit.set(["TS_CONFIG_ROOT_DIR", tsconfig.compiler_option("rootDir")])
+ unit.set(["TS_CONFIG_OUT_DIR", tsconfig.compiler_option("outDir")])
+ unit.set(["TS_CONFIG_SOURCE_MAP", to_yesno(tsconfig.compiler_option("sourceMap"))])
+ unit.set(["TS_CONFIG_DECLARATION", to_yesno(tsconfig.compiler_option("declaration"))])
+ unit.set(["TS_CONFIG_DECLARATION_MAP", to_yesno(tsconfig.compiler_option("declarationMap"))])
+ unit.set(["TS_CONFIG_PRESERVE_JSX", to_yesno(tsconfig.compiler_option("jsx") == "preserve")])
diff --git a/build/plugins/print_module_type.py b/build/plugins/print_module_type.py
new file mode 100644
index 0000000000..cc54c55675
--- /dev/null
+++ b/build/plugins/print_module_type.py
@@ -0,0 +1,5 @@
+def onprint_module_type(unit, *args):
+ filepath = unit.get('KIWI_OUT_FILE')
+ if len(args) >= 2 and filepath is not None:
+ with open(filepath, "a") as file_handler:
+ print >>file_handler, "{0} {1} {2}".format(args[0], args[1], unit.path())
diff --git a/build/plugins/pybuild.py b/build/plugins/pybuild.py
new file mode 100644
index 0000000000..f32a2d39a0
--- /dev/null
+++ b/build/plugins/pybuild.py
@@ -0,0 +1,648 @@
+import os
+import collections
+from hashlib import md5
+
+import ymake
+from _common import stripext, rootrel_arc_src, tobuilddir, listid, resolve_to_ymake_path, generate_chunks, pathid
+
+
+YA_IDE_VENV_VAR = 'YA_IDE_VENV'
+PY_NAMESPACE_PREFIX = 'py/namespace'
+BUILTIN_PROTO = 'builtin_proto'
+
+def is_arc_src(src, unit):
+ return (
+ src.startswith('${ARCADIA_ROOT}/') or
+ src.startswith('${CURDIR}/') or
+ unit.resolve_arc_path(src).startswith('$S/')
+ )
+
+def is_extended_source_search_enabled(path, unit):
+ if not is_arc_src(path, unit):
+ return False
+ if unit.get('NO_EXTENDED_SOURCE_SEARCH') == 'yes':
+ return False
+ return True
+
+def to_build_root(path, unit):
+ if is_arc_src(path, unit):
+ return '${ARCADIA_BUILD_ROOT}/' + rootrel_arc_src(path, unit)
+ return path
+
+def uniq_suffix(path, unit):
+ upath = unit.path()
+ if '/' not in path:
+ return ''
+ return '.{}'.format(pathid(path)[:4])
+
+def pb2_arg(suf, path, mod, unit):
+ return '{path}__int__{suf}={mod}{modsuf}'.format(
+ path=stripext(to_build_root(path, unit)),
+ suf=suf,
+ mod=mod,
+ modsuf=stripext(suf)
+ )
+
+def proto_arg(path, mod, unit):
+ return '{}.proto={}'.format(stripext(to_build_root(path, unit)), mod)
+
+def pb_cc_arg(suf, path, unit):
+ return '{}{suf}'.format(stripext(to_build_root(path, unit)), suf=suf)
+
+def ev_cc_arg(path, unit):
+ return '{}.ev.pb.cc'.format(stripext(to_build_root(path, unit)))
+
+def ev_arg(path, mod, unit):
+ return '{}__int___ev_pb2.py={}_ev_pb2'.format(stripext(to_build_root(path, unit)), mod)
+
+def mangle(name):
+ if '.' not in name:
+ return name
+ return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))
+
+
+def parse_pyx_includes(filename, path, source_root, seen=None):
+ normpath = lambda *x: os.path.normpath(os.path.join(*x))
+
+ abs_path = normpath(source_root, filename)
+ seen = seen or set()
+ if abs_path in seen:
+ return
+ seen.add(abs_path)
+
+ if not os.path.exists(abs_path):
+ # File might be missing, because it might be generated
+ return
+
+ with open(abs_path, 'rb') as f:
+        # Don't parse cimports etc. - that is irrelevant for cython, it's the linker's work
+ includes = ymake.parse_cython_includes(f.read())
+
+ abs_dirname = os.path.dirname(abs_path)
+    # All includes are relative to the file which includes them
+ path_dirname = os.path.dirname(path)
+ file_dirname = os.path.dirname(filename)
+
+ for incfile in includes:
+ abs_path = normpath(abs_dirname, incfile)
+ if os.path.exists(abs_path):
+ incname, incpath = normpath(file_dirname, incfile), normpath(path_dirname, incfile)
+ yield (incname, incpath)
+ # search for includes in the included files
+ for e in parse_pyx_includes(incname, incpath, source_root, seen):
+ yield e
+ else:
+            # The include might be relative to the arcadia root or to the cython include dirs.
+            # Don't treat such a file as missing, because there must be a PEERDIR on the py_library
+            # which contains it.
+ for path in [
+ source_root,
+ source_root + "/contrib/tools/cython/Cython/Includes",
+ ]:
+ if os.path.exists(normpath(path, incfile)):
+ break
+ else:
+ ymake.report_configure_error("'{}' includes missing file: {} ({})".format(path, incfile, abs_path))
+
+def has_pyx(args):
+ return any(arg.endswith('.pyx') for arg in args)
+
+def get_srcdir(path, unit):
+ return rootrel_arc_src(path, unit)[:-len(path)].rstrip('/')
+
+def add_python_lint_checks(unit, py_ver, files):
+ def get_resolved_files():
+ resolved_files = []
+ for path in files:
+ resolved = unit.resolve_arc_path([path])
+ if resolved.startswith('$S'): # path was resolved as source file.
+ resolved_files.append(resolved)
+ return resolved_files
+
+ if unit.get('LINT_LEVEL_VALUE') == "none":
+
+ no_lint_allowed_paths = (
+ "contrib/",
+ "devtools/",
+ "junk/",
+ # temporary allowed, TODO: remove
+ "taxi/uservices/",
+ "travel/",
+ "market/report/lite/", # MARKETOUT-38662, deadline: 2021-08-12
+ "passport/backend/oauth/", # PASSP-35982
+ )
+
+ upath = unit.path()[3:]
+
+ if not upath.startswith(no_lint_allowed_paths):
+ ymake.report_configure_error("NO_LINT() is allowed only in " + ", ".join(no_lint_allowed_paths))
+
+ if files and unit.get('LINT_LEVEL_VALUE') not in ("none", "none_internal"):
+ resolved_files = get_resolved_files()
+ flake8_cfg = 'build/config/tests/flake8/flake8.conf'
+ unit.onadd_check(["flake8.py{}".format(py_ver), flake8_cfg] + resolved_files)
+
+
+def is_py3(unit):
+ return unit.get("PYTHON3") == "yes"
+
+
+def on_py_program(unit, *args):
+ py_program(unit, is_py3(unit))
+
+
+def py_program(unit, py3):
+ """
+ Documentation: https://wiki.yandex-team.ru/devtools/commandsandvars/py_srcs/#modulpyprogramimakrospymain
+ """
+ if py3:
+ peers = ['library/python/runtime_py3/main']
+ if unit.get('PYTHON_SQLITE3') != 'no':
+ peers.append('contrib/tools/python3/src/Modules/_sqlite')
+ else:
+ peers = ['library/python/runtime/main']
+ if unit.get('PYTHON_SQLITE3') != 'no':
+ peers.append('contrib/tools/python/src/Modules/_sqlite')
+ unit.onpeerdir(peers)
+ if unit.get('MODULE_TYPE') == 'PROGRAM': # can not check DLL
+ unit.onadd_check_py_imports()
+
+
+def onpy_srcs(unit, *args):
+ """
+ @usage PY_SRCS({| CYTHON_C} { | TOP_LEVEL | NAMESPACE ns} Files...)
+
+    PY_SRCS() is the rule for building extended versions of the Python interpreter that carry all application code inside the executable. Such a build produces a single executable rather than shared libraries; in particular, modules pulled in via the import statement are not collected as separate files.
+    The main disadvantage is the lack of IDE support; there is also no readline yet.
+    The application can be built from any sources a C library can be built from, plus (via PY_SRCS) .py, .pyx, .proto and .swg files.
+    C extensions for Python generated from .pyx and .swg files are registered as built-in modules, and .py sources are stored as static data: when the interpreter starts, the initialization code adds a custom loader for these modules to sys.meta_path.
+    By default .pyx files are built as C++ extensions. To build them as C (similar to BUILDWITH_CYTHON_C, but with the ability to specify a namespace), pass the CYTHON_C directive.
+    Building .pyx files registers the modules automatically, so you do not need to call PY_REGISTER for them.
+    __init__.py is never required, but if present (and listed in PY_SRCS), it will be imported when you import modules of the package that contains it.
+
+ Example of library declaration with PY_SRCS():
+ PY2_LIBRARY(mymodule)
+ PY_SRCS(a.py sub/dir/b.py e.proto sub/dir/f.proto c.pyx sub/dir/d.pyx g.swg sub/dir/h.swg)
+ END()
+
+    PY_REGISTER honors Python2 and Python3 differences and adjusts itself to the Python version of the current module
+ Documentation: https://wiki.yandex-team.ru/arcadia/python/pysrcs/#modulipylibrarypy3libraryimakrospysrcs
+ """
+ # Each file arg must either be a path, or "${...}/buildpath=modname", where
+ # "${...}/buildpath" part will be used as a file source in a future macro,
+ # and "modname" will be used as a module name.
+
+ upath = unit.path()[3:]
+ py3 = is_py3(unit)
+ py_main_only = unit.get('PROCESS_PY_MAIN_ONLY')
+ with_py = not unit.get('PYBUILD_NO_PY')
+ with_pyc = not unit.get('PYBUILD_NO_PYC')
+ in_proto_library = unit.get('PY_PROTO') or unit.get('PY3_PROTO')
+ venv = unit.get(YA_IDE_VENV_VAR)
+ need_gazetteer_peerdir = False
+ trim = 0
+
+ if not upath.startswith('contrib/tools/python') and not upath.startswith('library/python/runtime') and unit.get('NO_PYTHON_INCLS') != 'yes':
+ unit.onpeerdir(['contrib/libs/python'])
+
+ unit_needs_main = unit.get('MODULE_TYPE') in ('PROGRAM', 'DLL')
+ if unit_needs_main:
+ py_program(unit, py3)
+
+ py_namespace_value = unit.get('PY_NAMESPACE_VALUE')
+ if py_namespace_value == ".":
+ ns = ""
+ else:
+ ns = (unit.get('PY_NAMESPACE_VALUE') or upath.replace('/', '.')) + '.'
+
+ cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes'
+ cythonize_py = False
+ optimize_proto = unit.get('OPTIMIZE_PY_PROTOS_FLAG') == 'yes'
+
+ cython_directives = []
+ if cython_coverage:
+ cython_directives += ['-X', 'linetrace=True']
+
+ pyxs_c = []
+ pyxs_c_h = []
+ pyxs_c_api_h = []
+ pyxs_cpp = []
+ pyxs = pyxs_cpp
+ swigs_c = []
+ swigs_cpp = []
+ swigs = swigs_cpp
+ pys = []
+ protos = []
+ evs = []
+ fbss = []
+ py_namespaces = {}
+
+ dump_dir = unit.get('PYTHON_BUILD_DUMP_DIR')
+ dump_output = None
+ if dump_dir:
+ import thread
+ pid = os.getpid()
+ tid = thread.get_ident()
+ dump_name = '{}-{}.dump'.format(pid, tid)
+ dump_output = open(os.path.join(dump_dir, dump_name), 'a')
+
+ args = iter(args)
+ for arg in args:
+ # Namespace directives.
+ if arg == 'TOP_LEVEL':
+ ns = ''
+ elif arg == 'NAMESPACE':
+ ns = next(args) + '.'
+ # Cython directives.
+ elif arg == 'CYTHON_C':
+ pyxs = pyxs_c
+ elif arg == 'CYTHON_C_H':
+ pyxs = pyxs_c_h
+ elif arg == 'CYTHON_C_API_H':
+ pyxs = pyxs_c_api_h
+ elif arg == 'CYTHON_CPP':
+ pyxs = pyxs_cpp
+ elif arg == 'CYTHON_DIRECTIVE':
+ cython_directives += ['-X', next(args)]
+ elif arg == 'CYTHONIZE_PY':
+ cythonize_py = True
+ # SWIG.
+ elif arg == 'SWIG_C':
+ swigs = swigs_c
+ elif arg == 'SWIG_CPP':
+ swigs = swigs_cpp
+ # Unsupported but legal PROTO_LIBRARY arguments.
+ elif arg == 'GLOBAL' or not in_proto_library and arg.endswith('.gztproto'):
+ pass
+ elif arg == '_MR':
+ # GLOB support: convert arcadia-root-relative paths to module-relative
+ # srcs are assumed to start with ${ARCADIA_ROOT}
+ trim = len(unit.path()) + 14
+ # Sources.
+ else:
+ main_mod = arg == 'MAIN'
+ if main_mod:
+ arg = next(args)
+
+ if '=' in arg:
+ main_py = False
+ path, mod = arg.split('=', 1)
+ else:
+ if trim:
+ arg = arg[trim:]
+ if arg.endswith('.gztproto'):
+ need_gazetteer_peerdir = True
+ path = '{}.proto'.format(arg[:-9])
+ else:
+ path = arg
+ main_py = (path == '__main__.py' or path.endswith('/__main__.py'))
+ if not py3 and unit_needs_main and main_py:
+ mod = '__main__'
+ else:
+ if arg.startswith('../'):
+ ymake.report_configure_error('PY_SRCS item starts with "../": {!r}'.format(arg))
+ if arg.startswith('/'):
+ ymake.report_configure_error('PY_SRCS item starts with "/": {!r}'.format(arg))
+ continue
+ mod_name = stripext(arg).replace('/', '.')
+ if py3 and path.endswith('.py') and is_extended_source_search_enabled(path, unit):
+ # Dig out real path from the file path. Unit.path is not enough because of SRCDIR and ADDINCL
+ root_rel_path = rootrel_arc_src(path, unit)
+ mod_root_path = root_rel_path[:-(len(path) + 1)]
+ py_namespaces.setdefault(mod_root_path, set()).add(ns if ns else '.')
+ mod = ns + mod_name
+
+ if main_mod:
+ py_main(unit, mod + ":main")
+ elif py3 and unit_needs_main and main_py:
+ py_main(unit, mod)
+
+ if py_main_only:
+ continue
+
+ if py3 and mod == '__main__':
+ ymake.report_configure_error('TOP_LEVEL __main__.py is not allowed in PY3_PROGRAM')
+
+ pathmod = (path, mod)
+
+ if dump_output is not None:
+ dump_output.write('{path}\t{module}\n'.format(path=rootrel_arc_src(path, unit), module=mod))
+
+ if path.endswith('.py'):
+ if cythonize_py:
+ pyxs.append(pathmod)
+ else:
+ pys.append(pathmod)
+ elif path.endswith('.pyx'):
+ pyxs.append(pathmod)
+ elif path.endswith('.proto'):
+ protos.append(pathmod)
+ elif path.endswith('.ev'):
+ evs.append(pathmod)
+ elif path.endswith('.swg'):
+ swigs.append(pathmod)
+            # Allow pyi files in PY_SRCS for autocomplete in IDE, but skip them during building
+ elif path.endswith('.pyi'):
+ pass
+ elif path.endswith('.fbs'):
+ fbss.append(pathmod)
+ else:
+ ymake.report_configure_error('in PY_SRCS: unrecognized arg {!r}'.format(path))
+
+ if dump_output is not None:
+ dump_output.close()
+
+ if pyxs:
+ files2res = set()
+        # The include map stores the files included by the pyx file being processed,
+        # so that the source code of an included file can be found inside the file
+        # generated for that pyx file.
+ include_map = collections.defaultdict(set)
+
+ if cython_coverage:
+ def process_pyx(filename, path, out_suffix, noext):
+ # skip generated files
+ if not is_arc_src(path, unit):
+ return
+ # source file
+ files2res.add((filename, path))
+ # generated
+ if noext:
+ files2res.add((os.path.splitext(filename)[0] + out_suffix, os.path.splitext(path)[0] + out_suffix))
+ else:
+ files2res.add((filename + out_suffix, path + out_suffix))
+ # used includes
+ for entry in parse_pyx_includes(filename, path, unit.resolve('$S')):
+ files2res.add(entry)
+ include_arc_rel = entry[0]
+ include_map[filename].add(include_arc_rel)
+ else:
+ def process_pyx(filename, path, out_suffix, noext):
+ pass
+
+ for pyxs, cython, out_suffix, noext in [
+ (pyxs_c, unit.on_buildwith_cython_c_dep, ".c", False),
+ (pyxs_c_h, unit.on_buildwith_cython_c_h, ".c", True),
+ (pyxs_c_api_h, unit.on_buildwith_cython_c_api_h, ".c", True),
+ (pyxs_cpp, unit.on_buildwith_cython_cpp_dep, ".cpp", False),
+ ]:
+ for path, mod in pyxs:
+ filename = rootrel_arc_src(path, unit)
+ cython_args = [path]
+
+ dep = path
+ if path.endswith('.py'):
+ pxd = '/'.join(mod.split('.')) + '.pxd'
+ if unit.resolve_arc_path(pxd):
+ dep = pxd
+ cython_args.append(dep)
+
+ cython_args += [
+ '--module-name', mod,
+ '--init-suffix', mangle(mod),
+ '--source-root', '${ARCADIA_ROOT}',
+ # set arcadia root relative __file__ for generated modules
+ '-X', 'set_initial_path={}'.format(filename),
+ ] + cython_directives
+
+ cython(cython_args)
+ py_register(unit, mod, py3)
+ process_pyx(filename, path, out_suffix, noext)
+
+ if files2res:
+ # Compile original and generated sources into target for proper cython coverage calculation
+ unit.onresource_files([x for name, path in files2res for x in ('DEST', name, path)])
+
+ if include_map:
+ data = []
+ prefix = 'resfs/cython/include'
+ for line in sorted('{}/{}={}'.format(prefix, filename, ':'.join(sorted(files))) for filename, files in include_map.iteritems()):
+ data += ['-', line]
+ unit.onresource(data)
+
+ for swigs, on_swig_python in [
+ (swigs_c, unit.on_swig_python_c),
+ (swigs_cpp, unit.on_swig_python_cpp),
+ ]:
+ for path, mod in swigs:
+ # Make output prefix basename match swig module name.
+ prefix = path[:path.rfind('/') + 1] + mod.rsplit('.', 1)[-1]
+ swg_py = '{}/{}/{}.py'.format('${ARCADIA_BUILD_ROOT}', upath, prefix)
+ on_swig_python([path, prefix])
+ onpy_register(unit, mod + '_swg')
+ onpy_srcs(unit, swg_py + '=' + mod)
+
+ if pys:
+ pys_seen = set()
+ pys_dups = {m for _, m in pys if (m in pys_seen or pys_seen.add(m))}
+ if pys_dups:
+            ymake.report_configure_error('Duplicate(s) found in the PY_SRCS macro: {}'.format(pys_dups))
+
+ res = []
+
+ if py3:
+ mod_list_md5 = md5()
+ for path, mod in pys:
+ mod_list_md5.update(mod)
+ if not (venv and is_extended_source_search_enabled(path, unit)):
+ dest = 'py/' + mod.replace('.', '/') + '.py'
+ if with_py:
+ res += ['DEST', dest, path]
+ if with_pyc:
+ root_rel_path = rootrel_arc_src(path, unit)
+ dst = path + uniq_suffix(path, unit)
+ unit.on_py3_compile_bytecode([root_rel_path + '-', path, dst])
+ res += ['DEST', dest + '.yapyc3', dst + '.yapyc3']
+
+ if py_namespaces:
+                # Note: Add md5 to the key to prevent key collisions if two or more PY_SRCS() are used in the same ya.make
+ ns_res = []
+ for path, ns in sorted(py_namespaces.items()):
+ key = '{}/{}/{}'.format(PY_NAMESPACE_PREFIX, mod_list_md5.hexdigest(), path)
+ namespaces = ':'.join(sorted(ns))
+ ns_res += ['-', '{}="{}"'.format(key, namespaces)]
+ unit.onresource(ns_res)
+
+ unit.onresource_files(res)
+ add_python_lint_checks(unit, 3, [path for path, mod in pys] + unit.get(['_PY_EXTRA_LINT_FILES_VALUE']).split())
+ else:
+ for path, mod in pys:
+ root_rel_path = rootrel_arc_src(path, unit)
+ if with_py:
+ key = '/py_modules/' + mod
+ res += [
+ path, key,
+ '-', 'resfs/src/{}={}'.format(key, root_rel_path),
+ ]
+ if with_pyc:
+ src = unit.resolve_arc_path(path) or path
+ dst = path + uniq_suffix(path, unit)
+ unit.on_py_compile_bytecode([root_rel_path + '-', src, dst])
+ res += [dst + '.yapyc', '/py_code/' + mod]
+
+ unit.onresource(res)
+ add_python_lint_checks(unit, 2, [path for path, mod in pys] + unit.get(['_PY_EXTRA_LINT_FILES_VALUE']).split())
+
+ use_vanilla_protoc = unit.get('USE_VANILLA_PROTOC') == 'yes'
+ if use_vanilla_protoc:
+ cpp_runtime_path = 'contrib/libs/protobuf_std'
+ py_runtime_path = 'contrib/python/protobuf_std'
+ builtin_proto_path = cpp_runtime_path + '/' + BUILTIN_PROTO
+ else:
+ cpp_runtime_path = 'contrib/libs/protobuf'
+ py_runtime_path = 'contrib/python/protobuf'
+ builtin_proto_path = cpp_runtime_path + '/' + BUILTIN_PROTO
+
+ if protos:
+ if not upath.startswith(py_runtime_path) and not upath.startswith(builtin_proto_path):
+ unit.onpeerdir(py_runtime_path)
+
+ unit.onpeerdir(unit.get("PY_PROTO_DEPS").split())
+
+ proto_paths = [path for path, mod in protos]
+ unit.on_generate_py_protos_internal(proto_paths)
+ unit.onpy_srcs([
+ pb2_arg(py_suf, path, mod, unit)
+ for path, mod in protos
+ for py_suf in unit.get("PY_PROTO_SUFFIXES").split()
+ ])
+
+ if optimize_proto and need_gazetteer_peerdir:
+ unit.onpeerdir(['kernel/gazetteer/proto'])
+
+ if evs:
+ unit.onpeerdir([cpp_runtime_path])
+ unit.on_generate_py_evs_internal([path for path, mod in evs])
+ unit.onpy_srcs([ev_arg(path, mod, unit) for path, mod in evs])
+
+ if fbss:
+ unit.onpeerdir(unit.get('_PY_FBS_DEPS').split())
+ pysrc_base_name = listid(fbss)
+ unit.onfbs_to_pysrc([pysrc_base_name] + [path for path, _ in fbss])
+ unit.onsrcs(['GLOBAL', '{}.fbs.pysrc'.format(pysrc_base_name)])
+
+
+def _check_test_srcs(*args):
+ used = set(args) & {"NAMESPACE", "TOP_LEVEL", "__main__.py"}
+ if used:
+ param = list(used)[0]
+        ymake.report_configure_error('in TEST_SRCS: you cannot use {} here - it would break the testing machinery'.format(param))
+
+
+def ontest_srcs(unit, *args):
+ _check_test_srcs(*args)
+ if unit.get('PY3TEST_BIN' if is_py3(unit) else 'PYTEST_BIN') != 'no':
+ unit.onpy_srcs(["NAMESPACE", "__tests__"] + list(args))
+
+
+def onpy_doctests(unit, *args):
+ """
+ @usage PY_DOCTEST(Packages...)
+
+    Add doctests for the specified Python packages to the test
+ The packages should be part of a test (listed as sources of the test or its PEERDIRs).
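+
+    Example (hypothetical package names): PY_DOCTEST(mylib.core mylib.utils)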
+ """
+ if unit.get('PY3TEST_BIN' if is_py3(unit) else 'PYTEST_BIN') != 'no':
+ unit.onresource(['-', 'PY_DOCTEST_PACKAGES="{}"'.format(' '.join(args))])
+
+
+def py_register(unit, func, py3):
+ if py3:
+ unit.on_py3_register([func])
+ else:
+ unit.on_py_register([func])
+
+
+def onpy_register(unit, *args):
+ """
+ @usage: PY_REGISTER([package.]module_name)
+
+    Python knows which built-in modules can be imported because they are registered either at build time or at interpreter startup.
+ All modules from the sources listed in PY_SRCS() are registered automatically.
+ To register the modules from the sources in the SRCS(), you need to use PY_REGISTER().
+
+ PY_REGISTER(module_name) initializes module globally via call to initmodule_name()
+ PY_REGISTER(package.module_name) initializes module in the specified package
+ It renames its init function with CFLAGS(-Dinitmodule_name=init7package11module_name)
+ or CFLAGS(-DPyInit_module_name=PyInit_7package11module_name)
+
+ Documentation: https://wiki.yandex-team.ru/arcadia/python/pysrcs/#makrospyregister
+ """
+
+ py3 = is_py3(unit)
+
+ for name in args:
+ assert '=' not in name, name
+ py_register(unit, name, py3)
+ if '.' in name:
+ shortname = name.rsplit('.', 1)[1]
+ if py3:
+ unit.oncflags(['-DPyInit_{}=PyInit_{}'.format(shortname, mangle(name))])
+ else:
+ unit.oncflags(['-Dinit{}=init{}'.format(shortname, mangle(name))])
+
+
+def py_main(unit, arg):
+ if unit.get('IGNORE_PY_MAIN'):
+ return
+ unit_needs_main = unit.get('MODULE_TYPE') in ('PROGRAM', 'DLL')
+ if unit_needs_main:
+ py_program(unit, is_py3(unit))
+ unit.onresource(['-', 'PY_MAIN={}'.format(arg)])
+
+
+def onpy_main(unit, arg):
+ """
+ @usage: PY_MAIN(package.module[:func])
+
+ Specifies the module or function from which to start executing a python program
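+
+    Example (hypothetical module name): PY_MAIN(myapp.cli:run); if no ":func" is given,
+    ":main" is appended, so PY_MAIN(myapp.cli) is equivalent to PY_MAIN(myapp.cli:main)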
+
+ Documentation: https://wiki.yandex-team.ru/arcadia/python/pysrcs/#modulipyprogrampy3programimakrospymain
+ """
+
+ arg = arg.replace('/', '.')
+
+ if ':' not in arg:
+ arg += ':main'
+
+ py_main(unit, arg)
+
+
+def onpy_constructor(unit, arg):
+ """
+ @usage: PY_CONSTRUCTOR(package.module[:func])
+
+ Specifies the module or function which will be started before python's main()
+ init() is expected in the target module if no function is specified
+ Can be considered as __attribute__((constructor)) for python
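+
+    Example (hypothetical module name): PY_CONSTRUCTOR(myapp.setup:configure)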
+ """
+ if ':' not in arg:
+ arg = arg + '=init'
+ else:
+        # strings are immutable, so rebuild the value instead of assigning by index
+        arg = arg.replace(':', '=', 1)
+ unit.onresource(['-', 'py/constructors/{}'.format(arg)])
+
+def onpy_enums_serialization(unit, *args):
+ ns = ''
+ args = iter(args)
+ for arg in args:
+ # Namespace directives.
+ if arg == 'NAMESPACE':
+ ns = next(args)
+ else:
+ unit.on_py_enum_serialization_to_json(arg)
+ unit.on_py_enum_serialization_to_py(arg)
+ filename = arg.rsplit('.', 1)[0] + '.py'
+ if len(ns) != 0:
+ onpy_srcs(unit, 'NAMESPACE', ns, filename)
+ else:
+ onpy_srcs(unit, filename)
+
+def oncpp_enums_serialization(unit, *args):
+ args = iter(args)
+ for arg in args:
+ # Namespace directives.
+ if arg == 'NAMESPACE':
+ next(args)
+ else:
+ unit.ongenerate_enum_serialization_with_header(arg)
diff --git a/build/plugins/res.py b/build/plugins/res.py
new file mode 100644
index 0000000000..a937caba81
--- /dev/null
+++ b/build/plugins/res.py
@@ -0,0 +1,106 @@
+from _common import iterpair, listid, pathid, rootrel_arc_src, tobuilddir, filter_out_by_keyword
+
+
+def split(lst, limit):
+    # paths are specified with a replaceable prefix
+    # the real length is unknown at the moment, that's why we use root_length
+    # as a rough estimate
+    root_length = 200
+    filepath = None
+    length = 0
+    bucket = []
+
+    for item in lst:
+        if filepath:
+            length += root_length + len(filepath) + len(item)
+            if length > limit and bucket:
+                yield bucket
+                bucket = []
+                length = 0
+
+ bucket.append(filepath)
+ bucket.append(item)
+ filepath = None
+ else:
+ filepath = item
+
+ if bucket:
+ yield bucket
+
+
+def remove_prefix(text, prefix):
+ if text.startswith(prefix):
+ return text[len(prefix):]
+ return text
+
+
+def onfat_resource(unit, *args):
+ unit.onpeerdir(['library/cpp/resource'])
+
+    # Since the maximum length of the lpCommandLine string for CreateProcess is about 8k characters on Windows,
+    # we make several calls to rescompiler
+ # https://msdn.microsoft.com/ru-ru/library/windows/desktop/ms682425.aspx
+ for part_args in split(args, 8000):
+ output = listid(part_args) + '.cpp'
+ inputs = [x for x, y in iterpair(part_args) if x != '-']
+ if inputs:
+ inputs = ['IN'] + inputs
+
+ unit.onrun_program(['tools/rescompiler', output] + part_args + inputs + ['OUT_NOAUTO', output])
+ unit.onsrcs(['GLOBAL', output])
+
+
+def onresource_files(unit, *args):
+ """
+ @usage: RESOURCE_FILES([DONT_PARSE] [PREFIX {prefix}] [STRIP prefix_to_strip] {path})
+
+ This macro expands into
+ RESOURCE([DONT_PARSE] {path} resfs/file/{prefix}{path}
+ - resfs/src/resfs/file/{prefix}{remove_prefix(path, prefix_to_strip)}={rootrel_arc_src(path)}
+ )
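+
+    For example (hypothetical paths), RESOURCE_FILES(PREFIX data/ templates/index.html)
+    makes the file available under the key resfs/file/data/templates/index.html.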
+
+ resfs/src/{key} stores a source root (or build root) relative path of the
+ source of the value of the {key} resource.
+
+ resfs/file/{key} stores any value whose source was a file on a filesystem.
+ resfs/src/resfs/file/{key} must store its path.
+
+ DONT_PARSE disables parsing for source code files (determined by extension)
+    Please don't abuse it: use a separate DONT_PARSE macro call only for files that would otherwise be parsed
+
+ This form is for use from other plugins:
+ RESOURCE_FILES([DEST {dest}] {path}) expands into RESOURCE({path} resfs/file/{dest})
+
+ @see: https://wiki.yandex-team.ru/devtools/commandsandvars/resourcefiles/
+ """
+ prefix = ''
+ prefix_to_strip = None
+ dest = None
+ res = []
+ first = 0
+
+ if args and not unit.enabled('_GO_MODULE'):
+ # GO_RESOURCE currently doesn't support DONT_PARSE
+ res.append('DONT_PARSE')
+
+ if args and args[0] == 'DONT_PARSE':
+ first = 1
+
+ args = iter(args[first:])
+ for arg in args:
+ if arg == 'PREFIX':
+ prefix, dest = next(args), None
+ elif arg == 'DEST':
+ dest, prefix = next(args), None
+ elif arg == 'STRIP':
+ prefix_to_strip = next(args)
+ else:
+ path = arg
+ key = 'resfs/file/' + (dest or (prefix + (path if not prefix_to_strip else remove_prefix(path, prefix_to_strip))))
+ src = 'resfs/src/{}={}'.format(key, rootrel_arc_src(path, unit))
+ res += ['-', src, path, key]
+
+ if unit.enabled('_GO_MODULE'):
+ unit.on_go_resource(res)
+ else:
+ unit.onresource(res)
diff --git a/build/plugins/rodata.py b/build/plugins/rodata.py
new file mode 100644
index 0000000000..3ecb0f9a83
--- /dev/null
+++ b/build/plugins/rodata.py
@@ -0,0 +1,168 @@
+import argparse
+import collections
+import os
+
+import _common as common
+import _import_wrapper as iw
+
+
+class ROData(iw.CustomCommand):
+ def __init__(self, path, unit):
+ self._path = path
+ self._flags = []
+
+ prefix = unit.get('ASM_PREFIX')
+
+ if prefix:
+ self._flags += ['--prefix=' + prefix]
+
+ self._pre_include = []
+
+ flags = unit.get('YASM_FLAGS')
+ if flags:
+ self.parse_flags(path, unit, collections.deque(flags.split(' ')))
+
+ if unit.enabled('DARWIN') or unit.enabled('IOS'):
+ self._platform = ['DARWIN', 'UNIX']
+ self._fmt = 'macho'
+ elif unit.enabled('WIN64') or unit.enabled('CYGWIN'):
+ self._platform = ['WIN64']
+ self._fmt = 'win'
+ elif unit.enabled('WIN32'):
+ self._platform = ['WIN32']
+ self._fmt = 'win'
+ else:
+ self._platform = ['UNIX']
+ self._fmt = 'elf'
+
+ if 'elf' in self._fmt:
+ self._flags += ['-g', 'dwarf2']
+
+ self._fmt += unit.get('HARDWARE_ARCH')
+ self._type = unit.get('HARDWARE_TYPE')
+
+ if unit.enabled('DARWIN') or unit.enabled('IOS') or (unit.enabled('WINDOWS') and unit.enabled('ARCH_TYPE_32')):
+ self._prefix = '_'
+ else:
+ self._prefix = ''
+
+ def parse_flags(self, path, unit, flags):
+ while flags:
+ flag = flags.popleft()
+ if flag.startswith('-I'):
+ raise Exception('Use ADDINCL macro')
+
+ if flag.startswith('-P'):
+ preinclude = flag[2:] or flags.popleft()
+ self._pre_include += unit.resolve_include([(get_retargeted(path, unit)), preinclude])
+ self._flags += ['-P', preinclude]
+ continue
+
+ self._flags.append(flag)
+
+ def descr(self):
+ return 'AS', self._path, 'light-green'
+
+ def flags(self):
+ return self._flags + self._platform + [self._fmt, self._type]
+
+ def tools(self):
+ return ['contrib/tools/yasm']
+
+ def input(self):
+ return common.make_tuples(self._pre_include + [self._path])
+
+ def output(self):
+ return common.make_tuples([common.tobuilddir(common.stripext(self._path)) + '.o'])
+
+ def requested_vars(self):
+ return [('includes', '_ASM__INCLUDE')]
+
+ def run(self, extra_args, binary):
+ in_file = self.resolve_path(common.get(self.input, 0))
+ in_file_no_ext = common.stripext(in_file)
+ file_name = os.path.basename(in_file_no_ext)
+ file_size = os.path.getsize(in_file)
+ tmp_file = self.resolve_path(common.get(self.output, 0) + '.asm')
+
+ parser = argparse.ArgumentParser(prog='rodata.py', add_help=False)
+ parser.add_argument('--includes', help='module\'s addincls', nargs='*', required=False)
+ args = parser.parse_args(extra_args)
+ self._incl_dirs = args.includes
+
+ with open(tmp_file, 'w') as f:
+ f.write('global ' + self._prefix + file_name + '\n')
+ f.write('global ' + self._prefix + file_name + 'Size' + '\n')
+ f.write('SECTION .rodata ALIGN=16\n')
+ f.write(self._prefix + file_name + ':\nincbin "' + in_file + '"\n')
+ f.write('align 4, db 0\n')
+ f.write(self._prefix + file_name + 'Size:\ndd ' + str(file_size) + '\n')
+
+ if self._fmt.startswith('elf'):
+ f.write('size ' + self._prefix + file_name + ' ' + str(file_size) + '\n')
+ f.write('size ' + self._prefix + file_name + 'Size 4\n')
+
+ return self.do_run(binary, tmp_file)
+
+ def do_run(self, binary, path):
+ def plt():
+ for x in self._platform:
+ yield '-D'
+ yield x
+
+ def incls():
+ for x in self._incl_dirs:
+ yield '-I'
+ yield x
+
+ cmd = [binary, '-f', self._fmt] + list(plt()) + ['-D', '_' + self._type + '_', '-D_YASM_'] + self._flags + list(incls()) + ['-o', common.get(self.output, 0), path]
+ self.call(cmd)
+
+
+class RODataCXX(iw.CustomCommand):
+ def __init__(self, path, unit):
+ self._path = path
+ self._base = os.path.basename(common.stripext(self._path))
+
+ def descr(self):
+ return 'RD', self._path, 'light-green'
+
+ def input(self):
+ return common.make_tuples([self._path])
+
+ def main_out(self):
+ return common.tobuilddir(common.stripext(self._path)) + '.cpp'
+
+ def output(self):
+ return common.make_tuples([self.main_out()])
+
+ def run(self, extra_args, binary):
+ with open(self.resolve_path(self.main_out()), 'w') as f:
+            f.write('static_assert(sizeof(unsigned int) == 4, "oops, something went wrong");\n\n')
+ f.write('extern "C" {\n')
+ f.write(' extern const unsigned char ' + self._base + '[] = {\n')
+
+ cnt = 0
+
+ with open(self.resolve_path(self._path), 'r') as input:
+ for ch in input.read():
+ f.write('0x%02x, ' % ord(ch))
+
+ cnt += 1
+
+ if cnt % 50 == 1:
+ f.write('\n')
+
+ f.write(' };\n')
+ f.write(' extern const unsigned int ' + self._base + 'Size = sizeof(' + self._base + ');\n')
+ f.write('}\n')
+
+
+def ro_data(path, unit):
+ if unit.enabled('ARCH_AARCH64') or unit.enabled('ARCH_ARM') or unit.enabled('ARCH_PPC64LE'):
+ return RODataCXX(path, unit)
+
+ return ROData(path, unit)
+
+
+def init():
+ iw.addrule('rodata', ro_data)
diff --git a/build/plugins/sandbox_registry.py b/build/plugins/sandbox_registry.py
new file mode 100644
index 0000000000..dc1be399b3
--- /dev/null
+++ b/build/plugins/sandbox_registry.py
@@ -0,0 +1,21 @@
+import os
+
+import ymake
+
+
+def onregister_sandbox_import(unit, *args):
+ args = iter(args)
+ for path in args:
+ path = os.path.normpath(path)
+ source = unit.resolve_arc_path(path)
+ abs_source = unit.resolve(source)
+ if not os.path.exists(abs_source):
+            ymake.report_configure_error('REGISTER_SANDBOX_IMPORT: File or directory {} does not exist'.format(path))
+        split_path = path.split(os.sep)
+        l, r = 0, len(split_path)
+        if split_path[-1] == "__init__.py":
+            r -= 1
+        if not split_path[0]:
+            l += 1
+        path = ".".join(split_path[l:r])
+ unit.onresource(["-", "{}.{}={}".format("SANDBOX_TASK_REGISTRY", path, path)])
diff --git a/build/plugins/scarab_cant_clash.py b/build/plugins/scarab_cant_clash.py
new file mode 100644
index 0000000000..77dc303183
--- /dev/null
+++ b/build/plugins/scarab_cant_clash.py
@@ -0,0 +1,66 @@
+import _common as common
+
+
+def onacceleo(unit, *args):
+ if unit.get("YMAKE_JAVA_MODULES") == "yes":
+ return
+ flat, kv = common.sort_by_keywords(
+ {'XSD': -1, 'MTL': -1, 'MTL_ROOT': 1, 'LANG': -1, 'OUT': -1, 'OUT_NOAUTO': -1, 'OUTPUT_INCLUDES': -1, 'DEBUG': 0},
+ args
+ )
+
+ try:
+ mtlroot = kv['MTL_ROOT'][0]
+ except Exception:
+ mtlroot = unit.path().replace('$S/', '')
+
+ classpath = ['$SCARAB', ] # XXX special word for ya make to replace following paths with real classpath
+ classpath.append('tools/acceleo')
+
+ depends = []
+ if not unit.get('IDE_MSVS_CALL'):
+ for jar in classpath[1:]:
+ depends.append(jar)
+
+ classpath = ':'.join(classpath)
+
+ # Generate java cmd
+ cmd = [
+ '-classpath',
+ classpath,
+ '-Dfile.encoding=UTF-8',
+ 'ru.yandex.se.logsng.tool.Cli',
+ ]
+
+ for xsd in kv.get('XSD', []):
+ cmd += ['--xsd', xsd]
+
+ for mtl in kv.get('MTL', []):
+ cmd += ['--mtl', mtl]
+
+ for lang in kv.get('LANG', []):
+ cmd += ['--lang', lang]
+
+ cmd += ['--output-dir', unit.path().replace('$S/', '${ARCADIA_BUILD_ROOT}/')]
+ cmd += ['--build-root', '${ARCADIA_BUILD_ROOT}']
+ cmd += ['--source-root', '${ARCADIA_ROOT}']
+ cmd += ['--mtl-root', mtlroot]
+
+ # Generate RUN_JAVA args
+ run_java = cmd
+
+ if 'DEBUG' not in kv:
+ run_java += ['HIDE_OUTPUT']
+
+ inputs = kv.get('XSD', []) + kv.get('MTL', []) + kv.get('LANG', [])
+ if inputs:
+ run_java += ['IN'] + inputs
+
+ for k in 'OUT', 'OUT_NOAUTO', 'OUTPUT_INCLUDES':
+ if kv.get(k):
+ run_java += [k] + kv[k]
+
+ if depends:
+ run_java += ['TOOL'] + depends
+
+ unit.on_run_java(run_java)
diff --git a/build/plugins/split_codegen.py b/build/plugins/split_codegen.py
new file mode 100644
index 0000000000..f1e60bc142
--- /dev/null
+++ b/build/plugins/split_codegen.py
@@ -0,0 +1,43 @@
+from _common import sort_by_keywords
+
+# This is hard-coded many times as CppParts in various codegens
+_DEFAULT_CPP_PARTS = 20
+# See TCodegenParams::MethodStream usage in factor codegen
+_ADDITIONAL_STREAM_COUNT = 5
+
+
+def onsplit_codegen(unit, *args):
+ '''
+ @usage: SPLIT_CODEGEN(tool prefix opts... [OUT_NUM num] [OUTPUT_INCLUDES output_includes...])
+
+    Generates the specified number of .cpp parts plus one .h header file from a .in file
+
+    Supports keywords:
+    1. OUT_NUM <the number of generated Prefix.N.cpp files, default 25 (N varies from 0 to 24)>
+    2. OUTPUT_INCLUDES <paths to files that will be included into the generated files of the macro>
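+
+    Example (hypothetical tool and prefix):
+        SPLIT_CODEGEN(kernel/my_codegen/tool my_factors OUT_NUM 10)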
+ '''
+ keywords = {"OUT_NUM": 1}
+ flat_args, spec_args = sort_by_keywords(keywords, args)
+
+ num_outputs = _DEFAULT_CPP_PARTS + _ADDITIONAL_STREAM_COUNT
+ if "OUT_NUM" in spec_args:
+ num_outputs = int(spec_args["OUT_NUM"][0])
+
+ tool = flat_args[0]
+ prefix = flat_args[1]
+
+ cmd = [tool, prefix, 'OUT']
+ for num in range(num_outputs):
+ cmd.append('{}.{}.cpp'.format(prefix, num))
+
+ cpp_parts = int(num_outputs) - _ADDITIONAL_STREAM_COUNT
+ cpp_parts_args = ['--cpp-parts', str(cpp_parts)]
+
+ if len(flat_args) > 2:
+ if flat_args[2] != 'OUTPUT_INCLUDES':
+ cmd.append('OPTS')
+ cmd += cpp_parts_args + flat_args[2:]
+ else:
+ cmd += ['OPTS'] + cpp_parts_args
+
+ unit.on_split_codegen_base(cmd)
diff --git a/build/plugins/ssqls.py b/build/plugins/ssqls.py
new file mode 100644
index 0000000000..618cbc11bc
--- /dev/null
+++ b/build/plugins/ssqls.py
@@ -0,0 +1,40 @@
+from os.path import splitext
+
+import _import_wrapper as iw
+from _common import resolve_includes
+
+
+class SSQLSParser(object):
+ def __init__(self, path, unit):
+ s = unit.resolve_arc_path(path)
+ assert s.startswith('$S/') and s.endswith('.ssqls'), s
+ h = '$B/' + s[3:-6] + '.h'
+
+ import xml.etree.cElementTree as ET
+ try:
+ doc = ET.parse(path)
+ except ET.ParseError as e:
+ unit.message(['error', 'malformed XML {}: {}'.format(path, e)])
+ doc = ET.Element('DbObject')
+ xmls, headers = self.parse_doc(doc)
+ self._includes = resolve_includes(unit, s, xmls)
+ self._induced = {'cpp': [h], 'h': resolve_includes(unit, h, headers)}
+
+ @staticmethod
+ def parse_doc(doc):
+ paths = lambda nodes: filter(None, (e.get('path') for e in nodes))
+ includes = doc.findall('include')
+ ancestors = paths(doc.findall('ancestors/ancestor'))
+ headers = [e.text.strip('<>""') for e in includes]
+ headers += [splitext(s)[0] + '.h' for s in ancestors]
+ return paths(includes) + ancestors, headers
+
+ def includes(self):
+ return self._includes
+
+ def induced_deps(self):
+ return self._induced
+
+
+def init():
+ iw.addparser('ssqls', SSQLSParser)
diff --git a/build/plugins/suppressions.py b/build/plugins/suppressions.py
new file mode 100644
index 0000000000..6f4a1b4f03
--- /dev/null
+++ b/build/plugins/suppressions.py
@@ -0,0 +1,19 @@
+def onsuppressions(unit, *args):
+ """
+    SUPPRESSIONS() - allows you to specify files with suppression notation that will be used by
+    the address, leak or thread sanitizer runtime by default.
+ Use asan.supp filename for address sanitizer, lsan.supp for leak sanitizer
+ and tsan.supp for thread sanitizer suppressions respectively.
+ See https://clang.llvm.org/docs/AddressSanitizer.html#suppressing-memory-leaks
+ for details.
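+
+    Example (using the file names listed above): SUPPRESSIONS(asan.supp lsan.supp tsan.supp)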
+ """
+ import os
+
+ valid = ("asan.supp", "tsan.supp", "lsan.supp")
+
+ if unit.get("SANITIZER_TYPE") in ("leak", "address", "thread"):
+ for x in args:
+ if os.path.basename(x) not in valid:
+ unit.message(['error', "Invalid suppression filename: {} (any of the following is expected: {})".format(x, valid)])
+ return
+ unit.onsrcs(["GLOBAL"] + list(args))
diff --git a/build/plugins/swig.py b/build/plugins/swig.py
new file mode 100644
index 0000000000..32a37204a6
--- /dev/null
+++ b/build/plugins/swig.py
@@ -0,0 +1,164 @@
+import os
+import posixpath
+import re
+
+import _import_wrapper as iw
+import _common as common
+
+
+def init():
+ iw.addrule('swg', Swig)
+
+
+class Swig(iw.CustomCommand):
+ def __init__(self, path, unit):
+ self._tool = unit.get('SWIG_TOOL')
+ self._library_dir = unit.get('SWIG_LIBRARY') or 'contrib/tools/swig/Lib'
+ self._local_swig = unit.get('USE_LOCAL_SWIG') == "yes"
+
+ self._path = path
+ self._flags = ['-cpperraswarn']
+
+ self._bindir = common.tobuilddir(unit.path())
+ self._input_name = common.stripext(os.path.basename(self._path))
+
+ relpath = os.path.relpath(os.path.dirname(self._path), unit.path())
+
+ self._swig_lang = unit.get('SWIG_LANG')
+
+ if self._swig_lang != 'jni_java':
+ self._main_out = os.path.join(
+ self._bindir,
+ '' if relpath == '.' else relpath.replace('..', '__'),
+ self._input_name + '_wrap.swg.c')
+
+ if not path.endswith('.c.swg'):
+ self._flags += ['-c++']
+ self._main_out += 'pp'
+
+ # lang_specific_incl_dir = 'perl5' if self._swig_lang == 'perl' else self._swig_lang
+ lang_specific_incl_dir = self._swig_lang
+ if self._swig_lang == 'perl':
+ lang_specific_incl_dir = 'perl5'
+ elif self._swig_lang in ['jni_cpp', 'jni_java']:
+ lang_specific_incl_dir = 'java'
+ incl_dirs = [
+ "FOR", "swig",
+ posixpath.join(self._library_dir, lang_specific_incl_dir),
+ "FOR", "swig",
+ self._library_dir
+ ]
+ self._incl_dirs = ['$S', '$B'] + [posixpath.join('$S', d) for d in incl_dirs]
+
+ modname = unit.get('REALPRJNAME')
+ self._flags.extend(['-module', modname])
+
+ if not self._local_swig:
+ unit.onaddincl(incl_dirs)
+
+ if self._swig_lang == 'python':
+ self._out_name = modname + '.py'
+ self._flags.extend(['-interface', unit.get('MODULE_PREFIX') + modname])
+
+ if self._swig_lang == 'perl':
+ self._out_name = modname + '.pm'
+ self._flags.append('-shadow')
+ unit.onpeerdir(['build/platform/perl'])
+
+ if self._swig_lang in ['jni_cpp', 'java']:
+ self._out_header = os.path.splitext(self._main_out)[0] + '.h'
+ if (not unit.get('USE_SYSTEM_JDK')) and (unit.get('OS_ANDROID') != "yes"):
+ unit.onpeerdir(['contrib/libs/jdk'])
+
+ self._package = 'ru.yandex.' + os.path.dirname(self._path).replace('$S/', '').replace('$B/', '').replace('/', '.').replace('-', '_')
+ if self._swig_lang in ['jni_java', 'java']:
+ self._out_name = os.path.splitext(os.path.basename(self._path))[0] + '.jsrc'
+ elif self._swig_lang != 'jni_cpp':
+ self._flags.append('-' + self._swig_lang)
+
+ def descr(self):
+ return 'SW', self._path, 'yellow'
+
+ def flags(self):
+ return self._flags
+
+ def tools(self):
+ return ['contrib/tools/swig'] if not self._tool else []
+
+ def input(self):
+ return [
+ (self._path, [])
+ ]
+
+ def output(self):
+ if self._swig_lang == 'jni_java':
+ return [(common.join_intl_paths(self._bindir, self._out_name), [])]
+ elif self._swig_lang == 'jni_cpp':
+ return [(self._main_out, []), (self._out_header, [])]
+
+ return [
+ (self._main_out, []),
+ (common.join_intl_paths(self._bindir, self._out_name), (['noauto', 'add_to_outs'] if self._swig_lang != 'java' else [])),
+ ] + ([(self._out_header, [])] if self._swig_lang == 'java' else [])
+
+ def output_includes(self):
+ return [(self._out_header, [])] if self._swig_lang in ['java', 'jni_cpp'] else []
+
+ def run(self, extra_args, binary):
+ if self._local_swig:
+ binary = self._tool
+ return self.do_run_java(binary, self._path) if self._swig_lang in ['java', 'jni_cpp', 'jni_java'] else self.do_run(binary, self._path)
+
+ def _incl_flags(self):
+ return ['-I' + self.resolve_path(x) for x in self._incl_dirs]
+
+ def do_run(self, binary, path):
+ self.call([binary] + self._flags + [
+ '-o', self.resolve_path(common.get(self.output, 0)),
+ '-outdir', self.resolve_path(self._bindir)
+ ] + self._incl_flags() + [self.resolve_path(path)])
+
+ def do_run_java(self, binary, path):
+ import tarfile
+
+ outdir = self.resolve_path(self._bindir)
+ if self._swig_lang != 'jni_cpp':
+ java_srcs_dir = os.path.join(outdir, self._package.replace('.', '/'))
+ if not os.path.exists(java_srcs_dir):
+ os.makedirs(java_srcs_dir)
+
+ flags = self._incl_flags()
+ src = self.resolve_path(path)
+ with open(src, 'r') as f:
+ if not re.search(r'(?m)^%module\b', f.read()):
+ flags += ['-module', os.path.splitext(os.path.basename(src))[0]]
+
+ if self._swig_lang == 'jni_cpp':
+ self.call([binary, '-c++', '-o', self._main_out, '-java', '-package', self._package] + flags + [src])
+ elif self._swig_lang == 'jni_java':
+ self.call([binary, '-c++', '-o', os.path.join(outdir, 'unused.cpp'), '-outdir', java_srcs_dir, '-java', '-package', self._package] + flags + [src])
+ elif self._swig_lang == 'java':
+ self.call([
+ binary, '-c++', '-o', self._main_out, '-outdir', java_srcs_dir,
+ '-java', '-package', self._package,
+ ] + flags + [src])
+
+ if self._swig_lang in ['jni_java', 'java']:
+ with tarfile.open(os.path.join(outdir, self._out_name), 'a') as tf:
+ tf.add(java_srcs_dir, arcname=self._package.replace('.', '/'))
+
+ if self._swig_lang in ['jni_cpp', 'java']:
+ header = os.path.splitext(self.resolve_path(self._main_out))[0] + '.h'
+ if not os.path.exists(header):
+ open(header, 'w').close()
+
+
+def on_swig_lang_filtered_srcs(unit, *args):
+ swig_lang = unit.get('SWIG_LANG')
+ allowed_exts = set()
+ if swig_lang == 'jni_cpp':
+ allowed_exts = set(['.cpp', '.swg'])
+ if swig_lang == 'jni_java':
+ allowed_exts = set(['.java', '.swg'])
+ args = [arg for arg in iter(args) if allowed_exts and os.path.splitext(arg)[1] in allowed_exts]
+ unit.onsrcs(args)
diff --git a/build/plugins/tests/test_code_generator.py b/build/plugins/tests/test_code_generator.py
new file mode 100644
index 0000000000..a675d9068c
--- /dev/null
+++ b/build/plugins/tests/test_code_generator.py
@@ -0,0 +1,16 @@
+from build.plugins import code_generator
+
+
+def test_include_parser():
+ template_file = """
+ @ from 'util/namespace.macro' import namespace, change_namespace, close_namespaces
+ @ import 'market/tools/code_generator/templates/serialization/json.macro' as json
+ @ import 'market/tools/code_generator/templates/serialization/request_parameters.macro' as rp
+ #include <sss/abcdefg.h>
+ #include<fff/asd>
+ #include "hhh/quququ.h"
+ """
+
+ includes, induced = code_generator.CodeGeneratorTemplateParser.parse_includes(template_file.split('\n'))
+ assert includes == ['util/namespace.macro', 'market/tools/code_generator/templates/serialization/json.macro', 'market/tools/code_generator/templates/serialization/request_parameters.macro']
+ assert induced == ['sss/abcdefg.h', 'fff/asd', 'hhh/quququ.h']
diff --git a/build/plugins/tests/test_common.py b/build/plugins/tests/test_common.py
new file mode 100644
index 0000000000..e1780354f8
--- /dev/null
+++ b/build/plugins/tests/test_common.py
@@ -0,0 +1,49 @@
+import pytest
+
+import build.plugins._common as pc
+
+
+def test_sort_by_keywords():
+ keywords = {'KEY1': 2, 'KEY2': 0, 'KEY3': 1}
+ args = 'aaaa bbbb KEY2 KEY1 kkk10 kkk11 ccc ddd KEY3 kkk3 eee'.split()
+ flat, spec = pc.sort_by_keywords(keywords, args)
+ assert flat == ['aaaa', 'bbbb', 'ccc', 'ddd', 'eee']
+ assert spec == {'KEY1': ['kkk10', 'kkk11'], 'KEY2': True, 'KEY3': ['kkk3']}
+
+ keywords = {'KEY1': 0, 'KEY2': 4}
+ args = 'aaaa KEY2 eee'.split()
+ flat, spec = pc.sort_by_keywords(keywords, args)
+ assert flat == ['aaaa']
+ assert spec == {'KEY2': ['eee']}
+
+ keywords = {'KEY1': 2, 'KEY2': 2}
+ args = 'KEY1 k10 KEY2 k20 KEY1 k11 KEY2 k21 KEY1 k13'.split()
+ flat, spec = pc.sort_by_keywords(keywords, args)
+ assert flat == []
+ assert spec == {'KEY1': ['k10', 'k11', 'k13'], 'KEY2': ['k20', 'k21']}
+
+
+def test_filter_out_by_keyword():
+ assert pc.filter_out_by_keyword([], 'A') == []
+ assert pc.filter_out_by_keyword(['x'], 'A') == ['x']
+ assert pc.filter_out_by_keyword(['x', 'A'], 'A') == ['x']
+ assert pc.filter_out_by_keyword(['x', 'A', 'B'], 'A') == ['x']
+ assert pc.filter_out_by_keyword(['x', 'A', 'B', 'y'], 'A') == ['x', 'y']
+ assert pc.filter_out_by_keyword(['x', 'A', 'A', 'y'], 'A') == ['x', 'y']
+ assert pc.filter_out_by_keyword(['x', 'A', 'A', 'A'], 'A') == ['x']
+ assert pc.filter_out_by_keyword(['x', 'A', 'A', 'A', 'B', 'y'], 'A') == ['x', 'y']
+ assert pc.filter_out_by_keyword(['x', 'A', 'A', 'A', 'B', 'y', 'A'], 'A') == ['x', 'y']
+ assert pc.filter_out_by_keyword(['x', 'A', 'A', 'A', 'B', 'y', 'A', 'F', 'z'], 'A') == ['x', 'y', 'z']
+
+
+test_data = [
+ [[1, 2, 3], 1, [[1], [2], [3]]],
+ [[1, 2, 3], 2, [[1, 2], [3]]],
+ [[1, 2, 3, 4], 2, [[1, 2], [3, 4]]],
+ [[1], 5, [[1]]],
+]
+
+
+@pytest.mark.parametrize('lst, chunk_size, expected', test_data, ids=[str(num + 1) for num in range(len(test_data))])
+def test_generate_chunks(lst, chunk_size, expected):
+ assert list(pc.generate_chunks(lst, chunk_size)) == expected
diff --git a/build/plugins/tests/test_requirements.py b/build/plugins/tests/test_requirements.py
new file mode 100644
index 0000000000..24d57ac901
--- /dev/null
+++ b/build/plugins/tests/test_requirements.py
@@ -0,0 +1,52 @@
+import pytest
+
+import build.plugins._requirements as requirements
+import build.plugins._test_const as consts
+
+
+class TestRequirements(object):
+ @pytest.mark.parametrize('test_size', consts.TestSize.sizes())
+ def test_cpu(self, test_size):
+ max_cpu = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Cpu)
+ min_cpu = consts.TestRequirementsConstants.MinCpu
+ assert requirements.check_cpu(-1, test_size)
+ assert requirements.check_cpu(min_cpu - 1, test_size)
+ assert requirements.check_cpu("unknown", test_size)
+ assert not requirements.check_cpu(1, test_size)
+ assert not requirements.check_cpu(3, test_size)
+ assert requirements.check_cpu(1000, test_size)
+ if max_cpu != consts.TestRequirementsConstants.All:
+ assert requirements.check_cpu(max_cpu + 1, test_size)
+ assert requirements.check_cpu(max_cpu + 4, test_size)
+ assert requirements.check_cpu(consts.TestRequirementsConstants.All, test_size)
+ else:
+ assert not requirements.check_cpu(consts.TestRequirementsConstants.All, test_size)
+
+ @pytest.mark.parametrize('test_size', consts.TestSize.sizes())
+ def test_ram(self, test_size):
+ max_ram = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.Ram)
+ min_ram = consts.TestRequirementsConstants.MinRam
+ assert requirements.check_ram(-1, test_size)
+ assert requirements.check_ram(min_ram - 1, test_size)
+ assert requirements.check_ram(max_ram + 1, test_size)
+ assert not requirements.check_ram(1, test_size)
+ assert not requirements.check_ram(4, test_size)
+ assert not requirements.check_ram(5, test_size)
+ assert not requirements.check_ram(32, consts.TestSize.Large)
+ assert requirements.check_ram(48, consts.TestSize.Large)
+
+ assert not requirements.check_ram(1, test_size, is_kvm=True)
+ assert not requirements.check_ram(4, test_size, is_kvm=True)
+ assert not requirements.check_ram(16, test_size, is_kvm=True)
+ assert requirements.check_ram(32, test_size, is_kvm=True)
+
+ @pytest.mark.parametrize('test_size', consts.TestSize.sizes())
+ def test_ram_disk(self, test_size):
+ max_ram_disk = consts.TestSize.get_max_requirements(test_size).get(consts.TestRequirements.RamDisk)
+ min_ram_disk = consts.TestRequirementsConstants.MinRamDisk
+ assert requirements.check_ram_disk(-1, test_size)
+ assert requirements.check_ram_disk(min_ram_disk - 1, test_size)
+ assert requirements.check_ram_disk(max_ram_disk + 1, test_size)
+ assert requirements.check_ram_disk(8, test_size)
+ assert not requirements.check_ram_disk(1, test_size)
+ assert not requirements.check_ram_disk(4, test_size)
diff --git a/build/plugins/tests/test_ssqls.py b/build/plugins/tests/test_ssqls.py
new file mode 100644
index 0000000000..2a1d032109
--- /dev/null
+++ b/build/plugins/tests/test_ssqls.py
@@ -0,0 +1,23 @@
+import xml.etree.cElementTree as ET
+
+from build.plugins import ssqls
+
+
+example = '''\
+<?xml version="1.0" encoding="utf-8"?>
+<DbObject>
+ <include path="A.ssqls">&lt;a.h&gt;</include>
+ <include>"b.h"</include>
+
+ <ancestors>
+ <ancestor path="C.ssqls"/>
+ </ancestors>
+</DbObject>
+'''
+
+
+def test_include_parser():
+ doc = ET.fromstring(example)
+ xmls, headers = ssqls.SSQLSParser.parse_doc(doc)
+ assert headers == ['a.h', 'b.h', 'C.h']
+ assert xmls == ['A.ssqls', 'C.ssqls']
diff --git a/build/plugins/tests/ya.make b/build/plugins/tests/ya.make
new file mode 100644
index 0000000000..87228b98df
--- /dev/null
+++ b/build/plugins/tests/ya.make
@@ -0,0 +1,16 @@
+PY2TEST()
+
+OWNER(g:yatool)
+
+PEERDIR(
+ build/plugins
+)
+
+TEST_SRCS(
+ test_code_generator.py
+ test_common.py
+ test_requirements.py
+ test_ssqls.py
+)
+
+END()
diff --git a/build/plugins/xsyn.py b/build/plugins/xsyn.py
new file mode 100644
index 0000000000..ab7c1639db
--- /dev/null
+++ b/build/plugins/xsyn.py
@@ -0,0 +1,34 @@
+import _import_wrapper as iw
+import _common as common
+
+
+class Xsyn(iw.CustomCommand):
+
+ def __init__(self, path, unit):
+ self._path = path
+
+ def descr(self):
+ return 'XN', self._path, 'yellow'
+
+ def tools(self):
+ return []
+
+ def input(self):
+ return common.make_tuples([
+ '$S/library/cpp/xml/parslib/xsyn2ragel.py',
+ self._path,
+ '$S/library/cpp/xml/parslib/xmlpars.xh'
+ ])
+
+ def output(self):
+ return common.make_tuples([
+ common.tobuilddir(self._path + '.h.rl5')
+ ])
+
+ def run(self, extra_args, interpeter):
+ self.call(interpeter + [self.resolve_path(common.get(self.input, 0)), self.resolve_path(common.get(self.input, 1)),
+ self.resolve_path(common.get(self.input, 2)), 'dontuse'], stdout=common.get(self.output, 0))
+
+
+def init():
+ iw.addrule('xsyn', Xsyn)
diff --git a/build/plugins/ya.make b/build/plugins/ya.make
new file mode 100644
index 0000000000..4ad5f5988e
--- /dev/null
+++ b/build/plugins/ya.make
@@ -0,0 +1,23 @@
+OWNER(g:ymake)
+
+PY2_LIBRARY()
+
+PY_SRCS(
+ code_generator.py
+ ssqls.py
+ swig.py
+
+ _common.py
+ _custom_command.py
+ _import_wrapper.py
+ _requirements.py
+ _test_const.py
+)
+
+PEERDIR(build/plugins/lib)
+
+END()
+
+RECURSE(
+ tests
+)
diff --git a/build/plugins/yql_python_udf.py b/build/plugins/yql_python_udf.py
new file mode 100644
index 0000000000..c4f949d8a9
--- /dev/null
+++ b/build/plugins/yql_python_udf.py
@@ -0,0 +1,55 @@
+from _common import sort_by_keywords
+
+
+def get_or_default(kv, name, default):
+ if name in kv:
+ return kv[name][0]
+ return default
+
+
+def onregister_yql_python_udf(unit, *args):
+ flat, kv = sort_by_keywords({'NAME': 1, 'RESOURCE_NAME': 1, 'ADD_LIBRA_MODULES': 1}, args)
+ assert len(flat) == 0
+ name = get_or_default(kv, 'NAME', 'CustomPython')
+ resource_name = get_or_default(kv, 'RESOURCE_NAME', name)
+ add_libra_modules = get_or_default(kv, 'ADD_LIBRA_MODULES', 'no') == 'yes'
+
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') == 'yes'
+ py3 = unit.get('PYTHON3') == 'yes'
+
+ unit.onyql_abi_version(['2', '9', '0'])
+ unit.onpeerdir(['yql/udfs/common/python/python_udf'])
+ unit.onpeerdir(['ydb/library/yql/public/udf'])
+
+ if add_libra_modules:
+ unit.onpeerdir(['quality/user_sessions/libra_arc/noyql'])
+ unit.onpeerdir(['yql/udfs/quality/libra/module'])
+
+ if use_arcadia_python:
+ flavor = 'Arcadia'
+ unit.onpeerdir([
+ 'library/python/runtime',
+ 'yql/udfs/common/python/main'
+ ] if not py3 else [
+ 'library/python/runtime_py3',
+ 'yql/udfs/common/python/main_py3'
+ ])
+ else:
+ flavor = 'System'
+
+ output_includes = [
+ 'yql/udfs/common/python/python_udf/python_udf.h',
+ 'ydb/library/yql/public/udf/udf_registrator.h',
+ ]
+ if add_libra_modules:
+ output_includes.append('yql/udfs/quality/libra/module/module.h')
+
+ path = name + '.yql_python_udf.cpp'
+ libra_flag = '1' if add_libra_modules else '0'
+ unit.onpython([
+ 'build/scripts/gen_yql_python_udf.py',
+ flavor, name, resource_name, path, libra_flag,
+ 'OUT', path,
+ 'OUTPUT_INCLUDES',
+ ] + output_includes
+ )
diff --git a/build/plugins/ytest.py b/build/plugins/ytest.py
new file mode 100644
index 0000000000..8970837f0f
--- /dev/null
+++ b/build/plugins/ytest.py
@@ -0,0 +1,1113 @@
+import os
+import re
+import sys
+import json
+import copy
+import base64
+import shlex
+import _common
+import lib._metric_resolvers as mr
+import _test_const as consts
+import _requirements as reqs
+import StringIO
+import subprocess
+import collections
+
+import ymake
+
+
+MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
+MDS_SHEME = 'mds'
+CANON_DATA_DIR_NAME = 'canondata'
+CANON_OUTPUT_STORAGE = 'canondata_storage'
+CANON_RESULT_FILE_NAME = 'result.json'
+CANON_MDS_RESOURCE_REGEX = re.compile(re.escape(MDS_URI_PREFIX) + r'(.*?)($|#)')
+CANON_SB_VAULT_REGEX = re.compile(r"\w+=(value|file):[-\w]+:\w+")
+CANON_SBR_RESOURCE_REGEX = re.compile(r'(sbr:/?/?(\d+))')
+
+VALID_NETWORK_REQUIREMENTS = ("full", "restricted")
+VALID_DNS_REQUIREMENTS = ("default", "local", "dns64")
+BLOCK_SEPARATOR = '============================================================='
+SPLIT_FACTOR_MAX_VALUE = 1000
+SPLIT_FACTOR_TEST_FILES_MAX_VALUE = 4250
+PARTITION_MODS = ('SEQUENTIAL', 'MODULO')
+DEFAULT_TIDY_CONFIG = "build/config/tests/clang_tidy/config.yaml"
+DEFAULT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_default_map.json"
+PROJECT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_project_map.json"
+
+
+tidy_config_map = None
+
+def ontest_data(unit, *args):
+ ymake.report_configure_error("TEST_DATA is removed in favour of DATA")
+
+
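+# Appends dart data to the dart output file; an empty file gets the block separator written first.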
+def save_in_file(filepath, data):
+ if filepath:
+ with open(filepath, 'a') as file_handler:
+ if os.stat(filepath).st_size == 0:
+ print >>file_handler, BLOCK_SEPARATOR
+ print >> file_handler, data
+
+
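+# Recipe values are base64-encoded so that multi-line content survives embedding into the dart record.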
+def prepare_recipes(data):
+ data = data.replace('"USE_RECIPE_DELIM"', "\n")
+ data = data.replace("$TEST_RECIPES_VALUE", "")
+ return base64.b64encode(data or "")
+
+
+def prepare_env(data):
+ data = data.replace("$TEST_ENV_VALUE", "")
+ return serialize_list(shlex.split(data))
+
+
+def is_yt_spec_contain_pool_info(filename): # XXX switch to yson in ymake + perf test for configure
+ pool_re = re.compile(r"""['"]*pool['"]*\s*?=""")
+ cypress_root_re = re.compile(r"""['"]*cypress_root['"]*\s*=""")
+ with open(filename, 'r') as afile:
+ yt_spec = afile.read()
+ return pool_re.search(yt_spec) and cypress_root_re.search(yt_spec)
+
+
+def validate_sb_vault(name, value):
+ if not CANON_SB_VAULT_REGEX.match(value):
+ return "sb_vault value '{}' should follow pattern <ENV_NAME>=:<value|file>:<owner>:<vault key>".format(value)
+
+
+def validate_numerical_requirement(name, value):
+ if mr.resolve_value(value) is None:
+ return "Cannot convert [[imp]]{}[[rst]] to the proper [[imp]]{}[[rst]] requirement value".format(value, name)
+
+
+def validate_choice_requirement(name, val, valid):
+ if val not in valid:
+ return "Unknown [[imp]]{}[[rst]] requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(name, val, ", ".join(valid))
+
+
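+# For tests that can run in Sandbox (force sandbox, fuzzing, ytexec or outside autocheck) the special value 'all' is allowed;
+# otherwise the value must be numeric and is validated against the per-size limit by check_func.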
+def validate_force_sandbox_requirement(name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, check_func):
+ if is_force_sandbox or not in_autocheck or is_fuzzing or is_ytexec_run:
+ if value == 'all':
+ return
+ return validate_numerical_requirement(name, value)
+ error_msg = validate_numerical_requirement(name, value)
+ if error_msg:
+ return error_msg
+ return check_func(mr.resolve_value(value), test_size, is_kvm)
+
+
+# TODO: Remove is_kvm param when there will be guarantees on RAM
+def validate_requirement(req_name, value, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run):
+ req_checks = {
+ 'container': validate_numerical_requirement,
+ 'cpu': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_cpu),
+ 'disk_usage': validate_numerical_requirement,
+ 'dns': lambda n, v: validate_choice_requirement(n, v, VALID_DNS_REQUIREMENTS),
+ 'kvm': None,
+ 'network': lambda n, v: validate_choice_requirement(n, v, VALID_NETWORK_REQUIREMENTS),
+ 'ram': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_ram),
+ 'ram_disk': lambda n, v: validate_force_sandbox_requirement(n, v, test_size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run, reqs.check_ram_disk),
+ 'sb': None,
+ 'sb_vault': validate_sb_vault,
+ }
+
+ if req_name not in req_checks:
+ return "Unknown requirement: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(req_name, ", ".join(sorted(req_checks)))
+
+ if req_name in ('container', 'disk') and not is_force_sandbox:
+ return "Only [[imp]]LARGE[[rst]] tests without [[imp]]ya:force_distbuild[[rst]] tag can have [[imp]]{}[[rst]] requirement".format(req_name)
+
+ check_func = req_checks[req_name]
+ if check_func:
+ return check_func(req_name, value)
+
+
+def validate_test(unit, kw):
+ def get_list(key):
+ return deserialize_list(kw.get(key, ""))
+
+ valid_kw = copy.deepcopy(kw)
+ errors = []
+ warnings = []
+
+ if valid_kw.get('SCRIPT-REL-PATH') == 'boost.test':
+ project_path = valid_kw.get('BUILD-FOLDER-PATH', "")
+ if not project_path.startswith(("contrib", "mail", "maps", "tools/idl", "metrika", "devtools", "mds", "yandex_io", "smart_devices")):
+ errors.append("BOOSTTEST is not allowed here")
+ elif valid_kw.get('SCRIPT-REL-PATH') == 'gtest':
+ project_path = valid_kw.get('BUILD-FOLDER-PATH', "")
+ if not project_path.startswith(("contrib", "devtools", "mail", "mds", "taxi")):
+ errors.append("GTEST_UGLY is not allowed here, use GTEST instead")
+
+ size_timeout = collections.OrderedDict(sorted(consts.TestSize.DefaultTimeouts.items(), key=lambda t: t[1]))
+
+ size = valid_kw.get('SIZE', consts.TestSize.Small).lower()
+ # TODO: use a set instead of a list
+ tags = get_list("TAG")
+ requirements_orig = get_list("REQUIREMENTS")
+ in_autocheck = "ya:not_autocheck" not in tags and 'ya:manual' not in tags
+ is_fat = 'ya:fat' in tags
+ is_force_sandbox = 'ya:force_distbuild' not in tags and is_fat
+ is_ytexec_run = 'ya:yt' in tags
+ is_fuzzing = valid_kw.get("FUZZING", False)
+ is_kvm = 'kvm' in requirements_orig
+ requirements = {}
+ list_requirements = ('sb_vault',)
+ for req in requirements_orig:
+ if req in ('kvm', ):
+ requirements[req] = str(True)
+ continue
+
+ if ":" in req:
+ req_name, req_value = req.split(":", 1)
+ if req_name in list_requirements:
+ requirements[req_name] = ",".join(filter(None, [requirements.get(req_name), req_value]))
+ else:
+ if req_name in requirements:
+ if req_value in ["0"]:
+ warnings.append("Requirement [[imp]]{}[[rst]] is dropped [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format(req_name, requirements[req_name], req_value))
+ del requirements[req_name]
+ elif requirements[req_name] != req_value:
+ warnings.append("Requirement [[imp]]{}[[rst]] is redefined [[imp]]{}[[rst]] -> [[imp]]{}[[rst]]".format(req_name, requirements[req_name], req_value))
+ requirements[req_name] = req_value
+ else:
+ requirements[req_name] = req_value
+ else:
+ errors.append("Invalid requirement syntax [[imp]]{}[[rst]]: expect <requirement>:<value>".format(req))
+
+ if not errors:
+ for req_name, req_value in requirements.items():
+ error_msg = validate_requirement(req_name, req_value, size, is_force_sandbox, in_autocheck, is_fuzzing, is_kvm, is_ytexec_run)
+ if error_msg:
+ errors += [error_msg]
+
+ invalid_requirements_for_distbuild = [requirement for requirement in requirements.keys() if requirement not in ('ram', 'ram_disk', 'cpu', 'network')]
+ sb_tags = [tag for tag in tags if tag.startswith('sb:')]
+
+ if is_fat:
+ if size != consts.TestSize.Large:
+ errors.append("Only LARGE test may have ya:fat tag")
+
+ if in_autocheck and not is_force_sandbox:
+ if invalid_requirements_for_distbuild:
+ errors.append("'{}' REQUIREMENTS options can be used only for FAT tests without ya:force_distbuild tag. Remove TAG(ya:force_distbuild) or an option.".format(invalid_requirements_for_distbuild))
+ if sb_tags:
+ errors.append("You can set sandbox tags '{}' only for FAT tests without ya:force_distbuild. Remove TAG(ya:force_sandbox) or sandbox tags.".format(sb_tags))
+ if 'ya:sandbox_coverage' in tags:
+ errors.append("You can set 'ya:sandbox_coverage' tag only for FAT tests without ya:force_distbuild.")
+ else:
+ if is_force_sandbox:
+ errors.append('ya:force_sandbox can be used with LARGE tests only')
+ if 'ya:nofuse' in tags:
+ errors.append('ya:nofuse can be used with LARGE tests only')
+ if 'ya:privileged' in tags:
+ errors.append("ya:privileged can be used with LARGE tests only")
+ if in_autocheck and size == consts.TestSize.Large:
+ errors.append("LARGE test must have ya:fat tag")
+
+ if 'ya:privileged' in tags and 'container' not in requirements:
+ errors.append("Only tests with 'container' requirement can have 'ya:privileged' tag")
+
+ if size not in size_timeout:
+ errors.append("Unknown test size: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]".format(size.upper(), ", ".join([sz.upper() for sz in size_timeout.keys()])))
+ else:
+ try:
+ timeout = int(valid_kw.get('TEST-TIMEOUT', size_timeout[size]) or size_timeout[size])
+ script_rel_path = valid_kw.get('SCRIPT-REL-PATH')
+ if timeout < 0:
+ raise Exception("Timeout must be > 0")
+ if size_timeout[size] < timeout and in_autocheck and script_rel_path != 'java.style':
+ suggested_size = None
+ for s, t in size_timeout.items():
+ if timeout <= t:
+ suggested_size = s
+ break
+
+ if suggested_size:
+ suggested_size = ", suggested size: [[imp]]{}[[rst]]".format(suggested_size.upper())
+ else:
+ suggested_size = ""
+ errors.append("Max allowed timeout for test size [[imp]]{}[[rst]] is [[imp]]{} sec[[rst]]{}".format(size.upper(), size_timeout[size], suggested_size))
+ except Exception as e:
+ errors.append("Error when parsing test timeout: [[bad]]{}[[rst]]".format(e))
+
+ requirements_list = []
+ for req_name, req_value in requirements.iteritems():
+ requirements_list.append(req_name + ":" + req_value)
+ valid_kw['REQUIREMENTS'] = serialize_list(requirements_list)
+
+ if valid_kw.get("FUZZ-OPTS"):
+ for option in get_list("FUZZ-OPTS"):
+ if not option.startswith("-"):
+ errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. All fuzzer options should start with '-'".format(option))
+ break
+ eqpos = option.find("=")
+ if eqpos == -1 or len(option) == eqpos + 1:
+ errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. All fuzzer options should obtain value specified after '='".format(option))
+ break
+ if option[eqpos - 1] == " " or option[eqpos + 1] == " ":
+ errors.append("Spaces are not allowed: '[[imp]]{}[[rst]]'".format(option))
+ break
+ if option[:eqpos] in ("-runs", "-dict", "-jobs", "-workers", "-artifact_prefix", "-print_final_stats"):
+ errors.append("You can't use '[[imp]]{}[[rst]]' - it will be automatically calculated or configured during run".format(option))
+ break
+
+ if valid_kw.get("YT-SPEC"):
+ if not is_ytexec_run:
+ errors.append("You can use YT_SPEC macro only tests marked with ya:yt tag")
+ else:
+ for filename in get_list("YT-SPEC"):
+ filename = unit.resolve('$S/' + filename)
+ if not os.path.exists(filename):
+ errors.append("File '{}' specified in the YT_SPEC macro doesn't exist".format(filename))
+ continue
+ if is_yt_spec_contain_pool_info(filename) and "ya:external" not in tags:
+ tags.append("ya:external")
+ tags.append("ya:yt_research_pool")
+
+ if valid_kw.get("USE_ARCADIA_PYTHON") == "yes" and valid_kw.get("SCRIPT-REL-PATH") == "py.test":
+ errors.append("PYTEST_SCRIPT is deprecated")
+
+ partition = valid_kw.get('TEST_PARTITION', 'SEQUENTIAL')
+ if partition not in PARTITION_MODS:
+ raise ValueError('partition mode should be one of {}, detected: {}'.format(PARTITION_MODS, partition))
+
+ if valid_kw.get('SPLIT-FACTOR'):
+ if valid_kw.get('FORK-MODE') == 'none':
+ errors.append('SPLIT_FACTOR must be used with the FORK_TESTS() or FORK_SUBTESTS() macro')
+
+ value = 1
+ try:
+ value = int(valid_kw.get('SPLIT-FACTOR'))
+ if value <= 0:
+ raise ValueError("must be > 0")
+ if value > SPLIT_FACTOR_MAX_VALUE:
+ raise ValueError("the maximum allowed value is {}".format(SPLIT_FACTOR_MAX_VALUE))
+ except ValueError as e:
+ errors.append('Incorrect SPLIT_FACTOR value: {}'.format(e))
+
+ if valid_kw.get('FORK-TEST-FILES') and size != consts.TestSize.Large:
+ nfiles = count_entries(valid_kw.get('TEST-FILES'))
+ if nfiles * value > SPLIT_FACTOR_TEST_FILES_MAX_VALUE:
+ errors.append('Too many chunks generated: {} (limit: {}). Remove the FORK_TEST_FILES() macro or reduce SPLIT_FACTOR({}).'.format(
+ nfiles * value, SPLIT_FACTOR_TEST_FILES_MAX_VALUE, value))
+
+ unit_path = get_norm_unit_path(unit)
+ if not is_fat and "ya:noretries" in tags and not is_ytexec_run \
+ and not unit_path.startswith("devtools/") \
+ and not unit_path.startswith("infra/kernel/") \
+ and not unit_path.startswith("yt/python/yt") \
+ and not unit_path.startswith("infra/yp_dns_api/tests") \
+ and not unit_path.startswith("yp/tests"):
+ errors.append("Only LARGE tests can have 'ya:noretries' tag")
+
+ if errors:
+ return None, warnings, errors
+
+ return valid_kw, warnings, errors
+
+
+def get_norm_unit_path(unit, extra=None):
+ path = _common.strip_roots(unit.path())
+ if extra:
+ return '{}/{}'.format(path, extra)
+ return path
+
+
+def dump_test(unit, kw):
+ valid_kw, warnings, errors = validate_test(unit, kw)
+ for w in warnings:
+ unit.message(['warn', w])
+ for e in errors:
+ ymake.report_configure_error(e)
+ if valid_kw is None:
+ return None
+ string_handler = StringIO.StringIO()
+ for k, v in valid_kw.iteritems():
+ print >>string_handler, k + ': ' + v
+ print >>string_handler, BLOCK_SEPARATOR
+ data = string_handler.getvalue()
+ string_handler.close()
+ return data
+
+
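+# Dart lists are serialized as a quoted, semicolon-separated string; deserialize_list and count_entries rely on this format.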
+def serialize_list(lst):
+ lst = filter(None, lst)
+ return '\"' + ';'.join(lst) + '\"' if lst else ''
+
+
+def deserialize_list(val):
+ return filter(None, val.replace('"', "").split(";"))
+
+
+def count_entries(x):
+ # see (de)serialize_list
+ assert x is None or isinstance(x, str), type(x)
+ if not x:
+ return 0
+ return x.count(";") + 1
+
+
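+# Returns the whitespace-separated values of a unit variable, dropping the self-referencing placeholder and empty quotes.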
+def get_values_list(unit, key):
+ res = map(str.strip, (unit.get(key) or '').replace('$' + key, '').strip().split())
+ return [r for r in res if r and r not in ['""', "''"]]
+
+
+def get_norm_paths(unit, key):
+ # return paths without trailing (back)slash
+ return [x.rstrip('\\/') for x in get_values_list(unit, key)]
+
+
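+# List-like unit variables start with a '$<name>' token equal to the variable's own name; it is stripped from the result.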
+def get_unit_list_variable(unit, name):
+ items = unit.get(name)
+ if items:
+ items = items.split(' ')
+ assert items[0] == "${}".format(name), (items, name)
+ return items[1:]
+ return []
+
+
+def implies(a, b):
+ return bool((not a) or b)
+
+
+def match_coverage_extractor_requirements(unit):
+ # add the coverage.extractor test only when all of the following hold
+ return all([
+ # tests are requested
+ unit.get("TESTS_REQUESTED") == "yes",
+ # the build uses clang coverage, which supports segment extraction from the binaries
+ unit.get("CLANG_COVERAGE") == "yes",
+ # the module is not from contrib, or contrib coverage is explicitly enabled
+ implies(get_norm_unit_path(unit).startswith("contrib/"), unit.get("ENABLE_CONTRIB_COVERAGE") == "yes"),
+ ])
+
+
+def get_tidy_config_map(unit):
+ global tidy_config_map
+ if tidy_config_map is None:
+ config_map_path = unit.resolve(os.path.join("$S", PROJECT_TIDY_CONFIG_MAP_PATH))
+ with open(config_map_path, 'r') as afile:
+ tidy_config_map = json.load(afile)
+ return tidy_config_map
+
+
+def get_default_tidy_config(unit):
+ unit_path = get_norm_unit_path(unit)
+ default_config_map_path = unit.resolve(os.path.join("$S", DEFAULT_TIDY_CONFIG_MAP_PATH))
+ with open(default_config_map_path, 'r') as afile:
+ tidy_default_config_map = json.load(afile)
+ for project_prefix, config_path in tidy_default_config_map.items():
+ if unit_path.startswith(project_prefix):
+ return config_path
+ return DEFAULT_TIDY_CONFIG
+
+
+def get_project_tidy_config(unit):
+ tidy_map = get_tidy_config_map(unit)
+ unit_path = get_norm_unit_path(unit)
+
+ for project_prefix, config_path in tidy_map.items():
+ if unit_path.startswith(project_prefix):
+ return config_path
+ else:
+ return get_default_tidy_config(unit)
+
+
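+# Registers a test for the unit: flat_args[0] is the test name, flat_args[1] is the test type (SCRIPT-REL-PATH).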
+def onadd_ytest(unit, *args):
+ keywords = {"DEPENDS": -1, "DATA": -1, "TIMEOUT": 1, "FORK_MODE": 1, "SPLIT_FACTOR": 1,
+ "FORK_SUBTESTS": 0, "FORK_TESTS": 0}
+ flat_args, spec_args = _common.sort_by_keywords(keywords, args)
+
+ test_data = sorted(_common.filter_out_by_keyword(spec_args.get('DATA', []) + get_norm_paths(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED'))
+
+ if flat_args[1] == "fuzz.test":
+ unit.ondata("arcadia/fuzzing/{}/corpus.json".format(get_norm_unit_path(unit)))
+ elif flat_args[1] == "go.test":
+ data, _ = get_canonical_test_resources(unit)
+ test_data += data
+ elif flat_args[1] == "coverage.extractor" and not match_coverage_extractor_requirements(unit):
+ # XXX
+ # The current ymake implementation doesn't allow calling a macro inside a 'when' body,
+ # that's why we add ADD_YTEST(coverage.extractor) to every PROGRAM entry and check the requirements later
+ return
+ elif flat_args[1] == "clang_tidy" and unit.get("TIDY") != "yes":
+ # Graph is not prepared
+ return
+ elif flat_args[1] == "no.test":
+ return
+ test_size = ''.join(spec_args.get('SIZE', [])) or unit.get('TEST_SIZE_NAME') or ''
+ test_tags = serialize_list(_get_test_tags(unit, spec_args))
+ test_timeout = ''.join(spec_args.get('TIMEOUT', [])) or unit.get('TEST_TIMEOUT') or ''
+ test_requirements = spec_args.get('REQUIREMENTS', []) + get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
+
+ if flat_args[1] != "clang_tidy" and unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ if flat_args[1] in ("unittest.py", "gunittest", "g_benchmark"):
+ flat_args[1] = "clang_tidy"
+ test_size = 'SMALL'
+ test_tags = ''
+ test_timeout = "60"
+ test_requirements = []
+ unit.set(["TEST_YT_SPEC_VALUE", ""])
+ else:
+ return
+
+ if flat_args[1] == "clang_tidy" and unit.get("TIDY") == "yes":
+ if unit.get("TIDY_CONFIG"):
+ default_config_path = unit.get("TIDY_CONFIG")
+ project_config_path = unit.get("TIDY_CONFIG")
+ else:
+ default_config_path = get_default_tidy_config(unit)
+ project_config_path = get_project_tidy_config(unit)
+
+ unit.set(["DEFAULT_TIDY_CONFIG", default_config_path])
+ unit.set(["PROJECT_TIDY_CONFIG", project_config_path])
+
+ fork_mode = []
+ if 'FORK_SUBTESTS' in spec_args:
+ fork_mode.append('subtests')
+ if 'FORK_TESTS' in spec_args:
+ fork_mode.append('tests')
+ fork_mode = fork_mode or spec_args.get('FORK_MODE', []) or unit.get('TEST_FORK_MODE').split()
+ fork_mode = ' '.join(fork_mode) if fork_mode else ''
+
+ unit_path = get_norm_unit_path(unit)
+
+ test_record = {
+ 'TEST-NAME': flat_args[0],
+ 'SCRIPT-REL-PATH': flat_args[1],
+ 'TESTED-PROJECT-NAME': unit.name(),
+ 'TESTED-PROJECT-FILENAME': unit.filename(),
+ 'SOURCE-FOLDER-PATH': unit_path,
+ # TODO get rid of BUILD-FOLDER-PATH
+ 'BUILD-FOLDER-PATH': unit_path,
+ 'BINARY-PATH': "{}/{}".format(unit_path, unit.filename()),
+ 'GLOBAL-LIBRARY-PATH': unit.global_filename(),
+ 'CUSTOM-DEPENDENCIES': ' '.join(spec_args.get('DEPENDS', []) + get_values_list(unit, 'TEST_DEPENDS_VALUE')),
+ 'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
+ 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
+ # 'TEST-PRESERVE-ENV': 'da',
+ 'TEST-DATA': serialize_list(test_data),
+ 'TEST-TIMEOUT': test_timeout,
+ 'FORK-MODE': fork_mode,
+ 'SPLIT-FACTOR': ''.join(spec_args.get('SPLIT_FACTOR', [])) or unit.get('TEST_SPLIT_FACTOR') or '',
+ 'SIZE': test_size,
+ 'TAG': test_tags,
+ 'REQUIREMENTS': serialize_list(test_requirements),
+ 'TEST-CWD': unit.get('TEST_CWD_VALUE') or '',
+ 'FUZZ-DICTS': serialize_list(spec_args.get('FUZZ_DICTS', []) + get_unit_list_variable(unit, 'FUZZ_DICTS_VALUE')),
+ 'FUZZ-OPTS': serialize_list(spec_args.get('FUZZ_OPTS', []) + get_unit_list_variable(unit, 'FUZZ_OPTS_VALUE')),
+ 'YT-SPEC': serialize_list(spec_args.get('YT_SPEC', []) + get_unit_list_variable(unit, 'TEST_YT_SPEC_VALUE')),
+ 'BLOB': unit.get('TEST_BLOB_DATA') or '',
+ 'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
+ 'TEST_IOS_DEVICE_TYPE': unit.get('TEST_IOS_DEVICE_TYPE_VALUE') or '',
+ 'TEST_IOS_RUNTIME_TYPE': unit.get('TEST_IOS_RUNTIME_TYPE_VALUE') or '',
+ 'ANDROID_APK_TEST_ACTIVITY': unit.get('ANDROID_APK_TEST_ACTIVITY_VALUE') or '',
+ 'TEST_PARTITION': unit.get("TEST_PARTITION") or 'SEQUENTIAL',
+ 'GO_BENCH_TIMEOUT': unit.get('GO_BENCH_TIMEOUT') or '',
+ }
+
+ if flat_args[1] == "go.bench":
+ if "ya:run_go_benchmark" not in test_record["TAG"]:
+ return
+ else:
+ test_record["TEST-NAME"] += "_bench"
+
+ if flat_args[1] == 'fuzz.test' and unit.get('FUZZING') == 'yes':
+ test_record['FUZZING'] = '1'
+ # use all cores if fuzzing requested
+ test_record['REQUIREMENTS'] = serialize_list(filter(None, deserialize_list(test_record['REQUIREMENTS']) + ["cpu:all", "ram:all"]))
+
+ data = dump_test(unit, test_record)
+ if data:
+ unit.set_property(["DART_DATA", data])
+ save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
+
+
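+# Converts the Java source directories listed in the given variable into arcadia-relative paths serialized for TEST-DATA.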
+def java_srcdirs_to_data(unit, var):
+ extra_data = []
+ for srcdir in (unit.get(var) or '').replace('$' + var, '').split():
+ if srcdir == '.':
+ srcdir = unit.get('MODDIR')
+ if srcdir.startswith('${ARCADIA_ROOT}/') or srcdir.startswith('$ARCADIA_ROOT/'):
+ srcdir = srcdir.replace('${ARCADIA_ROOT}/', '$S/')
+ srcdir = srcdir.replace('$ARCADIA_ROOT/', '$S/')
+ if srcdir.startswith('${CURDIR}/') or srcdir.startswith('$CURDIR/'):
+ srcdir = srcdir.replace('${CURDIR}/', os.path.join('$S', unit.get('MODDIR')))
+ srcdir = srcdir.replace('$CURDIR/', os.path.join('$S', unit.get('MODDIR')))
+ srcdir = unit.resolve_arc_path(srcdir)
+ if not srcdir.startswith('$'):
+ srcdir = os.path.join('$S', unit.get('MODDIR'), srcdir)
+ if srcdir.startswith('$S'):
+ extra_data.append(srcdir.replace('$S', 'arcadia'))
+ return serialize_list(extra_data)
+
+
+def onadd_check(unit, *args):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+ flat_args, spec_args = _common.sort_by_keywords({"DEPENDS": -1, "TIMEOUT": 1, "DATA": -1, "TAG": -1, "REQUIREMENTS": -1, "FORK_MODE": 1,
+ "SPLIT_FACTOR": 1, "FORK_SUBTESTS": 0, "FORK_TESTS": 0, "SIZE": 1}, args)
+ check_type = flat_args[0]
+ test_dir = get_norm_unit_path(unit)
+
+ test_timeout = ''
+ fork_mode = ''
+ extra_test_data = ''
+ extra_test_dart_data = {}
+ ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes'
+
+ if check_type in ["flake8.py2", "flake8.py3"]:
+ script_rel_path = check_type
+ fork_mode = unit.get('TEST_FORK_MODE') or ''
+ elif check_type == "JAVA_STYLE":
+ if ymake_java_test and not unit.get('ALL_SRCDIRS') or '':
+ return
+ if len(flat_args) < 2:
+ raise Exception("Not enough arguments for JAVA_STYLE check")
+ check_level = flat_args[1]
+ allowed_levels = {
+ 'base': '/yandex_checks.xml',
+ 'strict': '/yandex_checks_strict.xml',
+ 'extended': '/yandex_checks_extended.xml',
+ 'library': '/yandex_checks_library.xml',
+ }
+ if check_level not in allowed_levels:
+ raise Exception('{} is not allowed in LINT(), use one of {}'.format(check_level, allowed_levels.keys()))
+ flat_args[1] = allowed_levels[check_level]
+ if check_level == 'none':
+ return
+ script_rel_path = "java.style"
+ test_timeout = '120'
+ fork_mode = unit.get('TEST_FORK_MODE') or ''
+ if ymake_java_test:
+ extra_test_data = java_srcdirs_to_data(unit, 'ALL_SRCDIRS')
+ extra_test_dart_data['JDK_RESOURCE'] = 'JDK' + (unit.get('JDK_VERSION') or '_DEFAULT')
+ elif check_type == "gofmt":
+ script_rel_path = check_type
+ go_files = flat_args[1:]
+ if go_files:
+ test_dir = os.path.dirname(go_files[0]).lstrip("$S/")
+ else:
+ script_rel_path = check_type
+
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
+ uid_ext = ''
+ if check_type in ("check.data", "check.resource"):
+ if unit.get("VALIDATE_DATA") == "no":
+ return
+ if check_type == "check.data":
+ uid_ext = unit.get("SBR_UID_EXT").split(" ", 1)[-1] # strip variable name
+ data_re = re.compile(r"sbr:/?/?(\d+)=?.*")
+ data = flat_args[1:]
+ resources = []
+ for f in data:
+ matched = re.match(data_re, f)
+ if matched:
+ resources.append(matched.group(1))
+ if resources:
+ test_files = serialize_list(resources)
+ else:
+ return
+ else:
+ test_files = serialize_list(flat_args[1:])
+
+ test_record = {
+ 'TEST-NAME': check_type.lower(),
+ 'TEST-TIMEOUT': test_timeout,
+ 'SCRIPT-REL-PATH': script_rel_path,
+ 'TESTED-PROJECT-NAME': os.path.basename(test_dir),
+ 'SOURCE-FOLDER-PATH': test_dir,
+ 'CUSTOM-DEPENDENCIES': " ".join(spec_args.get('DEPENDS', [])),
+ 'TEST-DATA': extra_test_data,
+ "SBR-UID-EXT": uid_ext,
+ 'SPLIT-FACTOR': '',
+ 'TEST_PARTITION': 'SEQUENTIAL',
+ 'FORK-MODE': fork_mode,
+ 'FORK-TEST-FILES': '',
+ 'SIZE': 'SMALL',
+ 'TAG': '',
+ 'REQUIREMENTS': '',
+ 'USE_ARCADIA_PYTHON': use_arcadia_python or '',
+ 'OLD_PYTEST': 'no',
+ 'PYTHON-PATHS': '',
+ # TODO remove FILES, see DEVTOOLS-7052
+ 'FILES': test_files,
+ 'TEST-FILES': test_files,
+ 'NO_JBUILD': 'yes' if ymake_java_test else 'no',
+ }
+ test_record.update(extra_test_dart_data)
+
+ data = dump_test(unit, test_record)
+ if data:
+ unit.set_property(["DART_DATA", data])
+ save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
+
+
+def on_register_no_check_imports(unit):
+ s = unit.get('NO_CHECK_IMPORTS_FOR_VALUE')
+ if s not in ('', 'None'):
+ unit.onresource(['-', 'py/no_check_imports/{}="{}"'.format(_common.pathid(s), s)])
+
+
+def onadd_check_py_imports(unit, *args):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+ if unit.get('NO_CHECK_IMPORTS_FOR_VALUE').strip() == "":
+ return
+ unit.onpeerdir(['library/python/testing/import_test'])
+ check_type = "py.imports"
+ test_dir = get_norm_unit_path(unit)
+
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
+ test_files = serialize_list([get_norm_unit_path(unit, unit.filename())])
+ test_record = {
+ 'TEST-NAME': "pyimports",
+ 'TEST-TIMEOUT': '',
+ 'SCRIPT-REL-PATH': check_type,
+ 'TESTED-PROJECT-NAME': os.path.basename(test_dir),
+ 'SOURCE-FOLDER-PATH': test_dir,
+ 'CUSTOM-DEPENDENCIES': '',
+ 'TEST-DATA': '',
+ 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
+ 'SPLIT-FACTOR': '',
+ 'TEST_PARTITION': 'SEQUENTIAL',
+ 'FORK-MODE': '',
+ 'FORK-TEST-FILES': '',
+ 'SIZE': 'SMALL',
+ 'TAG': '',
+ 'USE_ARCADIA_PYTHON': use_arcadia_python or '',
+ 'OLD_PYTEST': 'no',
+ 'PYTHON-PATHS': '',
+ # TODO remove FILES, see DEVTOOLS-7052
+ 'FILES': test_files,
+ 'TEST-FILES': test_files,
+ }
+ if unit.get('NO_CHECK_IMPORTS_FOR_VALUE') != "None":
+ test_record["NO-CHECK"] = serialize_list(get_values_list(unit, 'NO_CHECK_IMPORTS_FOR_VALUE') or ["*"])
+ else:
+ test_record["NO-CHECK"] = ''
+ data = dump_test(unit, test_record)
+ if data:
+ unit.set_property(["DART_DATA", data])
+ save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
+
+
+def onadd_pytest_script(unit, *args):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+ unit.set(["PYTEST_BIN", "no"])
+ custom_deps = get_values_list(unit, 'TEST_DEPENDS_VALUE')
+ timeout = filter(None, [unit.get(["TEST_TIMEOUT"])])
+
+ if timeout:
+ timeout = timeout[0]
+ else:
+ timeout = '0'
+ test_type = args[0]
+ fork_mode = unit.get('TEST_FORK_MODE').split() or ''
+ split_factor = unit.get('TEST_SPLIT_FACTOR') or ''
+ test_size = unit.get('TEST_SIZE_NAME') or ''
+
+ test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
+ tags = _get_test_tags(unit)
+ requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
+ test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
+ data, data_files = get_canonical_test_resources(unit)
+ test_data += data
+ python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
+ binary_path = None
+ test_cwd = unit.get('TEST_CWD_VALUE') or ''
+ _dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, data_files=data_files)
+
+
+def onadd_pytest_bin(unit, *args):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+ flat, kws = _common.sort_by_keywords({'RUNNER_BIN': 1}, args)
+ if flat:
+ ymake.report_configure_error(
+ 'Unknown arguments found while processing add_pytest_bin macro: {!r}'
+ .format(flat)
+ )
+
+ runner_bin = kws.get('RUNNER_BIN', [None])[0]
+ test_type = 'py3test.bin' if (unit.get("PYTHON3") == 'yes') else "pytest.bin"
+
+ add_test_to_dart(unit, test_type, runner_bin=runner_bin)
+
+
+def add_test_to_dart(unit, test_type, binary_path=None, runner_bin=None):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+ custom_deps = get_values_list(unit, 'TEST_DEPENDS_VALUE')
+ timeout = filter(None, [unit.get(["TEST_TIMEOUT"])])
+ if timeout:
+ timeout = timeout[0]
+ else:
+ timeout = '0'
+ fork_mode = unit.get('TEST_FORK_MODE').split() or ''
+ split_factor = unit.get('TEST_SPLIT_FACTOR') or ''
+ test_size = unit.get('TEST_SIZE_NAME') or ''
+ test_cwd = unit.get('TEST_CWD_VALUE') or ''
+
+ unit_path = unit.path()
+ test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
+ tags = _get_test_tags(unit)
+ requirements = get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')
+ test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
+ data, data_files = get_canonical_test_resources(unit)
+ test_data += data
+ python_paths = get_values_list(unit, 'TEST_PYTHON_PATH_VALUE')
+ yt_spec = get_values_list(unit, 'TEST_YT_SPEC_VALUE')
+ if not binary_path:
+ binary_path = os.path.join(unit_path, unit.filename())
+ _dump_test(unit, test_type, test_files, timeout, get_norm_unit_path(unit), custom_deps, test_data, python_paths, split_factor, fork_mode, test_size, tags, requirements, binary_path, test_cwd=test_cwd, runner_bin=runner_bin, yt_spec=yt_spec, data_files=data_files)
+
+
+def extract_java_system_properties(unit, args):
+ if len(args) % 2:
+ return [], 'Wrong use of SYSTEM_PROPERTIES in {}: odd number of arguments'.format(unit.path())
+
+ props = []
+ for x, y in zip(args[::2], args[1::2]):
+ if x == 'FILE':
+ if y.startswith('${BINDIR}') or y.startswith('${ARCADIA_BUILD_ROOT}') or y.startswith('/'):
+ return [], 'Wrong use of SYSTEM_PROPERTIES in {}: absolute/build file path {}'.format(unit.path(), y)
+
+ y = _common.rootrel_arc_src(y, unit)
+ if not os.path.exists(unit.resolve('$S/' + y)):
+ return [], 'Wrong use of SYSTEM_PROPERTIES in {}: can\'t resolve {}'.format(unit.path(), y)
+
+ y = '${ARCADIA_ROOT}/' + y
+ props.append({'type': 'file', 'path': y})
+ else:
+ props.append({'type': 'inline', 'key': x, 'value': y})
+
+ return props, None
+
+
+def onjava_test(unit, *args):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+
+ assert unit.get('MODULE_TYPE') is not None
+
+ if unit.get('MODULE_TYPE') == 'JTEST_FOR':
+ if not unit.get('UNITTEST_DIR'):
+ ymake.report_configure_error('skip JTEST_FOR in {}: no args provided'.format(unit.path()))
+ return
+
+ java_cp_arg_type = unit.get('JAVA_CLASSPATH_CMD_TYPE_VALUE') or 'MANIFEST'
+ if java_cp_arg_type not in ('MANIFEST', 'COMMAND_FILE', 'LIST'):
+ ymake.report_configure_error('{}: TEST_JAVA_CLASSPATH_CMD_TYPE({}) is invalid. Choose one of MANIFEST, COMMAND_FILE or LIST'.format(unit.path(), java_cp_arg_type))
+ return
+
+ unit_path = unit.path()
+ path = _common.strip_roots(unit_path)
+
+ test_data = get_norm_paths(unit, 'TEST_DATA_VALUE')
+ test_data.append('arcadia/build/scripts/run_junit.py')
+ test_data.append('arcadia/build/scripts/unpacking_jtest_runner.py')
+
+ data, data_files = get_canonical_test_resources(unit)
+ test_data += data
+
+ props, error_mgs = extract_java_system_properties(unit, get_values_list(unit, 'SYSTEM_PROPERTIES_VALUE'))
+ if error_mgs:
+ ymake.report_configure_error(error_mgs)
+ return
+ for prop in props:
+ if prop['type'] == 'file':
+ test_data.append(prop['path'].replace('${ARCADIA_ROOT}', 'arcadia'))
+
+ props = base64.b64encode(json.dumps(props, encoding='utf-8'))
+
+ test_cwd = unit.get('TEST_CWD_VALUE') or '' # TODO: validate test_cwd value
+
+ if unit.get('MODULE_TYPE') == 'JUNIT5':
+ script_rel_path = 'junit5.test'
+ else:
+ script_rel_path = 'junit.test'
+
+ ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes'
+ test_record = {
+ 'SOURCE-FOLDER-PATH': path,
+ 'TEST-NAME': '-'.join([os.path.basename(os.path.dirname(path)), os.path.basename(path)]),
+ 'SCRIPT-REL-PATH': script_rel_path,
+ 'TEST-TIMEOUT': unit.get('TEST_TIMEOUT') or '',
+ 'TESTED-PROJECT-NAME': path,
+ 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
+ # 'TEST-PRESERVE-ENV': 'da',
+ 'TEST-DATA': serialize_list(sorted(_common.filter_out_by_keyword(test_data, 'AUTOUPDATED'))),
+ 'FORK-MODE': unit.get('TEST_FORK_MODE') or '',
+ 'SPLIT-FACTOR': unit.get('TEST_SPLIT_FACTOR') or '',
+ 'CUSTOM-DEPENDENCIES': ' '.join(get_values_list(unit, 'TEST_DEPENDS_VALUE')),
+ 'TAG': serialize_list(_get_test_tags(unit)),
+ 'SIZE': unit.get('TEST_SIZE_NAME') or '',
+ 'REQUIREMENTS': serialize_list(get_values_list(unit, 'TEST_REQUIREMENTS_VALUE')),
+ 'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
+
+ # JTEST/JTEST_FOR only
+ 'MODULE_TYPE': unit.get('MODULE_TYPE'),
+ 'UNITTEST_DIR': unit.get('UNITTEST_DIR') or '',
+ 'JVM_ARGS': serialize_list(get_values_list(unit, 'JVM_ARGS_VALUE')),
+ 'SYSTEM_PROPERTIES': props,
+ 'TEST-CWD': test_cwd,
+ 'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
+ 'JAVA_CLASSPATH_CMD_TYPE': java_cp_arg_type,
+ 'NO_JBUILD': 'yes' if ymake_java_test else 'no',
+ 'JDK_RESOURCE': 'JDK' + (unit.get('JDK_VERSION') or '_DEFAULT'),
+ 'JDK_FOR_TESTS': 'JDK' + (unit.get('JDK_VERSION') or '_DEFAULT') + '_FOR_TESTS',
+ }
+ test_classpath_origins = unit.get('TEST_CLASSPATH_VALUE')
+ if test_classpath_origins:
+ test_record['TEST_CLASSPATH_ORIGINS'] = test_classpath_origins
+ test_record['TEST_CLASSPATH'] = '${TEST_CLASSPATH_MANAGED}'
+ elif ymake_java_test:
+ test_record['TEST_CLASSPATH'] = '${DART_CLASSPATH}'
+ test_record['TEST_CLASSPATH_DEPS'] = '${DART_CLASSPATH_DEPS}'
+ if unit.get('UNITTEST_DIR'):
+ test_record['TEST_JAR'] = '${UNITTEST_MOD}'
+ else:
+ test_record['TEST_JAR'] = '{}/{}.jar'.format(unit.get('MODDIR'), unit.get('REALPRJNAME'))
+
+ data = dump_test(unit, test_record)
+ if data:
+ unit.set_property(['DART_DATA', data])
+
+
+def onjava_test_deps(unit, *args):
+ if unit.get("TIDY") == "yes":
+ # graph changed for clang_tidy tests
+ return
+
+ assert unit.get('MODULE_TYPE') is not None
+ assert len(args) == 1
+ mode = args[0]
+
+ path = get_norm_unit_path(unit)
+ ymake_java_test = unit.get('YMAKE_JAVA_TEST') == 'yes'
+
+ test_record = {
+ 'SOURCE-FOLDER-PATH': path,
+ 'TEST-NAME': '-'.join([os.path.basename(os.path.dirname(path)), os.path.basename(path), 'dependencies']).strip('-'),
+ 'SCRIPT-REL-PATH': 'java.dependency.test',
+ 'TEST-TIMEOUT': '',
+ 'TESTED-PROJECT-NAME': path,
+ 'TEST-DATA': '',
+ 'TEST_PARTITION': 'SEQUENTIAL',
+ 'FORK-MODE': '',
+ 'SPLIT-FACTOR': '',
+ 'CUSTOM-DEPENDENCIES': ' '.join(get_values_list(unit, 'TEST_DEPENDS_VALUE')),
+ 'TAG': '',
+ 'SIZE': 'SMALL',
+ 'IGNORE_CLASSPATH_CLASH': ' '.join(get_values_list(unit, 'JAVA_IGNORE_CLASSPATH_CLASH_VALUE')),
+ 'NO_JBUILD': 'yes' if ymake_java_test else 'no',
+
+ # JTEST/JTEST_FOR only
+ 'MODULE_TYPE': unit.get('MODULE_TYPE'),
+ 'UNITTEST_DIR': '',
+ 'SYSTEM_PROPERTIES': '',
+ 'TEST-CWD': '',
+ }
+ if mode == 'strict':
+ test_record['STRICT_CLASSPATH_CLASH'] = 'yes'
+
+ if ymake_java_test:
+ test_record['CLASSPATH'] = '$B/{}/{}.jar ${{DART_CLASSPATH}}'.format(unit.get('MODDIR'), unit.get('REALPRJNAME'))
+
+ data = dump_test(unit, test_record)
+ unit.set_property(['DART_DATA', data])
+
+
+def _get_test_tags(unit, spec_args=None):
+ if spec_args is None:
+ spec_args = {}
+ tags = spec_args.get('TAG', []) + get_values_list(unit, 'TEST_TAGS_VALUE')
+ # DEVTOOLS-7571
+ if unit.get('SKIP_TEST_VALUE') and 'ya:fat' in tags and "ya:not_autocheck" not in tags:
+ tags.append("ya:not_autocheck")
+
+ return tags
+
+
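+# Builds the common dart record for script-based tests and stores it in DART_DATA and the dart output file.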
+def _dump_test(
+ unit,
+ test_type,
+ test_files,
+ timeout,
+ test_dir,
+ custom_deps,
+ test_data,
+ python_paths,
+ split_factor,
+ fork_mode,
+ test_size,
+ tags,
+ requirements,
+ binary_path='',
+ old_pytest=False,
+ test_cwd=None,
+ runner_bin=None,
+ yt_spec=None,
+ data_files=None
+):
+
+ if test_type == "PY_TEST":
+ script_rel_path = "py.test"
+ else:
+ script_rel_path = test_type
+
+ unit_path = unit.path()
+ fork_test_files = unit.get('FORK_TEST_FILES_MODE')
+ fork_mode = ' '.join(fork_mode) if fork_mode else ''
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON')
+ if test_cwd:
+ test_cwd = test_cwd.replace("$TEST_CWD_VALUE", "").replace('"MACRO_CALLS_DELIM"', "").strip()
+ test_name = os.path.basename(binary_path)
+ test_record = {
+ 'TEST-NAME': os.path.splitext(test_name)[0],
+ 'TEST-TIMEOUT': timeout,
+ 'SCRIPT-REL-PATH': script_rel_path,
+ 'TESTED-PROJECT-NAME': test_name,
+ 'SOURCE-FOLDER-PATH': test_dir,
+ 'CUSTOM-DEPENDENCIES': " ".join(custom_deps),
+ 'TEST-ENV': prepare_env(unit.get("TEST_ENV_VALUE")),
+ # 'TEST-PRESERVE-ENV': 'da',
+ 'TEST-DATA': serialize_list(sorted(_common.filter_out_by_keyword(test_data, 'AUTOUPDATED'))),
+ 'TEST-RECIPES': prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
+ 'SPLIT-FACTOR': split_factor,
+ 'TEST_PARTITION': unit.get('TEST_PARTITION') or 'SEQUENTIAL',
+ 'FORK-MODE': fork_mode,
+ 'FORK-TEST-FILES': fork_test_files,
+ 'TEST-FILES': serialize_list(test_files),
+ 'SIZE': test_size,
+ 'TAG': serialize_list(tags),
+ 'REQUIREMENTS': serialize_list(requirements),
+ 'USE_ARCADIA_PYTHON': use_arcadia_python or '',
+ 'OLD_PYTEST': 'yes' if old_pytest else 'no',
+ 'PYTHON-PATHS': serialize_list(python_paths),
+ 'TEST-CWD': test_cwd or '',
+ 'SKIP_TEST': unit.get('SKIP_TEST_VALUE') or '',
+ 'BUILD-FOLDER-PATH': _common.strip_roots(unit_path),
+ 'BLOB': unit.get('TEST_BLOB_DATA') or '',
+ 'CANONIZE_SUB_PATH': unit.get('CANONIZE_SUB_PATH') or '',
+ }
+ if binary_path:
+ test_record['BINARY-PATH'] = _common.strip_roots(binary_path)
+ if runner_bin:
+ test_record['TEST-RUNNER-BIN'] = runner_bin
+ if yt_spec:
+ test_record['YT-SPEC'] = serialize_list(yt_spec)
+ data = dump_test(unit, test_record)
+ if data:
+ unit.set_property(["DART_DATA", data])
+ save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
+
+
+def onsetup_pytest_bin(unit, *args):
+ use_arcadia_python = unit.get('USE_ARCADIA_PYTHON') == "yes"
+ if use_arcadia_python:
+ unit.onresource(['-', 'PY_MAIN={}'.format("library.python.pytest.main:main")]) # XXX
+ unit.onadd_pytest_bin(list(args))
+ else:
+ unit.onno_platform()
+ unit.onadd_pytest_script(["PY_TEST"])
+
+
+def onrun(unit, *args):
+ exectest_cmd = unit.get(["EXECTEST_COMMAND_VALUE"]) or ''
+ exectest_cmd += "\n" + subprocess.list2cmdline(args)
+ unit.set(["EXECTEST_COMMAND_VALUE", exectest_cmd])
+
+
+def onsetup_exectest(unit, *args):
+ command = unit.get(["EXECTEST_COMMAND_VALUE"])
+ if command is None:
+ ymake.report_configure_error("EXECTEST must have at least one RUN macro")
+ return
+ command = command.replace("$EXECTEST_COMMAND_VALUE", "")
+ if "PYTHON_BIN" in command:
+ unit.ondepends('contrib/tools/python')
+ unit.set(["TEST_BLOB_DATA", base64.b64encode(command)])
+ add_test_to_dart(unit, "exectest", binary_path=os.path.join(unit.path(), unit.filename()).replace(".pkg", ""))
+
+
+def onsetup_run_python(unit):
+ if unit.get("USE_ARCADIA_PYTHON") == "yes":
+ unit.ondepends('contrib/tools/python')
+
+
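+# Collects external (MDS/Sandbox) resources referenced from canondata/result.json for the unit, returning them along with the parsed result.json files.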
+def get_canonical_test_resources(unit):
+ unit_path = unit.path()
+ canon_data_dir = os.path.join(unit.resolve(unit_path), CANON_DATA_DIR_NAME, unit.get('CANONIZE_SUB_PATH') or '')
+
+ try:
+ _, dirs, files = next(os.walk(canon_data_dir))
+ except StopIteration:
+ # path doesn't exist
+ return [], []
+
+ if CANON_RESULT_FILE_NAME in files:
+ return _get_canonical_data_resources_v2(os.path.join(canon_data_dir, CANON_RESULT_FILE_NAME), unit_path)
+ return [], []
+
+
+def _load_canonical_file(filename, unit_path):
+ try:
+ with open(filename) as results_file:
+ return json.load(results_file)
+ except Exception as e:
+ print>>sys.stderr, "malformed canonical data in {}: {} ({})".format(unit_path, e, filename)
+ return {}
+
+
+def _get_resource_from_uri(uri):
+ m = CANON_MDS_RESOURCE_REGEX.match(uri)
+ if m:
+ res_id = m.group(1)
+ return "{}:{}".format(MDS_SHEME, res_id)
+
+ m = CANON_SBR_RESOURCE_REGEX.match(uri)
+ if m:
+ # There might be a conflict between resources, because all resources in Sandbox are named 'resource.tar.gz'.
+ # That's why we use the '=' notation to specify a dedicated path for the resource.
+ uri = m.group(1)
+ res_id = m.group(2)
+ return "{}={}".format(uri, '/'.join([CANON_OUTPUT_STORAGE, res_id]))
+
+
+def _get_external_resources_from_canon_data(data):
+ # The method should work with all known canonization formats:
+ # result.json: {'uri': X, 'checksum': Y}
+ # result.json: {'testname': {'uri': X, 'checksum': Y}}
+ # result.json: {'testname': [{'uri': X, 'checksum': Y}]}
+ # Also there is a bug: if a user returns {'uri': 1} from a test, the machinery will fail.
+ # That's why we check for the presence of both the 'uri' and 'checksum' fields
+ # (it's still a bug: a user can return {'uri': X, 'checksum': Y}, so the canonization format needs to be unified)
+ res = set()
+
+ if isinstance(data, dict):
+ if 'uri' in data and 'checksum' in data:
+ resource = _get_resource_from_uri(data['uri'])
+ if resource:
+ res.add(resource)
+ else:
+ for k, v in data.iteritems():
+ res.update(_get_external_resources_from_canon_data(v))
+ elif isinstance(data, list):
+ for e in data:
+ res.update(_get_external_resources_from_canon_data(e))
+
+ return res
+
+
+def _get_canonical_data_resources_v2(filename, unit_path):
+ return (_get_external_resources_from_canon_data(_load_canonical_file(filename, unit_path)), [filename])
diff --git a/build/plugins/ytest2.py b/build/plugins/ytest2.py
new file mode 100644
index 0000000000..0a34263c35
--- /dev/null
+++ b/build/plugins/ytest2.py
@@ -0,0 +1,54 @@
+import os
+import _common
+
+
+def dir_stmts(unit, dir):
+ unit.onpeerdir(dir)
+ unit.onsrcdir(os.sep.join([dir, 'tests']))
+
+
+def pytest_base(unit, args):
+ related_prj_dir = args[0]
+ related_prj_name = args[1]
+ dir_stmts(unit, related_prj_dir)
+ ytest_base(unit, related_prj_dir, related_prj_name, args[2:])
+ unit.set(['ADDITIONAL_PATH', '--test-related-path ${ARCADIA_ROOT}/test'])
+
+
+def ytest_base(unit, related_prj_dir, related_prj_name, args):
+ keywords = {"DEPENDS": -1, "DATA": -1}
+ flat_args, spec_args = _common.sort_by_keywords(keywords, args)
+ unit.set(['TEST-NAME', os.path.basename(flat_args[0])])
+ unit.set(['SCRIPT-REL-PATH', flat_args[1]])
+ unit.set(['SOURCE-FOLDER-PATH', related_prj_dir])
+ unit.set(['BUILD-FOLDER-PATH', os.path.join('$B', related_prj_dir)])
+ unit.set(['TESTED-BINARY-PATH', flat_args[0]])
+
+ custom_deps = ' '.join(spec_args["DEPENDS"]) if "DEPENDS" in spec_args else ''
+ unit.set(['CUSTOM-DEPENDENCIES', custom_deps])
+ data_lst = spec_args.get('DATA', []) + (unit.get(['__test_data']) or '').split(' ')
+ data_lst.sort()
+ data = '\"' + ';'.join(data_lst) + '\"' if data_lst else ''
+ unit.set(['TEST-DATA', data])
+
+ related_dirs_list = ['{ARCADIA_ROOT}/devtools/${YA_ROOT}', '${ARCADIA_ROOT}/devtools/${YA_ROOT}', '$RELATED_TARGET_SRCDIR']
+ related_dirs_value = []
+ for rel in related_dirs_list:
+ related_dirs_value.extend(['--test-related-path', rel])
+ unit.set(['RELATED_DIRS', ' '.join(related_dirs_value)])
+ unit.set(['TEST_KV', '${{kv;hide:"test_related_dirs {}"}}'.format(' '.join(related_dirs_list))])
+
+
+def on_unittest(unit, *args):
+ related_prj_name = args[0]
+ related_prj_dir = args[1][3:]
+ unit.set(['TEST_TYPE', '${kv;hide:"test-type unittest"}'])
+ ytest_base(unit, related_prj_dir, related_prj_name, args)
+
+
+def on_ytest(unit, *args):
+ pytest_base(unit, args)
+
+
+def on_py_test(unit, *args):
+ pytest_base(unit, args)