aboutsummaryrefslogtreecommitdiffstats
path: root/build/plugins/lib
diff options
context:
space:
mode:
authoralexv-smirnov <alex@ydb.tech>2023-03-15 19:59:12 +0300
committeralexv-smirnov <alex@ydb.tech>2023-03-15 19:59:12 +0300
commit056bb284ccf8dd6793ec3a54ffa36c4fb2b9ad11 (patch)
tree4740980126f32e3af7937ba0ca5f83e59baa4ab0 /build/plugins/lib
parent269126dcced1cc8b53eb4398b4a33e5142f10290 (diff)
downloadydb-056bb284ccf8dd6793ec3a54ffa36c4fb2b9ad11.tar.gz
add library/cpp/actors, ymake build to ydb oss export
Diffstat (limited to 'build/plugins/lib')
-rw-r--r--build/plugins/lib/__init__.py0
-rw-r--r--build/plugins/lib/_metric_resolvers.py11
-rw-r--r--build/plugins/lib/nots/__init__.py0
-rw-r--r--build/plugins/lib/nots/constants.py12
-rw-r--r--build/plugins/lib/nots/package_manager/__init__.py11
-rw-r--r--build/plugins/lib/nots/package_manager/base/__init__.py14
-rw-r--r--build/plugins/lib/nots/package_manager/base/constants.py5
-rw-r--r--build/plugins/lib/nots/package_manager/base/lockfile.py68
-rw-r--r--build/plugins/lib/nots/package_manager/base/node_modules_bundler.py66
-rw-r--r--build/plugins/lib/nots/package_manager/base/package_json.py170
-rw-r--r--build/plugins/lib/nots/package_manager/base/package_manager.py141
-rw-r--r--build/plugins/lib/nots/package_manager/base/tests/package_json.py152
-rw-r--r--build/plugins/lib/nots/package_manager/base/tests/utils.py15
-rw-r--r--build/plugins/lib/nots/package_manager/base/tests/ya.make14
-rw-r--r--build/plugins/lib/nots/package_manager/base/utils.py29
-rw-r--r--build/plugins/lib/nots/package_manager/base/ya.make23
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/__init__.py12
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/constants.py2
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/lockfile.py162
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/package_manager.py215
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py320
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py68
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/tests/ya.make15
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/utils.py11
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/workspace.py75
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/ya.make24
-rw-r--r--build/plugins/lib/nots/package_manager/ya.make14
-rw-r--r--build/plugins/lib/nots/semver/__init__.py5
-rw-r--r--build/plugins/lib/nots/semver/semver.py228
-rw-r--r--build/plugins/lib/nots/semver/tests/test_version.py242
-rw-r--r--build/plugins/lib/nots/semver/tests/test_version_range.py69
-rw-r--r--build/plugins/lib/nots/semver/tests/ya.make14
-rw-r--r--build/plugins/lib/nots/semver/ya.make14
-rw-r--r--build/plugins/lib/nots/typescript/__init__.py9
-rw-r--r--build/plugins/lib/nots/typescript/tests/ts_config.py86
-rw-r--r--build/plugins/lib/nots/typescript/tests/ya.make13
-rw-r--r--build/plugins/lib/nots/typescript/ts_config.py300
-rw-r--r--build/plugins/lib/nots/typescript/ts_errors.py10
-rw-r--r--build/plugins/lib/nots/typescript/ya.make19
-rw-r--r--build/plugins/lib/nots/ya.make15
-rw-r--r--build/plugins/lib/test_const/__init__.py522
-rw-r--r--build/plugins/lib/test_const/ya.make9
-rw-r--r--build/plugins/lib/ya.make7
43 files changed, 3211 insertions, 0 deletions
diff --git a/build/plugins/lib/__init__.py b/build/plugins/lib/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/build/plugins/lib/__init__.py
diff --git a/build/plugins/lib/_metric_resolvers.py b/build/plugins/lib/_metric_resolvers.py
new file mode 100644
index 0000000000..270eb78345
--- /dev/null
+++ b/build/plugins/lib/_metric_resolvers.py
@@ -0,0 +1,11 @@
+import re
+
# A bare non-negative integer, optionally surrounded by whitespace.
VALUE_PATTERN = re.compile(r"^\s*(?P<value>\d+)\s*$")


def resolve_value(val):
    """Return the integer embedded in *val*, or None when *val* is not a
    plain non-negative decimal (whitespace around the digits is allowed)."""
    matched = VALUE_PATTERN.match(val)
    return int(matched.group("value")) if matched else None
diff --git a/build/plugins/lib/nots/__init__.py b/build/plugins/lib/nots/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/build/plugins/lib/nots/__init__.py
diff --git a/build/plugins/lib/nots/constants.py b/build/plugins/lib/nots/constants.py
new file mode 100644
index 0000000000..d819e86192
--- /dev/null
+++ b/build/plugins/lib/nots/constants.py
@@ -0,0 +1,12 @@
+from lib.nots.semver import Version
+
# It is crucial to keep this array sorted in ascending version order:
# DEFAULT_NODE_VERSION below takes element 0 as the minimum supported
# version, and consumers presumably rely on the ordering — confirm at
# call sites before reordering.
SUPPORTED_NODE_VERSIONS = [
    Version.from_str("12.18.4"),
    Version.from_str("12.22.12"),
    Version.from_str("14.21.1"),
    Version.from_str("16.18.1"),
    Version.from_str("18.12.1")
]

# The oldest supported Node.js version is the default.
DEFAULT_NODE_VERSION = SUPPORTED_NODE_VERSIONS[0]
diff --git a/build/plugins/lib/nots/package_manager/__init__.py b/build/plugins/lib/nots/package_manager/__init__.py
new file mode 100644
index 0000000000..4128980efe
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/__init__.py
@@ -0,0 +1,11 @@
from .pnpm import PnpmPackageManager
from .base import PackageJson, constants, utils, bundle_node_modules, extract_node_modules


# The package-manager implementation used by the build plugins.
# Currently pinned to pnpm; change this alias to swap the backend.
manager = PnpmPackageManager

__all__ = [
    "PackageJson",
    "constants", "utils",
    "bundle_node_modules", "extract_node_modules",
    # Fix: the package-manager entry points were importable but missing from
    # the star-import surface, unlike the sibling `base` package which
    # exports everything it imports.
    "PnpmPackageManager",
    "manager",
]
diff --git a/build/plugins/lib/nots/package_manager/base/__init__.py b/build/plugins/lib/nots/package_manager/base/__init__.py
new file mode 100644
index 0000000000..8950fd818e
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/__init__.py
@@ -0,0 +1,14 @@
# Re-export the package-manager-agnostic building blocks so consumers can
# import everything directly from `..base`.
from . import constants, utils
from .lockfile import BaseLockfile, LockfilePackageMeta, LockfilePackageMetaInvalidError
from .package_json import PackageJson
from .package_manager import BasePackageManager, PackageManagerError, PackageManagerCommandError
from .node_modules_bundler import bundle_node_modules, extract_node_modules


__all__ = [
    "constants", "utils",
    "BaseLockfile", "LockfilePackageMeta", "LockfilePackageMetaInvalidError",
    "BasePackageManager", "PackageManagerError", "PackageManagerCommandError",
    "PackageJson",
    "bundle_node_modules", "extract_node_modules",
]
diff --git a/build/plugins/lib/nots/package_manager/base/constants.py b/build/plugins/lib/nots/package_manager/base/constants.py
new file mode 100644
index 0000000000..ecdbe05ebf
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/constants.py
@@ -0,0 +1,5 @@
# Standard npm manifest filename.
PACKAGE_JSON_FILENAME = "package.json"
# Directory that holds installed dependencies.
NODE_MODULES_DIRNAME = "node_modules"
# Tarball with a single module's node_modules (build output).
NODE_MODULES_BUNDLE_FILENAME = "node_modules.tar"
# Tarball with the whole workspace's node_modules.
NODE_MODULES_WORKSPACE_BUNDLE_FILENAME = "workspace_node_modules.tar"
# Internal registry mirror passed as the default --registry option.
NPM_REGISTRY_URL = "http://npm.yandex-team.ru"
diff --git a/build/plugins/lib/nots/package_manager/base/lockfile.py b/build/plugins/lib/nots/package_manager/base/lockfile.py
new file mode 100644
index 0000000000..9b9c0be954
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/lockfile.py
@@ -0,0 +1,68 @@
+import os
+
+from abc import ABCMeta, abstractmethod
+from six import add_metaclass
+
+
class LockfilePackageMeta(object):
    """
    Basic struct representing package meta from lockfile.

    Serialized form is a single space-separated line:
    "<name> <version> <sky_id> <integrity> <integrity_algorithm>".
    """
    __slots__ = ("name", "version", "sky_id", "integrity", "integrity_algorithm", "tarball_path")

    @staticmethod
    def from_str(s):
        """Parse a serialized meta line (inverse of to_str)."""
        fields = s.strip().split(" ")
        return LockfilePackageMeta(*fields)

    def __init__(self, name, version, sky_id, integrity, integrity_algorithm):
        self.name = name
        self.version = version
        self.sky_id = sky_id
        self.integrity = integrity
        self.integrity_algorithm = integrity_algorithm
        # Conventional contrib tarball filename for this package version.
        self.tarball_path = "{}-{}.tgz".format(name, version)

    def to_str(self):
        """Serialize to the line format parsed by from_str."""
        return "{} {} {} {} {}".format(
            self.name, self.version, self.sky_id, self.integrity, self.integrity_algorithm
        )
+
+
class LockfilePackageMetaInvalidError(RuntimeError):
    """Raised when a lockfile entry cannot be parsed into LockfilePackageMeta."""
    pass
+
+
@add_metaclass(ABCMeta)
class BaseLockfile(object):
    """
    Abstract lockfile wrapper; concrete per-package-manager subclasses
    implement parsing, serialization and package-meta extraction.
    """

    @classmethod
    def load(cls, path):
        """
        :param path: lockfile path
        :type path: str
        :rtype: BaseLockfile
        """
        pj = cls(path)
        pj.read()

        return pj

    def __init__(self, path):
        # Lockfile paths must be absolute; fail fast on misuse.
        if not os.path.isabs(path):
            raise TypeError("Absolute path required, given: {}".format(path))

        self.path = path
        self.data = None  # parsed contents, populated by read()

    @abstractmethod
    def read(self):
        """Load self.data from self.path."""
        pass

    @abstractmethod
    def write(self, path=None):
        """Persist self.data to path (or self.path by default)."""
        pass

    @abstractmethod
    def get_packages_meta(self):
        """Return LockfilePackageMeta items parsed from the lockfile."""
        pass

    @abstractmethod
    def update_tarball_resolutions(self, fn):
        """Rewrite each package's resolution.tarball via fn(meta)."""
        pass
diff --git a/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py b/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py
new file mode 100644
index 0000000000..c835c4d7ca
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py
@@ -0,0 +1,66 @@
+import os
+import tarfile
+
+from io import BytesIO
+
+from .utils import build_nm_path
+
+
+PEERS_DIR = ".peers"
+PEERS_INDEX = "index"
+
+
def bundle_node_modules(build_root, peers, node_modules_path, bundle_path):
    """
    Creates node_modules bundle.
    Bundle contains node_modules directory, peers' node_modules directories,
    and index file with the list of added peers (\\n delimited).
    :param build_root: arcadia build root
    :type build_root: str
    :param peers: list of peers (arcadia root related)
    :type peers: list of str
    :param node_modules_path: node_modules path
    :type node_modules_path: str
    :param bundle_path: tarball path
    :type bundle_path: str
    """
    with tarfile.open(bundle_path, "w") as tf:
        # This module's own node_modules goes at the archive root.
        tf.add(node_modules_path, arcname=".")

        # Peers' node_modules: only peers that actually have an installed
        # node_modules directory are bundled (and recorded in the index).
        added_peers = []
        for p in peers:
            peer_nm_path = build_nm_path(os.path.join(build_root, p))
            peer_bundled_nm_path = build_nm_path(os.path.join(PEERS_DIR, p))
            if not os.path.isdir(peer_nm_path):
                continue
            tf.add(peer_nm_path, arcname=peer_bundled_nm_path)
            added_peers.append(p)

        # Peers index.
        # Fix: TarInfo.size must be the size of the *encoded* payload in
        # bytes; len() of the unicode string undercounts for non-ASCII peer
        # paths, which would corrupt the archive member. Encode once and use
        # the byte length.
        peers_index = "\n".join(added_peers).encode()
        ti = tarfile.TarInfo(name=os.path.join(PEERS_DIR, PEERS_INDEX))
        ti.size = len(peers_index)
        tf.addfile(ti, BytesIO(peers_index))
+
+
def extract_node_modules(build_root, node_modules_path, bundle_path):
    """
    Extracts node_modules bundle.
    :param build_root: arcadia build root
    :type build_root: str
    :param node_modules_path: node_modules path
    :type node_modules_path: str
    :param bundle_path: tarball path
    :type bundle_path: str
    """
    with tarfile.open(bundle_path) as tf:
        # NOTE(review): extractall() trusts archive member paths; bundles are
        # produced by bundle_node_modules() in this build, not untrusted input.
        tf.extractall(node_modules_path)

    # Fix: the peers index was previously opened without ever being closed,
    # leaking the file descriptor; read it inside a context manager.
    index_path = os.path.join(node_modules_path, PEERS_DIR, PEERS_INDEX)
    with open(index_path) as f:
        peers = f.read().split("\n")

    # Move each bundled peer node_modules back to the peer's build location.
    for p in peers:
        if not p:
            continue
        bundled_nm_path = build_nm_path(os.path.join(node_modules_path, PEERS_DIR, p))
        nm_path = build_nm_path(os.path.join(build_root, p))
        os.rename(bundled_nm_path, nm_path)
diff --git a/build/plugins/lib/nots/package_manager/base/package_json.py b/build/plugins/lib/nots/package_manager/base/package_json.py
new file mode 100644
index 0000000000..29d7b01203
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/package_json.py
@@ -0,0 +1,170 @@
+import os
+import json
+
+from six import iteritems
+
+from .utils import build_pj_path
+
+
class PackageJsonWorkspaceError(RuntimeError):
    """Raised when a "workspace:" dependency declaration is invalid or inconsistent."""
    pass
+
+
class PackageJson(object):
    """
    In-memory representation of a package.json manifest with helpers for
    resolving pnpm-style "workspace:" dependencies.
    """

    DEP_KEY = "dependencies"
    DEV_DEP_KEY = "devDependencies"
    PEER_DEP_KEY = "peerDependencies"
    OPT_DEP_KEY = "optionalDependencies"
    # All dependency sections, in the order they are scanned.
    DEP_KEYS = (DEP_KEY, DEV_DEP_KEY, PEER_DEP_KEY, OPT_DEP_KEY)

    # Specifier prefix marking an in-repo (workspace) dependency,
    # e.g. "workspace:../bar".
    WORKSPACE_SCHEMA = "workspace:"

    @classmethod
    def load(cls, path):
        """
        :param path: package.json path
        :type path: str
        :rtype: PackageJson
        """
        pj = cls(path)
        pj.read()

        return pj

    def __init__(self, path):
        # Fail fast on relative paths: workspace resolution below derives
        # other paths from this one.
        if not os.path.isabs(path):
            raise TypeError("Absolute path required, given: {}".format(path))

        self.path = path
        self.data = None  # parsed JSON dict, populated by read()

    def read(self):
        """Parse the manifest at self.path into self.data."""
        with open(self.path) as f:
            self.data = json.load(f)

    def write(self, path=None):
        """
        :param path: path to store package.json, defaults to original path
        :type path: str
        """
        if path is None:
            path = self.path

        directory = os.path.dirname(path)
        if not os.path.exists(directory):
            # NOTE(review): os.mkdir creates a single level only — the
            # grandparent directory is presumed to exist; confirm callers.
            os.mkdir(directory)

        with open(path, "w") as f:
            json.dump(self.data, f, indent=4)

    def get_name(self):
        """Return the mandatory "name" field (KeyError if absent)."""
        return self.data["name"]

    def get_version(self):
        """Return the mandatory "version" field (KeyError if absent)."""
        return self.data["version"]

    def get_description(self):
        """Return the optional "description" field, or None."""
        return self.data.get("description")

    def get_nodejs_version(self):
        """Return engines.node if declared, else None."""
        return self.data.get("engines", {}).get("node")

    def dependencies_iter(self):
        """Yield (name, specifier) pairs over every section in DEP_KEYS."""
        for key in self.DEP_KEYS:
            deps = self.data.get(key)
            if not deps:
                continue

            for name, spec in iteritems(deps):
                yield (name, spec)

    def get_workspace_dep_spec_paths(self):
        """
        Returns names and paths from specifiers of the defined workspace dependencies.
        :rtype: list of (str, str)
        """
        spec_paths = []
        schema = self.WORKSPACE_SCHEMA
        schema_len = len(schema)

        for name, spec in self.dependencies_iter():
            if not spec.startswith(schema):
                continue

            spec_path = spec[schema_len:]
            # NOTE(review): the startswith("..") test is redundant — any
            # ".." path already starts with ".".
            if not (spec_path.startswith(".") or spec_path.startswith("..")):
                raise PackageJsonWorkspaceError(
                    "Expected relative path specifier for workspace dependency, but got '{}' for {} in {}".format(
                        spec, name, self.path
                    )
                )

            spec_paths.append((name, spec_path))

        return spec_paths

    def get_workspace_dep_paths(self, base_path=None):
        """
        Returns paths of the defined workspace dependencies.
        :param base_path: base path to resolve relative dep paths
        :type base_path: str
        :rtype: list of str
        """
        if base_path is None:
            base_path = os.path.dirname(self.path)

        return [os.path.normpath(os.path.join(base_path, p)) for _, p in self.get_workspace_dep_spec_paths()]

    def get_workspace_deps(self):
        """
        Load the manifest of every direct workspace dependency.
        :rtype: list of PackageJson
        """
        ws_deps = []
        pj_dir = os.path.dirname(self.path)

        for name, rel_path in self.get_workspace_dep_spec_paths():
            dep_path = os.path.normpath(os.path.join(pj_dir, rel_path))
            dep_pj = PackageJson.load(build_pj_path(dep_path))

            # The dependency's manifest must agree with the name it was
            # declared under.
            if name != dep_pj.get_name():
                raise PackageJsonWorkspaceError(
                    "Workspace dependency name mismatch, found '{}' instead of '{}' in {}".format(
                        name, dep_pj.get_name(), self.path
                    )
                )

            ws_deps.append(dep_pj)

        return ws_deps

    def get_workspace_map(self, ignore_self=False):
        """
        Returns absolute paths of the workspace dependencies (including transitive) mapped to package.json and depth.
        :param ignore_self: whether path of the current module will be excluded
        :type ignore_self: bool
        :rtype: dict of (PackageJson, int)
        """
        ws_deps = {}
        # list of (pj, depth); pop() takes from the end, so traversal is
        # depth-first and the first-seen depth for a package wins.
        pj_queue = [(self, 0)]

        while len(pj_queue):
            (pj, depth) = pj_queue.pop()
            pj_dir = os.path.dirname(pj.path)
            if pj_dir in ws_deps:
                continue

            if not ignore_self or pj != self:
                ws_deps[pj_dir] = (pj, depth)

            for dep_pj in pj.get_workspace_deps():
                pj_queue.append((dep_pj, depth + 1))

        return ws_deps

    def get_dep_paths_by_names(self):
        """
        Returns dict of {dependency_name: dependency_path}
        """
        ws_map = self.get_workspace_map()
        return {pj.get_name(): path for path, (pj, _) in ws_map.items()}
diff --git a/build/plugins/lib/nots/package_manager/base/package_manager.py b/build/plugins/lib/nots/package_manager/base/package_manager.py
new file mode 100644
index 0000000000..d3da1d3579
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/package_manager.py
@@ -0,0 +1,141 @@
+import os
+import sys
+import subprocess
+
+from abc import ABCMeta, abstractmethod
+from six import add_metaclass
+
+from .constants import NPM_REGISTRY_URL
+from .package_json import PackageJson
+from .utils import build_nm_path, build_pj_path
+
+
class PackageManagerError(RuntimeError):
    """Base class for package-manager failures."""
    pass


class PackageManagerCommandError(PackageManagerError):
    """Raised when the underlying package-manager process exits non-zero."""

    def __init__(self, cmd, code, stdout, stderr):
        message = "package manager exited with code {} while running {}:\n{}\n{}".format(code, cmd, stdout, stderr)
        super(PackageManagerCommandError, self).__init__(message)

        # Keep the raw invocation details for callers that want to inspect
        # or re-report the failure.
        self.cmd = cmd
        self.code = code
        self.stdout = stdout
        self.stderr = stderr
+
+
@add_metaclass(ABCMeta)
class BasePackageManager(object):
    """
    Abstract driver for a JS package manager (see PnpmPackageManager).

    Holds the path layout of one module inside the arcadia build and
    provides helpers for running the package manager as a subprocess.
    """

    def __init__(self, build_root, build_path, sources_path, nodejs_bin_path, script_path, contribs_path, module_path=None, sources_root=None):
        # Module path relative to the roots; derived by stripping the build
        # root prefix — assumes build_path lies under build_root.
        self.module_path = build_path[len(build_root) + 1:] if module_path is None else module_path
        self.build_path = build_path
        self.sources_path = sources_path
        self.build_root = build_root
        # Derived by trimming the module-path suffix — assumes sources_path
        # ends with module_path.
        self.sources_root = sources_path[:-len(self.module_path) - 1] if sources_root is None else sources_root
        self.nodejs_bin_path = nodejs_bin_path
        self.script_path = script_path
        self.contribs_path = contribs_path

    @classmethod
    def load_package_json(cls, path):
        """
        :param path: path to package.json
        :type path: str
        :rtype: PackageJson
        """
        return PackageJson.load(path)

    @classmethod
    def load_package_json_from_dir(cls, dir_path):
        """
        :param dir_path: path to directory with package.json
        :type dir_path: str
        :rtype: PackageJson
        """
        return cls.load_package_json(build_pj_path(dir_path))

    @classmethod
    @abstractmethod
    def load_lockfile(cls, path):
        """Load a lockfile of the concrete manager's format from path."""
        pass

    @classmethod
    @abstractmethod
    def load_lockfile_from_dir(cls, dir_path):
        """Load the lockfile located inside dir_path."""
        pass

    @abstractmethod
    def create_node_modules(self):
        """Materialize node_modules for this module from its lockfile."""
        pass

    @abstractmethod
    def calc_node_modules_inouts(self):
        """Return (inputs, outputs) of the node_modules build command."""
        pass

    @abstractmethod
    def extract_packages_meta_from_lockfiles(self, lf_paths):
        """Yield package meta collected from the given lockfile paths."""
        pass

    def get_local_peers_from_package_json(self):
        """
        Returns paths of direct workspace dependencies (source root related).
        :rtype: list of str
        """
        return self.load_package_json_from_dir(self.sources_path).get_workspace_dep_paths(base_path=self.module_path)

    def get_peers_from_package_json(self):
        """
        Returns paths of workspace dependencies (source root related).
        :rtype: list of str
        """
        pj = self.load_package_json_from_dir(self.sources_path)
        prefix_len = len(self.sources_root) + 1

        return [p[prefix_len:] for p in pj.get_workspace_map(ignore_self=True).keys()]

    def _exec_command(self, args, include_defaults=True):
        # Runs `node <script> <args> [default options]` in build_path and
        # raises PackageManagerCommandError on a non-zero exit. Stdout of a
        # successful run is discarded.
        if not self.nodejs_bin_path:
            raise PackageManagerError("Unable to execute command: nodejs_bin_path is not configured")

        cmd = [self.nodejs_bin_path, self.script_path] + args + (self._get_default_options() if include_defaults else [])
        p = subprocess.Popen(
            cmd,
            cwd=self.build_path,
            stdin=None,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = p.communicate()

        if p.returncode != 0:
            # Surface the manager's own log before raising, if one exists.
            self._dump_debug_log()

            raise PackageManagerCommandError(cmd, p.returncode, stdout.decode("utf-8"), stderr.decode("utf-8"))

    def _nm_path(self, *parts):
        # Path inside this module's node_modules directory.
        return os.path.join(build_nm_path(self.build_path), *parts)

    def _contrib_tarball_path(self, pkg):
        # Local tarball location of a contrib package.
        return os.path.join(self.contribs_path, pkg.tarball_path)

    def _contrib_tarball_url(self, pkg):
        # file: URL form of the contrib tarball, for lockfile resolutions.
        return "file:" + self._contrib_tarball_path(pkg)

    def _get_default_options(self):
        return ["--registry", NPM_REGISTRY_URL]

    def _get_debug_log_path(self):
        # Concrete managers may point at their debug log; None disables dumping.
        return None

    def _dump_debug_log(self):
        # Best-effort: failures to read the log are reported, never raised.
        log_path = self._get_debug_log_path()

        if not log_path:
            return

        try:
            with open(log_path) as f:
                sys.stderr.write("Package manager log {}:\n{}\n".format(log_path, f.read()))
        except Exception:
            sys.stderr.write("Failed to dump package manager log {}.\n".format(log_path))
diff --git a/build/plugins/lib/nots/package_manager/base/tests/package_json.py b/build/plugins/lib/nots/package_manager/base/tests/package_json.py
new file mode 100644
index 0000000000..42aab85b26
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/package_json.py
@@ -0,0 +1,152 @@
+import os
+import pytest
+
+from build.plugins.lib.nots.package_manager.base.package_json import PackageJson, PackageJsonWorkspaceError
+
+
def test_get_workspace_dep_spec_paths_ok():
    # Workspace specs from both "dependencies" and "devDependencies" are
    # collected, in DEP_KEYS scan order.
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:../bar",
        },
        "devDependencies": {
            "@yandex-int/baz": "workspace:../baz",
        },
    }

    ws_dep_spec_paths = pj.get_workspace_dep_spec_paths()

    assert ws_dep_spec_paths == [
        ("@yandex-int/bar", "../bar"),
        ("@yandex-int/baz", "../baz"),
    ]


def test_get_workspace_dep_spec_paths_invalid_path():
    # Non-relative workspace specifiers (e.g. "workspace:*") are rejected.
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:*",
        },
    }

    with pytest.raises(PackageJsonWorkspaceError) as e:
        pj.get_workspace_dep_spec_paths()

    assert str(e.value) == "Expected relative path specifier for workspace dependency, but got 'workspace:*' for @yandex-int/bar in /packages/foo/package.json"
+
+
def test_get_workspace_dep_paths_ok():
    # Without base_path, dep paths resolve against the manifest's directory.
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:../bar",
        },
        "devDependencies": {
            "@yandex-int/baz": "workspace:../baz",
        },
    }

    ws_dep_paths = pj.get_workspace_dep_paths()

    assert ws_dep_paths == [
        "/packages/bar",
        "/packages/baz",
    ]


def test_get_workspace_dep_paths_with_custom_base_path():
    # An explicit base_path overrides the manifest directory for resolution.
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:../bar",
        },
        "devDependencies": {
            "@yandex-int/baz": "workspace:../baz",
        },
    }

    ws_dep_paths = pj.get_workspace_dep_paths(base_path="custom/dir")

    assert ws_dep_paths == [
        "custom/bar",
        "custom/baz",
    ]
+
+
def test_get_workspace_deps_ok():
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:../bar",
        },
        "devDependencies": {
            "@yandex-int/baz": "workspace:../baz",
        },
    }

    # Stub PackageJson.load so no real files are read: fabricate a manifest
    # whose name matches the requested directory.
    def load_mock(cls, path):
        p = PackageJson(path)
        p.data = {
            "name": "@yandex-int/{}".format(os.path.basename(os.path.dirname(path))),
        }
        return p
    # NOTE(review): this replaces PackageJson.load for the rest of the test
    # session (never restored) — consider pytest's monkeypatch fixture.
    PackageJson.load = classmethod(load_mock)

    ws_deps = pj.get_workspace_deps()

    assert len(ws_deps) == 2
    assert ws_deps[0].path == "/packages/bar/package.json"
    assert ws_deps[1].path == "/packages/baz/package.json"


def test_get_workspace_deps_with_wrong_name():
    # A dependency whose manifest name differs from the declared name fails.
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:../bar",
        },
    }

    # Stubbed loader returns a manifest under a different scope on purpose.
    def load_mock(cls, path):
        p = PackageJson(path)
        p.data = {
            "name": "@shouldbe/{}".format(os.path.basename(os.path.dirname(path))),
        }
        return p
    # NOTE(review): un-restored global patch, as above.
    PackageJson.load = classmethod(load_mock)

    with pytest.raises(PackageJsonWorkspaceError) as e:
        pj.get_workspace_deps()

    assert str(e.value) == "Workspace dependency name mismatch, found '@yandex-int/bar' instead of '@shouldbe/bar' in /packages/foo/package.json"
+
+
def test_get_workspace_map_ok():
    # Transitive workspace deps are collected with their traversal depth:
    # foo (0) -> bar (1) -> qux (2).
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:../bar",
        },
    }

    # Stubbed loader fabricates manifests; only "bar" declares a further
    # workspace dependency, giving a two-level chain.
    def load_mock(cls, path):
        name = os.path.basename(os.path.dirname(path))
        p = PackageJson(path)
        p.data = {
            "name": "@yandex-int/{}".format(name),
            "dependencies": ({"@yandex-int/qux": "workspace:../qux"} if name == "bar" else {}),
        }
        return p
    # NOTE(review): un-restored global patch of PackageJson.load.
    PackageJson.load = classmethod(load_mock)

    ws_map = pj.get_workspace_map()

    assert len(ws_map) == 3
    assert ws_map["/packages/foo"][0].path == "/packages/foo/package.json"
    assert ws_map["/packages/foo"][1] == 0
    assert ws_map["/packages/bar"][0].path == "/packages/bar/package.json"
    assert ws_map["/packages/bar"][1] == 1
    assert ws_map["/packages/qux"][0].path == "/packages/qux/package.json"
    assert ws_map["/packages/qux"][1] == 2
diff --git a/build/plugins/lib/nots/package_manager/base/tests/utils.py b/build/plugins/lib/nots/package_manager/base/tests/utils.py
new file mode 100644
index 0000000000..4287beec47
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/utils.py
@@ -0,0 +1,15 @@
+from build.plugins.lib.nots.package_manager.base import utils
+
+
def test_extract_package_name_from_path():
    # (input path, expected package name) pairs covering scoped packages,
    # unscoped packages, a single-letter name and the empty string.
    happy_checklist = [
        ("@yandex-int/foo-bar-baz/some/path/inside/the/package", "@yandex-int/foo-bar-baz"),
        ("@yandex-int/foo-bar-buzz", "@yandex-int/foo-bar-buzz"),
        ("package-wo-scope", "package-wo-scope"),
        ("p", "p"),
        ("", ""),
    ]

    for path, expected in happy_checklist:
        assert utils.extract_package_name_from_path(path) == expected
diff --git a/build/plugins/lib/nots/package_manager/base/tests/ya.make b/build/plugins/lib/nots/package_manager/base/tests/ya.make
new file mode 100644
index 0000000000..1bece69c33
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/ya.make
@@ -0,0 +1,14 @@
+PY23_TEST()
+
+OWNER(g:frontend-build-platform)
+
+TEST_SRCS(
+ package_json.py
+ utils.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+)
+
+END()
diff --git a/build/plugins/lib/nots/package_manager/base/utils.py b/build/plugins/lib/nots/package_manager/base/utils.py
new file mode 100644
index 0000000000..017bf4ca41
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/utils.py
@@ -0,0 +1,29 @@
+import os
+
+from .constants import PACKAGE_JSON_FILENAME, NODE_MODULES_DIRNAME, NODE_MODULES_BUNDLE_FILENAME
+
+
def s_rooted(p):
    """Prefix p with the "$S" (source root) placeholder."""
    return os.path.join("$S", p)


def b_rooted(p):
    """Prefix p with the "$B" (build root) placeholder."""
    return os.path.join("$B", p)


def build_pj_path(p):
    """Return the package.json path inside directory p."""
    return os.path.join(p, PACKAGE_JSON_FILENAME)


def build_nm_path(p):
    """Return the node_modules path inside directory p."""
    return os.path.join(p, NODE_MODULES_DIRNAME)


def build_nm_bundle_path(p):
    """Return the node_modules tarball path inside directory p."""
    return os.path.join(p, NODE_MODULES_BUNDLE_FILENAME)
+
+
def extract_package_name_from_path(p):
    """Return the npm package name that prefixes path p.

    Scoped names ("@scope/pkg/...") keep the first two path tokens,
    unscoped names keep only the first one.
    """
    if p.startswith("@"):
        return "/".join(p.split("/", 2)[:2])
    return p.split("/", 1)[0]
diff --git a/build/plugins/lib/nots/package_manager/base/ya.make b/build/plugins/lib/nots/package_manager/base/ya.make
new file mode 100644
index 0000000000..4b7f22f05a
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/ya.make
@@ -0,0 +1,23 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+ constants.py
+ lockfile.py
+ node_modules_bundler.py
+ package_json.py
+ package_manager.py
+ utils.py
+)
+
+PEERDIR(
+ contrib/python/six
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/package_manager/pnpm/__init__.py b/build/plugins/lib/nots/package_manager/pnpm/__init__.py
new file mode 100644
index 0000000000..b3a3c20c02
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/__init__.py
@@ -0,0 +1,12 @@
# Public surface of the pnpm driver package.
from . import constants
from .lockfile import PnpmLockfile
from .package_manager import PnpmPackageManager
from .workspace import PnpmWorkspace


__all__ = [
    "constants",
    "PnpmLockfile",
    "PnpmPackageManager",
    "PnpmWorkspace",
]
diff --git a/build/plugins/lib/nots/package_manager/pnpm/constants.py b/build/plugins/lib/nots/package_manager/pnpm/constants.py
new file mode 100644
index 0000000000..e84a78c55e
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/constants.py
@@ -0,0 +1,2 @@
# pnpm workspace definition filename.
PNPM_WS_FILENAME = "pnpm-workspace.yaml"
# pnpm lockfile filename.
PNPM_LOCKFILE_FILENAME = "pnpm-lock.yaml"
diff --git a/build/plugins/lib/nots/package_manager/pnpm/lockfile.py b/build/plugins/lib/nots/package_manager/pnpm/lockfile.py
new file mode 100644
index 0000000000..19c748e55b
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/lockfile.py
@@ -0,0 +1,162 @@
+import base64
+import binascii
+import yaml
+import os
+
+from six.moves.urllib import parse as urlparse
+from six import iteritems
+
+from ..base import PackageJson, BaseLockfile, LockfilePackageMeta, LockfilePackageMetaInvalidError
+
+
class PnpmLockfile(BaseLockfile):
    """Reader/writer for pnpm-lock.yaml (see BaseLockfile for the contract)."""

    # Keys that form one "importer" entry. pnpm keeps them top-level for a
    # standalone package and under "importers" for a monorepo lockfile.
    IMPORTER_KEYS = PackageJson.DEP_KEYS + ("specifiers",)

    def read(self):
        with open(self.path, "r") as f:
            # NOTE(review): CSafeLoader requires PyYAML built with libyaml —
            # presumably guaranteed by the build environment; confirm.
            self.data = yaml.load(f, Loader=yaml.CSafeLoader)

    def write(self, path=None):
        """
        :param path: path to store lockfile, defaults to original path
        :type path: str
        """
        if path is None:
            path = self.path

        with open(path, "w") as f:
            yaml.dump(self.data, f, Dumper=yaml.CSafeDumper)

    def get_packages_meta(self):
        """
        Extracts packages meta from lockfile.
        :rtype: list of LockfilePackageMeta
        """
        packages = self.data.get("packages", {})

        # Fix: materialize a real list so the result matches the documented
        # rtype on both Python 2 and 3 (`map` on py3 yields a one-shot
        # iterator that cannot be re-iterated or len()-ed).
        return [_parse_package_meta(key, meta) for key, meta in iteritems(packages)]

    def update_tarball_resolutions(self, fn):
        """
        :param fn: maps `LockfilePackageMeta` instance to new `resolution.tarball` value
        :type fn: lambda
        """
        packages = self.data.get("packages", {})

        for key, meta in iteritems(packages):
            meta["resolution"]["tarball"] = fn(_parse_package_meta(key, meta))
            packages[key] = meta

    def get_importers(self):
        """
        Returns "importers" section from the lockfile or creates similar structure from "dependencies" and "specifiers".
        :rtype: dict of dict of dict of str
        """
        importers = self.data.get("importers")
        if importers is not None:
            return importers

        importer = {k: self.data[k] for k in self.IMPORTER_KEYS if k in self.data}

        return ({".": importer} if importer else {})

    def merge(self, lf):
        """
        Merges two lockfiles:
        1. Converts the lockfile to monorepo-like lockfile with "importers" section instead of "dependencies" and "specifiers".
        2. Merges `lf`'s dependencies and specifiers to importers.
        3. Merges `lf`'s packages to the lockfile.
        :param lf: lockfile to merge
        :type lf: PnpmLockfile
        """
        importers = self.get_importers()
        build_path = os.path.dirname(self.path)

        for [importer, imports] in iteritems(lf.get_importers()):
            # Re-key each importer path so it is relative to *this* lockfile.
            importer_path = os.path.normpath(os.path.join(os.path.dirname(lf.path), importer))
            importer_rel_path = os.path.relpath(importer_path, build_path)
            importers[importer_rel_path] = imports

        self.data["importers"] = importers

        # Drop standalone-format keys now represented under "importers".
        for k in self.IMPORTER_KEYS:
            self.data.pop(k, None)

        # First writer wins: existing package entries are kept as-is.
        packages = self.data.get("packages", {})
        for k, v in iteritems(lf.data.get("packages", {})):
            if k not in packages:
                packages[k] = v
        self.data["packages"] = packages
+
+
def _parse_package_meta(key, meta):
    """
    :param key: uniq package key from lockfile
    :type key: string
    :param meta: package meta dict from lockfile
    :type meta: dict
    :rtype: LockfilePackageMeta
    :raises TypeError: if the entry is missing required keys or cannot be parsed
    """
    try:
        name, version = _parse_package_key(key)
        sky_id = _parse_sky_id_from_tarball_url(meta["resolution"]["tarball"])
        integrity_algorithm, integrity = _parse_package_integrity(meta["resolution"]["integrity"])
    except KeyError as e:
        raise TypeError("Invalid package meta for key {}, missing {} key".format(key, e))
    except LockfilePackageMetaInvalidError as e:
        # Parse failures from the helpers are re-raised uniformly as TypeError.
        raise TypeError("Invalid package meta for key {}, parse error: {}".format(key, e))

    return LockfilePackageMeta(name, version, sky_id, integrity, integrity_algorithm)
+
+
def _parse_package_key(key):
    """
    Returns tuple of scoped package name and version.
    :param key: package key in format "/({scope}/)?{package_name}/{package_version}(_{peer_dependencies})?"
    :type key: string
    :rtype: (str, str)
    """
    parts = key.split("/")[1:]
    if not parts:
        raise LockfilePackageMetaInvalidError("Invalid package key")

    # Last token is the version; strip any "_peer" suffix.
    version = parts[-1].split("_", 1)[0]
    name_parts = parts[:-1]

    # Name is either "pkg" or "@scope/pkg" — one or two tokens.
    if len(name_parts) not in (1, 2):
        raise LockfilePackageMetaInvalidError("Invalid package key")

    return ("/".join(name_parts), version)
+
+
def _parse_sky_id_from_tarball_url(tarball_url):
    """
    :param tarball_url: tarball url
    :type tarball_url: string
    :rtype: string
    """
    # Local tarballs carry no torrent id.
    if tarball_url.startswith("file:"):
        return ""

    query = urlparse.urlparse(tarball_url).query
    params = urlparse.parse_qs(query)

    if "rbtorrent" not in params:
        raise LockfilePackageMetaInvalidError("Missing rbtorrent param in tarball url {}".format(tarball_url))

    return "rbtorrent:{}".format(params["rbtorrent"][0])
+
+
def _parse_package_integrity(integrity):
    """
    Returns tuple of algorithm and hash (hex).
    :param integrity: package integrity in format "{algo}-{base64_of_hash}"
    :type integrity: string
    :rtype: (str, str)
    """
    algo, hash_b64 = integrity.split("-", 1)

    try:
        hash_hex = binascii.hexlify(base64.b64decode(hash_b64))
    # Fix: on Python 3 invalid base64 raises binascii.Error (a ValueError
    # subclass), not TypeError as on Python 2 — catch both so the documented
    # LockfilePackageMetaInvalidError contract holds on both interpreters.
    except (TypeError, binascii.Error) as e:
        raise LockfilePackageMetaInvalidError("Invalid package integrity encoding, integrity: {}, error: {}".format(integrity, e))

    return (algo, hash_hex)
diff --git a/build/plugins/lib/nots/package_manager/pnpm/package_manager.py b/build/plugins/lib/nots/package_manager/pnpm/package_manager.py
new file mode 100644
index 0000000000..5d41185336
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/package_manager.py
@@ -0,0 +1,215 @@
+import os
+import yaml
+
+from six import iteritems
+
+from ..base import BasePackageManager, PackageManagerError
+from ..base.utils import build_pj_path, build_nm_path, build_nm_bundle_path, s_rooted, b_rooted
+from ..base.node_modules_bundler import bundle_node_modules
+from ..base.constants import NODE_MODULES_BUNDLE_FILENAME
+from .lockfile import PnpmLockfile
+from .workspace import PnpmWorkspace
+from .utils import build_lockfile_path, build_ws_config_path
+
+
+class PnpmPackageManager(BasePackageManager):
+ _STORE_NM_PATH = os.path.join(".pnpm", "store")
+ _VSTORE_NM_PATH = os.path.join(".pnpm", "virtual-store")
+ _STORE_VER = "v3"
+
+ @classmethod
+ def load_lockfile(cls, path):
+ """
+ :param path: path to lockfile
+ :type path: str
+ :rtype: PnpmLockfile
+ """
+ return PnpmLockfile.load(path)
+
+ @classmethod
+ def load_lockfile_from_dir(cls, dir_path):
+ """
+ :param dir_path: path to directory with lockfile
+ :type dir_path: str
+ :rtype: PnpmLockfile
+ """
+ return cls.load_lockfile(build_lockfile_path(dir_path))
+
+ def create_node_modules(self):
+ """
+ Creates node_modules directory according to the lockfile.
+ """
+ ws = self._prepare_workspace()
+ self._exec_command(
+ [
+ "install",
+ "--offline",
+ "--frozen-lockfile",
+ "--public-hoist-pattern",
+ "",
+ "--store-dir",
+ self._nm_path(self._STORE_NM_PATH),
+ "--virtual-store-dir",
+ self._nm_path(self._VSTORE_NM_PATH),
+ "--no-verify-store-integrity",
+ "--package-import-method",
+ "hardlink",
+ "--ignore-pnpmfile",
+ "--ignore-scripts",
+ "--strict-peer-dependencies",
+ ]
+ )
+ self._fix_stores_in_modules_yaml()
+
+ bundle_node_modules(
+ build_root=self.build_root,
+ node_modules_path=self._nm_path(),
+ peers=ws.get_paths(base_path=self.module_path, ignore_self=True),
+ bundle_path=NODE_MODULES_BUNDLE_FILENAME,
+ )
+
+ def calc_node_modules_inouts(self):
+ """
+ Returns input and output paths for command that creates `node_modules` bundle.
+ Inputs:
+ - source package.json and lockfile,
+ - built package.jsons of all deps,
+        - merged lockfiles and workspace configs of direct non-leaf deps,
+ - tarballs.
+ Outputs:
+ - merged lockfile,
+ - generated workspace config,
+ - created node_modules bundle.
+ :rtype: (list of str, list of str)
+ """
+ ins = [
+ s_rooted(build_pj_path(self.module_path)),
+ s_rooted(build_lockfile_path(self.module_path)),
+ ]
+ outs = [
+ b_rooted(build_lockfile_path(self.module_path)),
+ b_rooted(build_ws_config_path(self.module_path)),
+ b_rooted(build_nm_bundle_path(self.module_path)),
+ ]
+
+ # Source lockfiles are used only to get tarballs info.
+ src_lf_paths = [build_lockfile_path(self.sources_path)]
+ pj = self.load_package_json_from_dir(self.sources_path)
+
+ for [dep_src_path, (_, depth)] in iteritems(pj.get_workspace_map(ignore_self=True)):
+ dep_mod_path = dep_src_path[len(self.sources_root) + 1 :]
+ # pnpm requires all package.jsons.
+ ins.append(b_rooted(build_pj_path(dep_mod_path)))
+
+ dep_lf_src_path = build_lockfile_path(dep_src_path)
+ if not os.path.isfile(dep_lf_src_path):
+ # It is ok for leaves.
+ continue
+ src_lf_paths.append(dep_lf_src_path)
+
+ if depth == 1:
+ ins.append(b_rooted(build_ws_config_path(dep_mod_path)))
+ ins.append(b_rooted(build_lockfile_path(dep_mod_path)))
+
+ for pkg in self.extract_packages_meta_from_lockfiles(src_lf_paths):
+ ins.append(b_rooted(self._contrib_tarball_path(pkg)))
+
+ return (ins, outs)
+
+ def extract_packages_meta_from_lockfiles(self, lf_paths):
+ """
+        :type lf_paths: iterable of str (paths to lockfiles)
+ :rtype: iterable of LockfilePackageMeta
+ """
+ tarballs = set()
+
+ for lf_path in lf_paths:
+ try:
+ for pkg in self.load_lockfile(lf_path).get_packages_meta():
+ if pkg.tarball_path not in tarballs:
+ tarballs.add(pkg.tarball_path)
+ yield pkg
+ except Exception as e:
+ raise PackageManagerError("Unable to process lockfile {}: {}".format(lf_path, e))
+
+ def _prepare_workspace(self):
+ """
+ :rtype: PnpmWorkspace
+ """
+ pj = self._build_package_json()
+ ws = PnpmWorkspace(build_ws_config_path(self.build_path))
+ ws.set_from_package_json(pj)
+ dep_paths = ws.get_paths(ignore_self=True)
+ self._build_merged_workspace_config(ws, dep_paths)
+ self._build_merged_lockfile(dep_paths)
+
+ return ws
+
+ def _build_package_json(self):
+ """
+ :rtype: PackageJson
+ """
+ pj = self.load_package_json_from_dir(self.sources_path)
+
+ if not os.path.exists(self.build_path):
+ os.makedirs(self.build_path, exist_ok=True)
+
+ pj.path = build_pj_path(self.build_path)
+ pj.write()
+
+ return pj
+
+ def _build_merged_lockfile(self, dep_paths):
+ """
+ :type dep_paths: list of str
+ :rtype: PnpmLockfile
+ """
+ lf = self.load_lockfile_from_dir(self.sources_path)
+ # Change to the output path for correct path calcs on merging.
+ lf.path = build_lockfile_path(self.build_path)
+
+ for dep_path in dep_paths:
+ lf_path = build_lockfile_path(dep_path)
+ if os.path.isfile(lf_path):
+ lf.merge(self.load_lockfile(lf_path))
+
+ lf.update_tarball_resolutions(lambda p: self._contrib_tarball_url(p))
+ lf.write()
+
+ def _build_merged_workspace_config(self, ws, dep_paths):
+ """
+ NOTE: This method mutates `ws`.
+        :type ws: PnpmWorkspace
+ :type dep_paths: list of str
+ """
+ for dep_path in dep_paths:
+ ws_config_path = build_ws_config_path(dep_path)
+ if os.path.isfile(ws_config_path):
+ ws.merge(PnpmWorkspace.load(ws_config_path))
+
+ ws.write()
+
+ def _fix_stores_in_modules_yaml(self):
+ """
+ Ensures that store paths are the same as would be after installing deps in the source dir.
+ This is required to reuse `node_modules` after build.
+ """
+ with open(self._nm_path(".modules.yaml"), "r+") as f:
+ data = yaml.load(f, Loader=yaml.CSafeLoader)
+ # NOTE: pnpm requires absolute store path here.
+ data["storeDir"] = os.path.join(build_nm_path(self.sources_path), self._STORE_NM_PATH, self._STORE_VER)
+ data["virtualStoreDir"] = self._VSTORE_NM_PATH
+ f.seek(0)
+ yaml.dump(data, f, Dumper=yaml.CSafeDumper)
+ f.truncate()
+
+ def _get_default_options(self):
+ return super(PnpmPackageManager, self)._get_default_options() + [
+ "--stream",
+ "--reporter",
+ "append-only",
+ "--no-color",
+ ]
+
+ def _get_debug_log_path(self):
+ return self._nm_path(".pnpm-debug.log")
diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py b/build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py
new file mode 100644
index 0000000000..06315a4992
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/tests/lockfile.py
@@ -0,0 +1,320 @@
+import pytest
+
+from build.plugins.lib.nots.package_manager.pnpm.lockfile import PnpmLockfile
+
+
+def test_lockfile_get_packages_meta_ok():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/@babel/cli/7.6.2_@babel+core@7.6.2": {
+ "resolution": {
+ "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==",
+ "tarball": "@babel%2fcli/-/cli-7.6.2.tgz?rbtorrent=cb1849da3e4947e56a8f6bde6a1ec42703ddd187",
+ },
+ },
+ },
+ }
+
+ packages = list(lf.get_packages_meta())
+ pkg = packages[0]
+
+ assert len(packages) == 1
+ assert pkg.name == "@babel/cli"
+ assert pkg.version == "7.6.2"
+ assert pkg.sky_id == "rbtorrent:cb1849da3e4947e56a8f6bde6a1ec42703ddd187"
+ assert pkg.integrity == b"24367e4ff6ebf693df4f696600c272a490d34d31ccf5e3c3fc40f5d13463473255744572f89077891961cd8993b796243601efc561a55159cbb5dbfaaee883ad"
+ assert pkg.integrity_algorithm == "sha512"
+
+
+def test_lockfile_get_packages_empty():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {}
+
+ assert len(list(lf.get_packages_meta())) == 0
+
+
+def test_package_meta_invalid_key():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "in/valid": {},
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key in/valid, parse error: Invalid package key"
+
+
+def test_package_meta_missing_resolution():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/valid/1.2.3": {},
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key /valid/1.2.3, missing 'resolution' key"
+
+
+def test_package_meta_missing_tarball():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/valid/1.2.3": {
+ "resolution": {},
+ },
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key /valid/1.2.3, missing 'tarball' key"
+
+
+def test_package_meta_missing_rbtorrent():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/valid/1.2.3": {
+ "resolution": {
+ "tarball": "valid-1.2.3.tgz",
+ },
+ },
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key /valid/1.2.3, parse error: Missing rbtorrent param in tarball url valid-1.2.3.tgz"
+
+
+def test_lockfile_meta_file_tarball():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/@babel/cli/7.6.2": {
+ "resolution": {
+ "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==",
+ "tarball": "file:/some/abs/path.tgz",
+ },
+ },
+ },
+ }
+
+ packages = list(lf.get_packages_meta())
+ pkg = packages[0]
+
+ assert len(packages) == 1
+ assert pkg.name == "@babel/cli"
+ assert pkg.version == "7.6.2"
+ assert pkg.sky_id == ""
+
+
+def test_lockfile_update_tarball_resolutions_ok():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/@babel/cli/7.6.2_@babel+core@7.6.2": {
+ "resolution": {
+ "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==",
+ "tarball": "@babel%2fcli/-/cli-7.6.2.tgz?rbtorrent=cb1849da3e4947e56a8f6bde6a1ec42703ddd187",
+ },
+ },
+ },
+ }
+
+ lf.update_tarball_resolutions(lambda p: p.name)
+
+ assert lf.data["packages"]["/@babel/cli/7.6.2_@babel+core@7.6.2"]["resolution"]["tarball"] == "@babel/cli"
+
+
+def test_lockfile_merge():
+ lf1 = PnpmLockfile(path="/foo/pnpm-lock.yaml")
+ lf1.data = {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ },
+ }
+
+ lf2 = PnpmLockfile(path="/bar/pnpm-lock.yaml")
+ lf2.data = {
+ "dependencies": {
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "b": "1.0.0",
+ },
+ "packages": {
+ "/b/1.0.0": {},
+ },
+ }
+
+ lf3 = PnpmLockfile(path="/another/baz/pnpm-lock.yaml")
+ lf3.data = {
+ "importers": {
+ ".": {
+ "dependencies": {
+ "@a/qux": "link:../qux",
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "@a/qux": "workspace:../qux",
+ "a": "1.0.0",
+ },
+ },
+ "../qux": {
+ "dependencies": {
+ "b": "1.0.1",
+ },
+ "specifiers": {
+ "b": "1.0.1",
+ },
+ },
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ "/b/1.0.1": {},
+ },
+ }
+
+ lf4 = PnpmLockfile(path="/another/quux/pnpm-lock.yaml")
+ lf4.data = {
+ "dependencies": {
+ "@a/bar": "link:../../bar",
+ },
+ "specifiers": {
+ "@a/bar": "workspace:../../bar",
+ },
+ }
+
+ lf1.merge(lf2)
+ lf1.merge(lf3)
+ lf1.merge(lf4)
+
+ assert lf1.data == {
+ "importers": {
+ ".": {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ },
+ "../bar": {
+ "dependencies": {
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "b": "1.0.0",
+ },
+ },
+ "../another/baz": {
+ "dependencies": {
+ "@a/qux": "link:../qux",
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "@a/qux": "workspace:../qux",
+ "a": "1.0.0",
+ },
+ },
+ "../another/qux": {
+ "dependencies": {
+ "b": "1.0.1",
+ },
+ "specifiers": {
+ "b": "1.0.1",
+ },
+ },
+ "../another/quux": {
+ "dependencies": {
+ "@a/bar": "link:../../bar",
+ },
+ "specifiers": {
+ "@a/bar": "workspace:../../bar",
+ },
+ },
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ "/b/1.0.0": {},
+ "/b/1.0.1": {},
+ },
+ }
+
+
+def test_lockfile_merge_dont_overrides_packages():
+ lf1 = PnpmLockfile(path="/foo/pnpm-lock.yaml")
+ lf1.data = {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ },
+ }
+
+ lf2 = PnpmLockfile(path="/bar/pnpm-lock.yaml")
+ lf2.data = {
+ "dependencies": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ "packages": {
+ "/a/1.0.0": {
+ "overriden": True,
+ },
+ "/b/1.0.0": {},
+ },
+ }
+
+ lf1.merge(lf2)
+
+ assert lf1.data == {
+ "importers": {
+ ".": {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ },
+ "../bar": {
+ "dependencies": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ },
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ "/b/1.0.0": {},
+ },
+ }
diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py b/build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py
new file mode 100644
index 0000000000..5d11dd9e5d
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/tests/workspace.py
@@ -0,0 +1,68 @@
+from build.plugins.lib.nots.package_manager.base import PackageJson
+from build.plugins.lib.nots.package_manager.pnpm.workspace import PnpmWorkspace
+
+
+def test_workspace_get_paths():
+ ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ ws.packages = set([".", "../bar", "../../another/baz"])
+
+ assert sorted(ws.get_paths()) == [
+ "/another/baz",
+ "/packages/bar",
+ "/packages/foo",
+ ]
+
+
+def test_workspace_get_paths_with_custom_base_path_without_self():
+ ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ ws.packages = set([".", "../bar", "../../another/baz"])
+
+ assert sorted(ws.get_paths(base_path="some/custom/dir", ignore_self=True)) == [
+ "some/another/baz",
+ "some/custom/bar",
+ ]
+
+
+def test_workspace_set_from_package_json():
+ ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ pj = PackageJson(path="/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@a/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@a/baz": "workspace:../../another/baz",
+ },
+ "peerDependencies": {
+ "@a/qux": "workspace:../../another/qux",
+ },
+ "optionalDependencies": {
+ "@a/quux": "workspace:../../another/quux",
+ }
+ }
+
+ ws.set_from_package_json(pj)
+
+ assert sorted(ws.get_paths()) == [
+ "/another/baz",
+ "/another/quux",
+ "/another/qux",
+ "/packages/bar",
+ "/packages/foo",
+ ]
+
+
+def test_workspace_merge():
+ ws1 = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ ws1.packages = set([".", "../bar", "../../another/baz"])
+ ws2 = PnpmWorkspace(path="/another/baz/pnpm-workspace.yaml")
+ ws2.packages = set([".", "../qux"])
+
+ ws1.merge(ws2)
+
+ assert sorted(ws1.get_paths()) == [
+ "/another/baz",
+ "/another/qux",
+ "/packages/bar",
+ "/packages/foo",
+ ]
diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make b/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make
new file mode 100644
index 0000000000..44877dfc1b
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make
@@ -0,0 +1,15 @@
+PY23_TEST()
+
+OWNER(g:frontend-build-platform)
+
+TEST_SRCS(
+ lockfile.py
+ workspace.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+ build/plugins/lib/nots/package_manager/pnpm
+)
+
+END()
diff --git a/build/plugins/lib/nots/package_manager/pnpm/utils.py b/build/plugins/lib/nots/package_manager/pnpm/utils.py
new file mode 100644
index 0000000000..1fa4291b9d
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/utils.py
@@ -0,0 +1,11 @@
+import os
+
+from .constants import PNPM_LOCKFILE_FILENAME, PNPM_WS_FILENAME
+
+
+def build_lockfile_path(p):
+ return os.path.join(p, PNPM_LOCKFILE_FILENAME)
+
+
+def build_ws_config_path(p):
+ return os.path.join(p, PNPM_WS_FILENAME)
diff --git a/build/plugins/lib/nots/package_manager/pnpm/workspace.py b/build/plugins/lib/nots/package_manager/pnpm/workspace.py
new file mode 100644
index 0000000000..9df0d2de0c
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/workspace.py
@@ -0,0 +1,75 @@
+import os
+import yaml
+
+
+class PnpmWorkspace(object):
+ @classmethod
+ def load(cls, path):
+ ws = cls(path)
+ ws.read()
+
+ return ws
+
+ def __init__(self, path):
+ if not os.path.isabs(path):
+ raise TypeError("Absolute path required, given: {}".format(path))
+
+ self.path = path
+ # NOTE: pnpm requires relative workspace paths.
+ self.packages = set()
+
+ def read(self):
+ with open(self.path) as f:
+ self.packages = set(yaml.load(f, Loader=yaml.CSafeLoader).get("packages", []))
+
+ def write(self, path=None):
+ if not path:
+ path = self.path
+
+ with open(path, "w") as f:
+ data = {
+ "packages": list(self.packages),
+ }
+ yaml.dump(data, f, Dumper=yaml.CSafeDumper)
+
+ def get_paths(self, base_path=None, ignore_self=False):
+ """
+ Returns absolute paths of the workspace packages.
+ :param base_path: base path to resolve relative dep paths
+ :type base_path: str
+ :param ignore_self: whether path of the current module will be excluded (if present)
+ :type ignore_self: bool
+ :rtype: list of str
+ """
+ if base_path is None:
+ base_path = os.path.dirname(self.path)
+
+ return [os.path.normpath(os.path.join(base_path, pkg_path))
+ for pkg_path in self.packages if not ignore_self or pkg_path != "."]
+
+ def set_from_package_json(self, package_json):
+ """
+ Sets packages to "workspace" deps from given package.json.
+ :param package_json: package.json of workspace
+ :type package_json: PackageJson
+ """
+ if os.path.dirname(package_json.path) != os.path.dirname(self.path):
+ raise TypeError(
+ "package.json should be in workspace directory {}, given: {}".format(os.path.dirname(self.path), package_json.path))
+
+ self.packages = set(path for _, path in package_json.get_workspace_dep_spec_paths())
+ # Add relative path to self.
+ self.packages.add(".")
+
+ def merge(self, ws):
+ """
+ Adds `ws`'s packages to the workspace.
+ :param ws: workspace to merge
+ :type ws: PnpmWorkspace
+ """
+ dir_path = os.path.dirname(self.path)
+ ws_dir_path = os.path.dirname(ws.path)
+
+ for p_rel_path in ws.packages:
+ p_path = os.path.normpath(os.path.join(ws_dir_path, p_rel_path))
+ self.packages.add(os.path.relpath(p_path, dir_path))
diff --git a/build/plugins/lib/nots/package_manager/pnpm/ya.make b/build/plugins/lib/nots/package_manager/pnpm/ya.make
new file mode 100644
index 0000000000..f57ae4a2ba
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/ya.make
@@ -0,0 +1,24 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+ constants.py
+ lockfile.py
+ package_manager.py
+ workspace.py
+ utils.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+ contrib/python/PyYAML
+ contrib/python/six
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/package_manager/ya.make b/build/plugins/lib/nots/package_manager/ya.make
new file mode 100644
index 0000000000..3ac1ea9103
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/ya.make
@@ -0,0 +1,14 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+ build/plugins/lib/nots/package_manager/pnpm
+)
+
+END()
diff --git a/build/plugins/lib/nots/semver/__init__.py b/build/plugins/lib/nots/semver/__init__.py
new file mode 100644
index 0000000000..05dd8e9b5a
--- /dev/null
+++ b/build/plugins/lib/nots/semver/__init__.py
@@ -0,0 +1,5 @@
+from .semver import Version, Operator, VersionRange
+
+__all__ = [
+ "Version", "Operator", "VersionRange",
+]
diff --git a/build/plugins/lib/nots/semver/semver.py b/build/plugins/lib/nots/semver/semver.py
new file mode 100644
index 0000000000..d7dcc26c2c
--- /dev/null
+++ b/build/plugins/lib/nots/semver/semver.py
@@ -0,0 +1,228 @@
+import re
+
+
+class Version:
+ """
+ This class is intended to provide utility methods to work with semver ranges.
+ Right now it is limited to the simplest case: a ">=" operator followed by an exact version with no prerelease or build specification.
+ Example: ">= 1.2.3"
+ """
+
+ @classmethod
+ def from_str(cls, input):
+ """
+        :param str input: exact semver-formatted version, e.g. "1.2.3"
+ :rtype: Version
+ :raises: ValueError
+ """
+ parts = input.strip().split(".", 2)
+ major = int(parts[0])
+ minor = int(parts[1])
+ patch = int(parts[2])
+
+ return cls(major, minor, patch)
+
+ STABLE_VERSION_RE = re.compile(r'^\d+\.\d+\.\d+$')
+
+ @classmethod
+ def is_stable(cls, v):
+ """
+ Verifies that the version is in a supported format.
+
+        :param v: string with the version
+ :return: bool
+ """
+ return cls.STABLE_VERSION_RE.match(v) is not None
+
+ @classmethod
+ def cmp(cls, a, b):
+ """
+ Compare two versions. Should be used with "cmp_to_key" wrapper in sorted(), min(), max()...
+
+ For example:
+ sorted(["1.2.3", "2.4.2", "1.2.7"], key=cmp_to_key(Version.cmp))
+
+        :param a: string with version or Version instance
+        :param b: string with version or Version instance
+ :return: int
+ :raises: ValueError
+ """
+ a_version = a if isinstance(a, cls) else cls.from_str(a)
+ b_version = b if isinstance(b, cls) else cls.from_str(b)
+
+ if a_version > b_version:
+ return 1
+ elif a_version < b_version:
+ return -1
+ else:
+ return 0
+
+ __slots__ = ("_values")
+
+ def __init__(self, major, minor, patch):
+ """
+ :param int major
+ :param int minor
+ :param int patch
+ :raises ValueError
+ """
+ version_parts = {
+ "major": major,
+ "minor": minor,
+ "patch": patch,
+ }
+
+ for name, value in version_parts.items():
+ value = int(value)
+ version_parts[name] = value
+ if value < 0:
+ raise ValueError(
+ "{!r} is negative. A version can only be positive.".format(name)
+ )
+
+ self._values = (version_parts["major"], version_parts["minor"], version_parts["patch"])
+
+ def __str__(self):
+ return "{}.{}.{}".format(self._values[0], self._values[1], self._values[2])
+
+ def __repr__(self):
+ return '<Version({})>'.format(self)
+
+ def __eq__(self, other):
+ """
+ :param Version other
+ :rtype: bool
+ """
+ return self.as_tuple() == other.as_tuple()
+
+ def __ne__(self, other):
+ return self.as_tuple() != other.as_tuple()
+
+ def __gt__(self, other):
+ """
+ :param Version other
+ :rtype: bool
+ """
+ return self.as_tuple() > other.as_tuple()
+
+ def __ge__(self, other):
+ """
+ :param Version other
+ :rtype: bool
+ """
+ return self.as_tuple() >= other.as_tuple()
+
+ def __lt__(self, other):
+ """
+ :param Version other
+ :rtype: bool
+ """
+ return self.as_tuple() < other.as_tuple()
+
+ def __le__(self, other):
+ """
+ :param Version other
+ :rtype: bool
+ """
+ return self.as_tuple() <= other.as_tuple()
+
+ @property
+ def major(self):
+ """The major part of the version (read-only)."""
+ return self._values[0]
+
+ @major.setter
+ def major(self, value):
+ raise AttributeError("Attribute 'major' is readonly")
+
+ @property
+ def minor(self):
+ """The minor part of the version (read-only)."""
+ return self._values[1]
+
+ @minor.setter
+ def minor(self, value):
+ raise AttributeError("Attribute 'minor' is readonly")
+
+ @property
+ def patch(self):
+ """The patch part of the version (read-only)."""
+ return self._values[2]
+
+ @patch.setter
+ def patch(self, value):
+ raise AttributeError("Attribute 'patch' is readonly")
+
+ def as_tuple(self):
+ """
+ :rtype: tuple
+ """
+ return self._values
+
+
+class Operator:
+ EQ = "="
+ GT = ">"
+ GE = ">="
+ LT = "<"
+ LE = "<="
+
+
+class VersionRange:
+ @classmethod
+ def from_str(cls, input):
+ """
+ :param str input
+ :rtype: VersionRange
+ :raises: ValueError
+ """
+ parts = input.strip().split(Operator.GE) # the only supported range operator at the moment
+
+ if len(parts) != 2 or parts[0] != "":
+ raise ValueError("Unsupported version range: '{}'. Currently we only support ranges formatted like so: '>= 1.2.3'".format(input))
+
+ version = Version.from_str(parts[1])
+
+ return cls(Operator.GE, version)
+
+ __slots__ = ("_operator", "_version")
+
+ def __init__(self, operator, version):
+ """
+ :param str operator
+ :raises: ValueError
+ """
+ if operator != Operator.GE:
+ raise ValueError("Unsupported range operator '{}'".format(operator))
+
+ self._operator = operator
+ self._version = version
+
+ @property
+ def operator(self):
+ """The comparison operator to be used (read-only)."""
+ return self._operator
+
+ @operator.setter
+ def operator(self, value):
+ raise AttributeError("Attribute 'operator' is readonly")
+
+ @property
+ def version(self):
+ """Version to be used with the operator (read-only)."""
+ return self._version
+
+ @version.setter
+ def version(self, value):
+ raise AttributeError("Attribute 'version' is readonly")
+
+ def is_satisfied_by(self, version):
+ """
+ :param Version version
+ :rtype: bool
+ :raises: ValueError
+ """
+ if self._operator != Operator.GE:
+ raise ValueError("Unsupported operator '{}'".format(self._operator))
+
+ return version >= self._version
diff --git a/build/plugins/lib/nots/semver/tests/test_version.py b/build/plugins/lib/nots/semver/tests/test_version.py
new file mode 100644
index 0000000000..0fa49ea15b
--- /dev/null
+++ b/build/plugins/lib/nots/semver/tests/test_version.py
@@ -0,0 +1,242 @@
+from functools import cmp_to_key
+
+from build.plugins.lib.nots.semver import Version
+
+
+def test_from_str():
+ # arrange
+ version_str = "1.2.3"
+
+ # act
+ version = Version.from_str(version_str)
+
+ # assert
+ assert version.major == 1
+ assert version.minor == 2
+ assert version.patch == 3
+
+
+def test_from_str_bad_version():
+ # arrange
+ version_str = "best version imaginable"
+ error = None
+
+ # act
+ try:
+ Version.from_str(version_str)
+ except Exception as exception:
+ error = exception
+
+ # assert
+ assert error is not None
+
+
+def test_is_stable_true():
+ # arrange
+ version_str = "1.2.3"
+
+ # act + assert
+ assert Version.is_stable(version_str)
+
+
+def test_is_stable_false():
+ # arrange
+ version_str = "1.2.3-beta1"
+
+ # act + assert
+ assert not Version.is_stable(version_str)
+
+
+def test_is_stable_incorrect():
+ # arrange
+ version_str = "v1.2.3"
+
+ # act + assert
+ assert not Version.is_stable(version_str)
+
+
+def test_cmp_lt():
+ # arrange
+ a = Version.from_str("1.2.3")
+ b = Version.from_str("1.2.5")
+
+ # act + assert
+ assert Version.cmp(a, b) == -1
+
+
+def test_cmp_gt():
+ # arrange
+ a = Version.from_str("1.2.3")
+ b = Version.from_str("1.2.2")
+
+ # act + assert
+ assert Version.cmp(a, b) == 1
+
+
+def test_cmp_eq():
+ # arrange
+ a = Version.from_str("1.2.3")
+ b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert Version.cmp(a, b) == 0
+
+
+def test_cmp_lt_str():
+ # arrange
+ a = "1.2.3"
+ b = "1.2.5"
+
+ # act + assert
+ assert Version.cmp(a, b) == -1
+
+
+def test_cmp_gt_str():
+ # arrange
+ a = "1.2.3"
+ b = "1.2.2"
+
+ # act + assert
+ assert Version.cmp(a, b) == 1
+
+
+def test_cmp_eq_str():
+ # arrange
+ a = "1.2.3"
+ b = "1.2.3"
+
+ # act + assert
+ assert Version.cmp(a, b) == 0
+
+
+def test_cmp_usage_in_sorted_asc():
+ # arrange
+ unsorted = ["1.2.3", "2.4.2", "1.2.7"]
+
+ # act + assert
+ assert sorted(unsorted, key=cmp_to_key(Version.cmp)) == ["1.2.3", "1.2.7", "2.4.2"]
+
+
+def test_cmp_usage_in_sorted_desc():
+ # arrange
+ unsorted = ["1.2.3", "2.4.2", "1.2.7"]
+
+ # act + assert
+ assert sorted(unsorted, key=cmp_to_key(Version.cmp), reverse=True) == ["2.4.2", "1.2.7", "1.2.3"]
+
+
+def test_init_negative_numbers():
+ # arrange
+ major = 1
+ minor = -2
+ patch = 3
+
+ error = None
+
+ # act
+ try:
+ Version(major, minor, patch)
+ except Exception as exception:
+ error = exception
+
+ # assert
+ assert isinstance(error, ValueError)
+ assert str(error) == "'minor' is negative. A version can only be positive."
+
+
+def test_eq():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a == version_b
+
+
+def test_eq_negative():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("3.2.1")
+
+ # act + assert
+ assert not version_a == version_b
+
+
+def test_ne():
+ # arrange
+ version_a = Version.from_str("3.2.1")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a != version_b
+
+
+def test_ne_negative():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert not version_a != version_b
+
+
+def test_gt():
+ # arrange
+ version_a = Version.from_str("3.2.1")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a > version_b
+
+
+def test_ge_equals():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a >= version_b
+
+
+def test_ge_exceeds():
+ # arrange
+ version_a = Version.from_str("3.2.1")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a >= version_b
+
+
+def test_lt():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("3.2.1")
+
+ # act + assert
+ assert version_a < version_b
+
+
+def test_le_equals():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a <= version_b
+
+
+def test_le_is_less():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("3.2.1")
+
+ # act + assert
+ assert version_a <= version_b
+
+
+def test_to_tuple():
+ # arrange
+ version = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version.as_tuple() == (1, 2, 3)
diff --git a/build/plugins/lib/nots/semver/tests/test_version_range.py b/build/plugins/lib/nots/semver/tests/test_version_range.py
new file mode 100644
index 0000000000..b2a5e556b5
--- /dev/null
+++ b/build/plugins/lib/nots/semver/tests/test_version_range.py
@@ -0,0 +1,69 @@
+from build.plugins.lib.nots.semver import Version, Operator, VersionRange
+
+
+def test_from_str():
+ # arrange
+ range_str = ">= 1.2.3"
+
+ # act
+ range = VersionRange.from_str(range_str)
+
+ # assert
+ assert isinstance(range, VersionRange)
+ assert range.operator == Operator.GE
+
+
+def test_from_str_no_operator():
+ # arrange
+ range_str = r"¯\_(ツ)_/¯"
+ error = None
+
+ # act
+ try:
+ VersionRange.from_str(range_str)
+ except Exception as exception:
+ error = exception
+
+ # assert
+ assert isinstance(error, ValueError)
+ assert str(error) == "Unsupported version range: '{}'. Currently we only support ranges formatted like so: '>= 1.2.3'".format(range_str)
+
+
+def test_init():
+ # arrange
+ operator = Operator.GE
+ version = Version.from_str("1.2.3")
+
+ # act
+ range = VersionRange(operator, version)
+
+ # assert
+ assert range.operator == Operator.GE
+ assert range.version == Version(1, 2, 3)
+
+
+def test_is_satisfied_by_starts_with():
+ # arrange
+ version = Version.from_str("1.2.3")
+ range = VersionRange.from_str(">= 1.2.3")
+
+ # act + assert
+ assert range.is_satisfied_by(version)
+
+
+def test_is_satisfied_by_includes():
+ # arrange
+ version = Version.from_str("5.8.2")
+ range = VersionRange.from_str(">= 1.2.3")
+
+ # act + assert
+ assert range.is_satisfied_by(version)
+
+
+def test_is_satisfied_by_not_includes():
+ # arrange
+ version = Version.from_str("1.2.2")
+ range = VersionRange.from_str(">= 1.2.3")
+
+ # act + assert
+ assert not range.is_satisfied_by(version)
diff --git a/build/plugins/lib/nots/semver/tests/ya.make b/build/plugins/lib/nots/semver/tests/ya.make
new file mode 100644
index 0000000000..b7605505f3
--- /dev/null
+++ b/build/plugins/lib/nots/semver/tests/ya.make
@@ -0,0 +1,14 @@
+PY3TEST()
+
+OWNER(g:frontend-build-platform)
+
+PEERDIR(
+ build/plugins/lib/nots/semver
+)
+
+TEST_SRCS(
+ test_version_range.py
+ test_version.py
+)
+
+END()
diff --git a/build/plugins/lib/nots/semver/ya.make b/build/plugins/lib/nots/semver/ya.make
new file mode 100644
index 0000000000..7d2be228f2
--- /dev/null
+++ b/build/plugins/lib/nots/semver/ya.make
@@ -0,0 +1,14 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+ semver.py
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/typescript/__init__.py b/build/plugins/lib/nots/typescript/__init__.py
new file mode 100644
index 0000000000..95f458ac5f
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/__init__.py
@@ -0,0 +1,9 @@
+from .ts_config import TsConfig
+from .ts_errors import TsError, TsValidationError
+
+
+__all__ = [
+ "TsConfig",
+ "TsError",
+ "TsValidationError",
+]
diff --git a/build/plugins/lib/nots/typescript/tests/ts_config.py b/build/plugins/lib/nots/typescript/tests/ts_config.py
new file mode 100644
index 0000000000..4b8fd675b3
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/tests/ts_config.py
@@ -0,0 +1,86 @@
+import pytest
+
+from build.plugins.lib.nots.typescript import TsConfig, TsValidationError
+
+
def test_ts_config_validate_valid():
    """A config that sets both rootDir and outDir passes validation."""
    compiler_options = {
        "rootDir": "./src",
        "outDir": "./build",
    }
    cfg = TsConfig(path="/tsconfig.json")
    cfg.data = {"compilerOptions": compiler_options}

    cfg.validate()  # must not raise
+
+
def test_ts_config_validate_empty():
    """An empty config is rejected: rootDir and outDir are both mandatory."""
    cfg = TsConfig(path="/tsconfig.json")

    with pytest.raises(TsValidationError) as excinfo:
        cfg.validate()

    expected = [
        "'rootDir' option is required",
        "'outDir' option is required",
    ]
    assert excinfo.value.errors == expected
+
+
def test_ts_config_validate_invalid_common():
    """All unsupported options are reported together, in a stable order."""
    cfg = TsConfig(path="/tsconfig.json")
    cfg.data = {
        "compilerOptions": {"preserveSymlinks": True, "rootDirs": [], "outFile": "./foo.js"},
        "references": [],
        "files": [],
        "include": [],
        "exclude": [],
    }

    with pytest.raises(TsValidationError) as excinfo:
        cfg.validate()

    expected = [
        "'rootDir' option is required",
        "'outDir' option is required",
        "'outFile' option is not supported",
        "'preserveSymlinks' option is not supported due to pnpm limitations",
        "'rootDirs' option is not supported, relative imports should have single root",
        "'files' option is not supported, use 'include'",
        "composite builds are not supported, use peerdirs in ya.make instead of 'references' option",
    ]
    assert excinfo.value.errors == expected
+
+
def test_ts_config_validate_invalid_subdirs():
    """An outDir pointing outside the module directory is rejected."""
    cfg = TsConfig(path="/foo/tsconfig.json")
    cfg.data = {
        "compilerOptions": {"rootDir": "/bar/src", "outDir": "../bar/build"},
    }

    with pytest.raises(TsValidationError) as excinfo:
        cfg.validate()

    assert excinfo.value.errors == ["'outDir' should be a subdirectory of the module"]
+
+
def test_ts_config_compiler_options():
    """compiler_option() returns None for unknown keys, the stored value otherwise."""
    cfg = TsConfig(path="/tsconfig.json")

    assert cfg.compiler_option("invalid") is None

    cfg.data = {"compilerOptions": {"rootDir": "src"}}

    assert cfg.compiler_option("rootDir") == "src"
diff --git a/build/plugins/lib/nots/typescript/tests/ya.make b/build/plugins/lib/nots/typescript/tests/ya.make
new file mode 100644
index 0000000000..44798138bc
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/tests/ya.make
@@ -0,0 +1,13 @@
+PY23_TEST()
+
+OWNER(g:frontend-build-platform)
+
+TEST_SRCS(
+ ts_config.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/typescript
+)
+
+END()
diff --git a/build/plugins/lib/nots/typescript/ts_config.py b/build/plugins/lib/nots/typescript/ts_config.py
new file mode 100644
index 0000000000..e3855b5f08
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/ts_config.py
@@ -0,0 +1,300 @@
+import copy
+import os
+import json
+
+from .ts_errors import TsError, TsValidationError
+
+from ..package_manager.base import utils
+
+DEFAULT_TS_CONFIG_FILE = "tsconfig.json"
+
+
def merge_dicts(d1, d2):
    """
    Recursively merge ``d2`` into ``d1``, assuming similar structure.

    Dicts are merged key by key (``d1`` is updated in place); lists are
    concatenated; for any other combination (including type mismatches at
    the same path) the value from ``d2`` wins.
    """
    both_dicts = isinstance(d1, dict) and isinstance(d2, dict)
    if not both_dicts:
        if isinstance(d1, list) and isinstance(d2, list):
            return d1 + d2
        return d2

    for key, value in d2.items():
        if key in d1:
            d1[key] = merge_dicts(d1[key], value)
        else:
            d1[key] = value
    return d1
+
+
class TsConfig(object):
    """
    In-memory model of a tsconfig.json file.

    Wraps the parsed JSON data and implements the operations the build needs:
    merging with a base config ("extends" inlining), validation against
    toolchain restrictions, and path rewriting for out-of-source builds.
    """

    @classmethod
    def load(cls, path):
        """
        Read and parse a tsconfig file.

        :param path: tsconfig.json path
        :type path: str
        :rtype: TsConfig
        :raises TsError: if the file cannot be read or parsed
        """
        tsconfig = cls(path)
        tsconfig.read()

        return tsconfig

    def __init__(self, path):
        if not os.path.isabs(path):
            raise TypeError("Absolute path required, given: {}".format(path))

        self.path = path
        self.data = {}

    def read(self):
        """
        Parse the file at ``self.path`` into ``self.data``.

        :raises TsError: if the file cannot be opened or is not valid JSON
        """
        try:
            with open(self.path) as f:
                self.data = json.load(f)
        except Exception as e:
            raise TsError("Failed to read tsconfig {}: {}".format(self.path, e))

    def merge(self, rel_path, base_tsconfig):
        """
        Merges the base config into this one; own values take priority.

        :param rel_path: relative path to the configuration file we are merging in.
            It is required to set the relative paths correctly.
        :type rel_path: str
        :param base_tsconfig: base TsConfig we are merging with our TsConfig instance
        :type base_tsconfig: TsConfig
        """
        if not base_tsconfig.data:
            return

        def relative_path(p):
            return os.path.normpath(os.path.join(rel_path, p))

        # Work on a deep copy so the base config object is left untouched.
        base_config_data = copy.deepcopy(base_tsconfig.data)

        parameter_section_labels = ["compilerOptions", "typeAcquisition", "watchOptions"]
        for opt_label in parameter_section_labels:
            base_options = base_config_data.get(opt_label)
            if not base_options:
                continue

            new_options = self.data.get(opt_label)
            for key in base_options:
                val = base_options[key]

                # single path values
                if key in ["extends", "outDir", "rootDir", "baseUrl", "include"]:
                    val = relative_path(val)

                # lists of paths
                elif key in ["rootDirs", "excludeDirectories", "excludeFiles"]:
                    # list comprehension instead of map(): on py3 map() returns a lazy
                    # iterator, which must not leak into the config data (PY23_LIBRARY)
                    val = [relative_path(p) for p in val]

                # dicts having paths as values
                elif key in ["paths"]:
                    # NOTE(review): iterating the "paths" dict yields its pattern keys,
                    # not the target path lists — looks suspicious, but kept as-is apart
                    # from the py3 map() fix and the missing-section guard; confirm
                    # the intended semantics.
                    new_paths = new_options.get(key) if new_options else None
                    val = [relative_path(p) for p in val] + (new_paths if new_paths else [])

                base_options[key] = val

            if new_options and base_options:
                base_options.update(new_options)
                self.data[opt_label] = base_options

        # Own top-level keys win over the base config's.
        base_config_data.update(self.data)
        self.data = base_config_data

    def inline_extend(self, dep_paths):
        """
        Merges the tsconfig parameters from configuration file referred by "extends" if any.
        Relative paths are adjusted, current parameter values are prioritized higher than
        those coming from the extension file (according to TSC merging rules).
        Returns list of file paths for config files merged into the current configuration
        :param dep_paths: dict of dependency names to their paths
        :type dep_paths: dict
        :rtype: list of str
        """
        ext_value = self.data.get("extends")
        if not ext_value:
            return []

        if ext_value.startswith("."):
            # Relative reference — use as-is.
            base_config_path = ext_value

        else:
            # Reference into a dependency: "<package name>/<config path>".
            dep_name = utils.extract_package_name_from_path(ext_value)
            # the rest part is the ext config path
            file_path_start = len(dep_name) + 1
            file_path = ext_value[file_path_start:]
            dep_path = dep_paths.get(dep_name)
            if dep_path is None:
                raise Exception(
                    "referenceing from {}, data: {}\n: Dependency '{}' not found in dep_paths: {}".format(
                        self.path, str(self.data), dep_name, dep_paths
                    )
                )
            base_config_path = os.path.join(dep_path, file_path)

        rel_path = os.path.dirname(base_config_path)
        # "extends" may point at a directory, in which case tsconfig.json inside it is meant.
        tsconfig_curdir_path = os.path.join(os.path.dirname(self.path), base_config_path)
        if os.path.isdir(tsconfig_curdir_path):
            base_config_path = os.path.join(base_config_path, DEFAULT_TS_CONFIG_FILE)

        # processing the base file recursively
        base_config = TsConfig.load(os.path.join(os.path.dirname(self.path), base_config_path))
        paths = [base_config_path] + base_config.inline_extend(dep_paths)

        self.merge(rel_path, base_config)
        del self.data["extends"]

        return paths

    def get_or_create_compiler_options(self):
        """
        Returns ref to the "compilerOptions" dict, creating it if absent.
        :rtype: dict
        """
        opts = self.data.get("compilerOptions")
        if opts is None:
            opts = {}
            self.data["compilerOptions"] = opts

        return opts

    def prepend_include(self, value):
        """
        Prepends `value` to the `include` list, creating the list if absent.
        :param value: value to prepend
        :type value: str
        """
        # Tolerate configs that have no "include" section at all
        # (previously this raised TypeError on None).
        include_list = self.data.get("include") or []
        self.data["include"] = [value] + include_list

    def compiler_option(self, name, default=None):
        """
        :param name: option key
        :type name: str
        :param default: default value
        :type default: mixed
        :rtype: mixed
        """
        return self.get_or_create_compiler_options().get(name, default)

    def add_to_compiler_option(self, name, add_value):
        """
        Merges the existing value with add_value for the option with label=name.
        Merge is done recursively if the value is of a dict instance.
        :param name: option key
        :type name: str
        :param add_value: option value to merge in
        :type add_value: mixed
        """
        default_value = {} if isinstance(add_value, dict) else []
        opts = self.get_or_create_compiler_options()
        opts[name] = merge_dicts(opts.get(name, default_value), add_value)

    def inject_plugin(self, plugin):
        """
        :param plugin: plugin dict (ts-patch compatible, see https://github.com/nonara/ts-patch)
        :type plugin: dict of str
        """
        opts = self.get_or_create_compiler_options()
        if not opts.get("plugins"):
            opts["plugins"] = []
        opts["plugins"].append(plugin)

    def validate(self):
        """
        Checks whether the config is compatible with current toolchain.

        :raises TsValidationError: with the full list of violations
        """
        opts = self.get_or_create_compiler_options()
        errors = []
        root_dir = opts.get("rootDir")
        out_dir = opts.get("outDir")
        config_dir = os.path.dirname(self.path)

        def is_mod_subdir(p):
            # NOTE(review): plain startswith() prefix check — "/foo" would also accept
            # "/foobar/..."; confirm whether that edge case matters here.
            return not os.path.isabs(p) and os.path.normpath(os.path.join(config_dir, p)).startswith(config_dir)

        if root_dir is None:
            errors.append("'rootDir' option is required")

        if out_dir is None:
            errors.append("'outDir' option is required")
        elif not is_mod_subdir(out_dir):
            errors.append("'outDir' should be a subdirectory of the module")

        if opts.get("outFile") is not None:
            errors.append("'outFile' option is not supported")

        if opts.get("preserveSymlinks"):
            errors.append("'preserveSymlinks' option is not supported due to pnpm limitations")

        if opts.get("rootDirs") is not None:
            errors.append("'rootDirs' option is not supported, relative imports should have single root")

        if self.data.get("files") is not None:
            errors.append("'files' option is not supported, use 'include'")

        if self.data.get("references") is not None:
            errors.append("composite builds are not supported, use peerdirs in ya.make instead of 'references' option")

        if errors:
            raise TsValidationError(self.path, errors)

    def transform_paths(self, build_path, sources_path, package_rel_path, nodejs_bin_path):
        """
        Updates config with correct abs paths.
        All source files/dirs will be mapped to `sources_path`, output files/dirs will be mapped to `build_path`.
        :param build_path: module's build root
        :type build_path: str
        :param sources_path: module's source root
        :type sources_path: str
        :param package_rel_path: module's rel path to package root
        :type package_rel_path: str
        :param nodejs_bin_path: path to the node binary, used to locate bundled @types/node
        :type nodejs_bin_path: str
        """
        opts = self.get_or_create_compiler_options()

        def sources_path_rel(x):
            return os.path.normpath(os.path.join(sources_path, x))

        def build_path_rel(x):
            return os.path.normpath(os.path.join(build_path, x))

        root_dir = opts["rootDir"]
        out_dir = opts["outDir"]

        opts["rootDir"] = sources_path_rel(root_dir)
        opts["outDir"] = build_path_rel(out_dir)

        if opts.get("typeRoots"):
            # Type roots may live both in sources and in generated build output.
            opts["typeRoots"] = list(map(sources_path_rel, opts["typeRoots"])) + list(
                map(build_path_rel, opts["typeRoots"])
            )

        opts["baseUrl"] = os.path.normpath(os.path.join(package_rel_path, "node_modules"))

        include_dir_list = self.data.get("include")
        if include_dir_list:
            self.data["include"] = list(map(sources_path_rel, include_dir_list))

        exclude_dir_list = self.data.get("exclude")
        if exclude_dir_list:
            self.data["exclude"] = list(map(sources_path_rel, exclude_dir_list))

        if opts.get("sourceMap"):
            opts["sourceRoot"] = os.path.relpath(root_dir, out_dir)

        opts["skipLibCheck"] = True

        node_types_path = os.path.join(os.path.dirname(nodejs_bin_path), "node_modules", "@types", "node")
        # See: https://st.yandex-team.ru/FBP-47#62b4750775525b18f08205c7
        self.add_to_compiler_option("paths", {"*": ["*", "./@types/*", node_types_path]})

    def write(self, path=None, indent=None):
        """
        :param path: tsconfig path, defaults to original path
        :type path: str
        """
        if path is None:
            path = self.path

        with open(path, "w") as f:
            json.dump(self.data, f, indent=indent)
diff --git a/build/plugins/lib/nots/typescript/ts_errors.py b/build/plugins/lib/nots/typescript/ts_errors.py
new file mode 100644
index 0000000000..105851d9ec
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/ts_errors.py
@@ -0,0 +1,10 @@
class TsError(RuntimeError):
    """Base class for TypeScript build errors."""


class TsValidationError(TsError):
    """Raised when a tsconfig fails validation.

    Keeps the offending config path and the full list of error messages.
    """

    def __init__(self, path, errors):
        self.path = path
        self.errors = errors

        message = "Invalid tsconfig {}:\n{}".format(path, "\n".join(errors))
        super(TsValidationError, self).__init__(message)
diff --git a/build/plugins/lib/nots/typescript/ya.make b/build/plugins/lib/nots/typescript/ya.make
new file mode 100644
index 0000000000..8847f9bbd3
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/ya.make
@@ -0,0 +1,19 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+ ts_errors.py
+ ts_config.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/ya.make b/build/plugins/lib/nots/ya.make
new file mode 100644
index 0000000000..b24c534033
--- /dev/null
+++ b/build/plugins/lib/nots/ya.make
@@ -0,0 +1,15 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager
+ build/plugins/lib/nots/semver
+ build/plugins/lib/nots/typescript
+)
+
+END()
diff --git a/build/plugins/lib/test_const/__init__.py b/build/plugins/lib/test_const/__init__.py
new file mode 100644
index 0000000000..a3229bad25
--- /dev/null
+++ b/build/plugins/lib/test_const/__init__.py
@@ -0,0 +1,522 @@
+# coding: utf-8
+import re
+
+
+RESTART_TEST_INDICATOR = '##restart-test##'
+INFRASTRUCTURE_ERROR_INDICATOR = '##infrastructure-error##'
+
+RESTART_TEST_INDICATORS = [
+ RESTART_TEST_INDICATOR,
+ "network error",
+]
+
+UID_PREFIX_DELIMITER = '-'
+
+# testing
+BIN_DIRECTORY = 'bin'
+CANON_DATA_DIR_NAME = "canondata"
+CANON_RESULT_FILE_NAME = "result.json"
+CANONIZATION_RESULT_FILE_NAME = "canonization_res.json"
+COMMON_CONTEXT_FILE_NAME = "common_test.context"
+CONSOLE_SNIPPET_LIMIT = 5000
+FAKE_OUTPUT_EXTS = frozenset([".mf", ".fake", ".cpf", ".cpsf"])
+LIST_NODE_LOG_FILE = "test_list.log"
+LIST_NODE_RESULT_FILE = "test_list.json"
+LIST_RESULT_NODE_LOG_FILE = "list_result.log"
+LIST_TRACE_FILE_NAME = "ytest_list.report.trace"
+MAX_FILE_SIZE = 1024 * 1024 * 2 # 2 MB
+MAX_TEST_RESTART_COUNT = 3
+NO_LISTED_TESTS = "NO_LISTED_TESTS"
+REPORT_SNIPPET_LIMIT = 12000
+SANITIZER_ERROR_RC = 100
+SUITE_CONTEXT_FILE_NAME = "test.context"
+TEST_LIST_FILE = "test_names_list.json"
+TEST_SUBTEST_SEPARATOR = '::'
+TESTING_OUT_DIR_NAME = "testing_out_stuff"
+TESTING_OUT_RAM_DRIVE_DIR_NAME = "ram_drive_output"
+TESTING_OUT_TAR_NAME = TESTING_OUT_DIR_NAME + ".tar.zstd"
+TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
+TRACE_FILE_NAME = "ytest.report.trace"
+TRUNCATING_IGNORE_FILE_LIST = {TRACE_FILE_NAME, SUITE_CONTEXT_FILE_NAME, "run_test.log"}
+YT_RUN_TEST_DIR_NAME = "yt_run_test"
+YT_RUN_TEST_TAR_NAME = "yt_run_test.tar"
+COVERAGE_CFLAGS = ["-fprofile-instr-generate", "-fcoverage-mapping", "-DCLANG_COVERAGE"]
+COVERAGE_LDFLAGS = ["-fprofile-instr-generate", "-fcoverage-mapping"]
+
+MANDATORY_ENV_VAR_NAME = 'YA_MANDATORY_ENV_VARS'
+
+BUILD_FLAGS_ALLOWED_IN_CONTEXT = {
+ 'AUTOCHECK',
+ # Required for local test runs
+ 'TESTS_REQUESTED',
+ 'USE_ARCADIA_PYTHON',
+ 'USE_SYSTEM_PYTHON',
+}
+
+STYLE_TEST_TYPES = [
+ "classpath.clash",
+ "clang_tidy",
+ "eslint",
+ "flake8.py2",
+ "flake8.py3",
+ "gofmt",
+ "govet",
+ "java.style",
+ "ktlint",
+ "custom_lint",
+]
+
+REGULAR_TEST_TYPES = [
+ "benchmark",
+ "boost_test",
+ "exectest",
+ "fuzz",
+ "g_benchmark",
+ "go_bench",
+ "go_test",
+ "gtest",
+ "hermione",
+ "java",
+ "jest",
+ "py2test",
+ "py3test",
+ "pytest",
+ "unittest",
+]
+
+TEST_NODE_OUTPUT_RESULTS = [TESTING_OUT_TAR_NAME, YT_RUN_TEST_TAR_NAME]
+
+# kvm
+DEFAULT_RAM_REQUIREMENTS_FOR_KVM = 4
+MAX_RAM_REQUIREMENTS_FOR_KVM = 16
+
+# distbuild
+DISTBUILD_STATUS_REPORT_ENV_NAME = 'NODE_EXTENDED_STATUS_FILE_PATH'
+DEFAULT_TEST_NODE_TIMEOUT = 15 * 60
+TEST_NODE_FINISHING_TIME = 5 * 60
+
+# coverage
+COVERAGE_FUNCTION_ENTRIES_LIMIT = 2
+COVERAGE_PYTHON_EXTS = (".py", ".pyx", ".pxi", ".pxd")
+
+COVERAGE_RESOLVED_FILE_NAME_PATTERN = "coverage_resolved.{}.json"
+CPP_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("cpp")
+GO_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("go")
+JAVA_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("java")
+NLG_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("nlg")
+PYTHON2_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("py2")
+PYTHON3_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("py3")
+TS_COVERAGE_RESOLVED_FILE_NAME = COVERAGE_RESOLVED_FILE_NAME_PATTERN.format("ts")
+
+COVERAGE_CLANG_ENV_NAME = 'LLVM_PROFILE_FILE'
+COVERAGE_GCOV_ENV_NAME = 'GCOV_PREFIX'
+COVERAGE_GO_ENV_NAME = 'GO_COVERAGE_PREFIX'
+COVERAGE_PYTHON_ENV_NAME = 'PYTHON_COVERAGE_PREFIX'
+COVERAGE_TS_ENV_NAME = 'TS_COVERAGE_PREFIX'
+COVERAGE_NLG_ENV_NAME = 'NLG_COVERAGE_FILENAME'
+COVERAGE_ENV_VARS = (
+ COVERAGE_CLANG_ENV_NAME,
+ COVERAGE_GCOV_ENV_NAME,
+ COVERAGE_GO_ENV_NAME,
+ COVERAGE_NLG_ENV_NAME,
+ COVERAGE_PYTHON_ENV_NAME,
+ COVERAGE_TS_ENV_NAME,
+)
+PYTHON_COVERAGE_PREFIX_FILTER_ENV_NAME = 'PYTHON_COVERAGE_PREFIX_FILTER'
+PYTHON_COVERAGE_EXCLUDE_REGEXP_ENV_NAME = 'PYTHON_COVERAGE_EXCLUDE_REGEXP'
+
+CLANG_COVERAGE_TEST_TYPES = (
+ "boost_test",
+ "coverage_extractor",
+ "exectest",
+ "gtest",
+ # java tests might use shared libraries
+ "java",
+ "py2test",
+ "py3test",
+ "pytest",
+ "unittest",
+)
+COVERAGE_TABLE_CHUNKS = 20
+COVERAGE_TESTS_TIMEOUT_FACTOR = 1.5
+COVERAGE_YT_PROXY = "hahn.yt.yandex.net"
+COVERAGE_YT_ROOT_PATH = "//home/codecoverage"
+COVERAGE_YT_TABLE_PREFIX = "datatable"
+
+# fuzzing
+CORPUS_DATA_FILE_NAME = 'corpus.json'
+CORPUS_DATA_ROOT_DIR = 'fuzzing'
+CORPUS_DIR_NAME = 'corpus'
+FUZZING_COVERAGE_ARGS = ['--sanitize-coverage=trace-div,trace-gep']
+FUZZING_COMPRESSION_COEF = 1.1
+FUZZING_DEFAULT_TIMEOUT = 3600
+FUZZING_FINISHING_TIME = 600
+FUZZING_TIMEOUT_RE = re.compile(r'(^|\s)-max_total_time=(?P<max_time>\d+)')
+GENERATED_CORPUS_DIR_NAME = 'mined_corpus'
+MAX_CORPUS_RESOURCES_ALLOWED = 5
+
+# hermione
+HERMIONE_REPORT_DIR_NAME = "hermione-report"
+HERMIONE_REPORT_TAR_NAME = HERMIONE_REPORT_DIR_NAME + ".tar"
+HERMIONE_REPORT_INDEX_FILE_NAME = "index.html"
+HERMIONE_REPORT_DB_URLS_FILE_NAME = "databaseUrls.json"
+HERMIONE_TESTS_READ_FILE_NAME = "tests.json"
+HERMIONE_TESTS_READ_STDOUT_FILE_NAME = "read_tests.out"
+HERMIONE_TESTS_READ_STDERR_FILE_NAME = "read_tests.err"
+HERMIONE_TESTS_RUN_FILE_NAME = "test_results.jsonl"
+HERMIONE_TESTS_RUN_STDOUT_FILE_NAME = "run_tests.out"
+HERMIONE_TESTS_RUN_STDERR_FILE_NAME = "run_tests.err"
+
+# yt
+YT_OPERATION_ID_SUBSTITUTION = '$OPERATION_ID'
+YT_SANDBOX_ROOT_PREFIX = '$(YT_SANDBOX_ROOT)'
+
+# sandbox
+SANDBOX_RUN_TEST_YT_TOKEN_VALUE_NAME = 'YA_MAKE_SANDBOX_RUN_TEST_YT_TOKEN'
+
+# global resources
+ANDROID_AVD_ROOT = 'ANDROID_AVD_RESOURCE_GLOBAL'
+ANDROID_SDK_ROOT = 'ANDROID_SDK_RESOURCE_GLOBAL'
+COVERAGE_PUSH_TOOL_LOCAL = 'USE_SYSTEM_COVERAGE_PUSH_TOOL'
+COVERAGE_PUSH_TOOL_RESOURCE = 'COVERAGE_PUSH_TOOL_RESOURCE_GLOBAL'
+COVERAGE_PUSH_TOOL_LB_LOCAL = 'USE_SYSTEM_COVERAGE_PUSH_TOOL_LB'
+COVERAGE_PUSH_TOOL_LB_RESOURCE = 'COVERAGE_PUSH_TOOL_LB_RESOURCE_GLOBAL'
+FLAKE8_PY2_RESOURCE = 'FLAKE8_PY2_RESOURCE_GLOBAL'
+FLAKE8_PY3_RESOURCE = 'FLAKE8_PY3_RESOURCE_GLOBAL'
+GO_TOOLS_RESOURCE = 'GO_TOOLS_RESOURCE_GLOBAL'
+JSTYLE_RUNNER_LIB = 'JSTYLE_LIB_RESOURCE_GLOBAL'
+NODEJS_RESOURCE = 'NODEJS_RESOURCE_GLOBAL'
+NYC_RESOURCE = 'NYC_RESOURCE_GLOBAL'
+TEST_TOOL3_HOST = 'TEST_TOOL3_HOST_RESOURCE_GLOBAL'
+TEST_TOOL3_HOST_LOCAL = 'TEST_TOOL3_HOST_LOCAL'
+TEST_TOOL_HOST = 'TEST_TOOL_HOST_RESOURCE_GLOBAL'
+TEST_TOOL_HOST_LOCAL = 'TEST_TOOL_HOST_LOCAL'
+TEST_TOOL_TARGET = 'TEST_TOOL_TARGET_RESOURCE_GLOBAL'
+TEST_TOOL_TARGET_LOCAL = 'TEST_TOOL_TARGET_LOCAL'
+XCODE_TOOLS_RESOURCE = 'XCODE_TOOLS_ROOT_RESOURCE_GLOBAL'
+WINE_TOOL = 'WINE_TOOL_RESOURCE_GLOBAL'
+WINE32_TOOL = 'WINE32_TOOL_RESOURCE_GLOBAL'
+
+
class Enum(object):
    """Poor man's enum base: subclass attributes are the enum values."""

    @classmethod
    def enumerate(cls):
        """Return the values of all public (non-underscore) class attributes."""
        values = []
        for name, value in cls.__dict__.items():
            if name.startswith("_"):
                continue
            values.append(value)
        return values
+
+
class TestRequirements(Enum):
    """Names of the supported per-test requirement keys (cpu, ram, kvm, ...)."""

    Container = 'container'
    Cpu = 'cpu'
    DiskUsage = 'disk_usage'
    Dns = 'dns'
    Kvm = 'kvm'
    Network = 'network'
    Ram = 'ram'
    RamDisk = 'ram_disk'
    SbVault = 'sb_vault'
    YavSecret = 'yav'
+
+
class TestRequirementsConstants(Enum):
    """Bounds and special values for test requirements ('all' resolution)."""

    All = 'all'
    AllCpuValue = 50
    AllRamDiskValue = 50
    MinCpu = 1
    MinRam = 1
    MinRamDisk = 0

    @classmethod
    def is_all_cpu(cls, value):
        """True when the cpu requirement is the special 'all' marker."""
        return value == cls.All

    @classmethod
    def get_cpu_value(cls, value):
        """Resolve the 'all' marker to a concrete cpu count; pass others through."""
        if cls.is_all_cpu(value):
            return cls.AllCpuValue
        return value

    @classmethod
    def is_all_ram_disk(cls, value):
        """True when the ram_disk requirement is the special 'all' marker."""
        return value == cls.All

    @classmethod
    def get_ram_disk_value(cls, value):
        """Resolve the 'all' marker to a concrete ram_disk size; pass others through."""
        if cls.is_all_ram_disk(value):
            return cls.AllRamDiskValue
        return value
+
+
class TestSize(Enum):
    """Test size classes (small/medium/large) and their per-size defaults.

    Holds default timeouts, scheduling priorities and resource
    requirements/limits, plus the shorthand markers used in reports.
    """

    Small = 'small'
    Medium = 'medium'
    Large = 'large'

    DefaultTimeouts = {
        Small: 60,
        Medium: 600,
        Large: 3600,
    }

    DefaultPriorities = {
        Small: -1,
        Medium: -2,
        Large: -3,
    }

    DefaultRequirements = {
        Small: {
            TestRequirements.Cpu: 1,
            TestRequirements.Ram: 8,
            # TestRequirements.Ram: 2,
            TestRequirements.RamDisk: 0,
        },
        Medium: {
            TestRequirements.Cpu: 1,
            TestRequirements.Ram: 8,
            # TestRequirements.Ram: 4,
            TestRequirements.RamDisk: 0,
        },
        Large: {
            TestRequirements.Cpu: 1,
            TestRequirements.Ram: 8,
            # TestRequirements.Ram: 8,
            TestRequirements.RamDisk: 0,
        },
    }

    MaxRequirements = {
        Small: {
            TestRequirements.Cpu: 4,
            TestRequirements.Ram: 32,
            # TestRequirements.Ram: 4,
            TestRequirements.RamDisk: 32,
        },
        Medium: {
            TestRequirements.Cpu: 4,
            # TestRequirements.Cpu: 8,
            TestRequirements.Ram: 32,
            # TestRequirements.Ram: 16,
            TestRequirements.RamDisk: 32,
        },
        Large: {
            TestRequirements.Cpu: 4,
            TestRequirements.Ram: 32,
            TestRequirements.RamDisk: 32,
        },
    }

    LargeMarker = "TL"
    MediumMarker = "TM"
    SmallMarker = "TS"
    SizeMarkers = (LargeMarker, MediumMarker, SmallMarker)

    SizeShorthandMap = {
        Large: LargeMarker,
        Medium: MediumMarker,
        Small: SmallMarker,
    }

    @classmethod
    def sizes(cls):
        """All known size names."""
        return cls.DefaultTimeouts.keys()

    @classmethod
    def get_shorthand(cls, size):
        """Shorthand marker ("TS"/"TM"/"TL") for the given size."""
        return cls.SizeShorthandMap[size]

    @classmethod
    def is_test_shorthand(cls, name):
        """True if `name` is one of the shorthand markers."""
        return name in cls.SizeMarkers

    @classmethod
    def _lookup(cls, mapping, size):
        # The four public getters shared the same lookup-or-raise pattern;
        # consolidated here so the error message stays consistent.
        if size in mapping:
            return mapping[size]
        raise Exception("Unknown test size '{}'".format(size))

    @classmethod
    def get_default_timeout(cls, size):
        """Default timeout (seconds) for the given size."""
        return cls._lookup(cls.DefaultTimeouts, size)

    @classmethod
    def get_default_priorities(cls, size):
        """Default scheduling priority for the given size."""
        return cls._lookup(cls.DefaultPriorities, size)

    @classmethod
    def get_default_requirements(cls, size):
        """Default resource requirements for the given size."""
        return cls._lookup(cls.DefaultRequirements, size)

    @classmethod
    def get_max_requirements(cls, size):
        """Maximum allowed resource requirements for the given size."""
        return cls._lookup(cls.MaxRequirements, size)
+
+
class TestRunExitCode(Enum):
    """Exit codes used to report special test-run outcomes."""

    Skipped = 2
    Failed = 3
    TimeOut = 10
    InfrastructureError = 12
+
+
class YaTestTags(Enum):
    """String constants for the `ya:*` test tags."""

    AlwaysMinimize = "ya:always_minimize"
    Dirty = "ya:dirty"
    DumpNodeEnvironment = "ya:dump_node_env"
    DumpTestEnvironment = "ya:dump_test_env"
    ExoticPlatform = "ya:exotic_platform"
    External = "ya:external"
    Fat = "ya:fat"
    ForceDistbuild = "ya:force_distbuild"
    ForceSandbox = "ya:force_sandbox"
    GoNoSubtestReport = "ya:go_no_subtest_report"
    GoTotalReport = "ya:go_total_report"
    HugeLogs = "ya:huge_logs"
    Manual = "ya:manual"
    MapRootUser = "ya:map_root_user"
    NoFuse = "ya:nofuse"
    NoGracefulShutdown = "ya:no_graceful_shutdown"
    Norestart = "ya:norestart"
    Noretries = "ya:noretries"
    NotAutocheck = "ya:not_autocheck"
    Notags = "ya:notags"
    PerfTest = "ya:perftest"
    Privileged = "ya:privileged"
    ReportChunks = "ya:report_chunks"
    RunWithAsserts = "ya:relwithdebinfo"
    SandboxCoverage = "ya:sandbox_coverage"
    SequentialRun = "ya:sequential_run"
    TraceOutput = "ya:trace_output"
    YtRunner = "ya:yt"
+
+
class ServiceTags(Enum):
    """Special service-level tag names."""

    AnyTag = "ya:__any_tag"
+
+
class Status(object):
    """Numeric test status codes and their canonical string names.

    Positive codes (1..7) are ordinary results; negative codes are
    special states. BY_NAME maps name -> code; TO_STR is its exact
    inverse and is now derived so the two tables cannot drift apart.
    """

    GOOD, XFAIL, FAIL, XPASS, MISSING, CRASHED, TIMEOUT = range(1, 8)
    SKIPPED = -100
    NOT_LAUNCHED = -200
    CANON_DIFF = -300
    DESELECTED = -400
    INTERNAL = -int(2 ** 31 - 1)  # maxint
    FLAKY = -50
    # XFAILDIFF is internal status and should be replaced
    # with XFAIL or XPASS during verification stage of canon data
    XFAILDIFF = -90

    BY_NAME = {
        'crashed': CRASHED,
        'deselected': DESELECTED,
        'diff': CANON_DIFF,
        'fail': FAIL,
        'flaky': FLAKY,
        'good': GOOD,
        'internal': INTERNAL,
        'missing': MISSING,
        'not_launched': NOT_LAUNCHED,
        'skipped': SKIPPED,
        'timeout': TIMEOUT,
        'xfail': XFAIL,
        'xfaildiff': XFAILDIFF,
        'xpass': XPASS,
    }
    # Derived inverse of BY_NAME (the previous hand-written table was an
    # exact mirror and a maintenance hazard).
    TO_STR = {code: name for name, code in BY_NAME.items()}
+
+
class _Colors(object):
    """Namespace of color markers (BLUE, LIGHT_BLUE, DARK_BLUE, ...).

    Every combination of prefix ("", "light", "dark") and base color name
    becomes an UPPER_SNAKE_CASE attribute whose value is "<prefix>-<name>".
    """

    _NAMES = [
        "blue",
        "cyan",
        "default",
        "green",
        "grey",
        "magenta",
        "red",
        "white",
        "yellow",
    ]
    _PREFIXES = ["", "light", "dark"]

    def __init__(self):
        self._table = {}
        for prefix in self._PREFIXES:
            for base in self._NAMES:
                attr = "{}_{}".format(prefix, base) if prefix else base
                # NB: the value keeps the "-" separator even for an empty
                # prefix (e.g. RED == "-red"), matching the marker format.
                self.__add_color(attr.upper(), "{}-{}".format(prefix, base))

    def __add_color(self, name, value):
        self._table[name] = value
        self.__setattr__(name, value)


Colors = _Colors()
+
+
class _Highlight(object):
    """Namespace of highlight markers (RESET -> "rst", BAD -> "bad", ...)."""

    _MARKERS = {
        # special
        "RESET": "rst",
        "IMPORTANT": "imp",
        "UNIMPORTANT": "unimp",
        "BAD": "bad",
        "WARNING": "warn",
        "GOOD": "good",
        "PATH": "path",
        "ALTERNATIVE1": "alt1",
        "ALTERNATIVE2": "alt2",
        "ALTERNATIVE3": "alt3",
    }

    def __init__(self):
        # setting attributes because __getattr__ is much slower
        for name in self._MARKERS:
            setattr(self, name, self._MARKERS[name])


Highlight = _Highlight()
+
+
class _StatusColorMap(object):
    """Maps a status name (see Status.TO_STR values) to its highlight marker."""

    # There should be no XFAILDIFF, because it's internal status.
    # It should be replaced with XFAIL or XPASS during verification of canon data.

    _MAP = {
        'crashed': Highlight.WARNING,
        'deselected': Highlight.UNIMPORTANT,
        'diff': Highlight.BAD,
        'fail': Highlight.BAD,
        'flaky': Highlight.ALTERNATIVE3,
        'good': Highlight.GOOD,
        'internal': Highlight.BAD,
        'missing': Highlight.ALTERNATIVE1,
        'not_launched': Highlight.BAD,
        'skipped': Highlight.UNIMPORTANT,
        'timeout': Highlight.BAD,
        'xfail': Highlight.WARNING,
        'xpass': Highlight.WARNING,
    }

    def __getitem__(self, item):
        # Raises KeyError for unknown status names.
        return self._MAP[item]


StatusColorMap = _StatusColorMap()
diff --git a/build/plugins/lib/test_const/ya.make b/build/plugins/lib/test_const/ya.make
new file mode 100644
index 0000000000..60f4867e46
--- /dev/null
+++ b/build/plugins/lib/test_const/ya.make
@@ -0,0 +1,9 @@
+OWNER(g:ymake)
+
+PY23_LIBRARY()
+
+PY_SRCS(
+ __init__.py
+)
+
+END()
diff --git a/build/plugins/lib/ya.make b/build/plugins/lib/ya.make
new file mode 100644
index 0000000000..7e61d12080
--- /dev/null
+++ b/build/plugins/lib/ya.make
@@ -0,0 +1,7 @@
+OWNER(g:ymake)
+
+PY23_LIBRARY()
+PY_SRCS(
+    _metric_resolvers.py
+)
+END()