path: root/build/plugins/lib/nots/package_manager/base
author     alexv-smirnov <alex@ydb.tech>  2023-06-13 11:05:01 +0300
committer  alexv-smirnov <alex@ydb.tech>  2023-06-13 11:05:01 +0300
commit     bf0f13dd39ee3e65092ba3572bb5b1fcd125dcd0 (patch)
tree       1d1df72c0541a59a81439842f46d95396d3e7189 /build/plugins/lib/nots/package_manager/base
parent     8bfdfa9a9bd19bddbc58d888e180fbd1218681be (diff)
download   ydb-bf0f13dd39ee3e65092ba3572bb5b1fcd125dcd0.tar.gz
add ymake export to ydb
Diffstat (limited to 'build/plugins/lib/nots/package_manager/base')
-rw-r--r--  build/plugins/lib/nots/package_manager/base/__init__.py               20
-rw-r--r--  build/plugins/lib/nots/package_manager/base/constants.py               7
-rw-r--r--  build/plugins/lib/nots/package_manager/base/lockfile.py                69
-rw-r--r--  build/plugins/lib/nots/package_manager/base/node_modules_bundler.py    66
-rw-r--r--  build/plugins/lib/nots/package_manager/base/package_json.py           198
-rw-r--r--  build/plugins/lib/nots/package_manager/base/package_manager.py        153
-rw-r--r--  build/plugins/lib/nots/package_manager/base/tests/package_json.py     201
-rw-r--r--  build/plugins/lib/nots/package_manager/base/tests/utils.py             15
-rw-r--r--  build/plugins/lib/nots/package_manager/base/tests/ya.make              14
-rw-r--r--  build/plugins/lib/nots/package_manager/base/utils.py                   29
-rw-r--r--  build/plugins/lib/nots/package_manager/base/ya.make                    23
11 files changed, 795 insertions, 0 deletions
diff --git a/build/plugins/lib/nots/package_manager/base/__init__.py b/build/plugins/lib/nots/package_manager/base/__init__.py
new file mode 100644
index 0000000000..022d4a960e
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/__init__.py
@@ -0,0 +1,20 @@
+from . import constants, utils
+from .lockfile import BaseLockfile, LockfilePackageMeta, LockfilePackageMetaInvalidError
+from .package_json import PackageJson
+from .package_manager import BasePackageManager, PackageManagerError, PackageManagerCommandError
+from .node_modules_bundler import bundle_node_modules, extract_node_modules
+
+
+__all__ = [
+ "constants",
+ "utils",
+ "BaseLockfile",
+ "LockfilePackageMeta",
+ "LockfilePackageMetaInvalidError",
+ "BasePackageManager",
+ "PackageManagerError",
+ "PackageManagerCommandError",
+ "PackageJson",
+ "bundle_node_modules",
+ "extract_node_modules",
+]
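
For orientation, a minimal sketch of how downstream plugin code might consume this public API; the import path follows the Arcadia layout from the diffstat above, and the project path is purely illustrative:

    from build.plugins.lib.nots.package_manager.base import PackageJson, utils

    # Load a package.json by absolute path (PackageJson.load requires one) and inspect it.
    pj = PackageJson.load(utils.build_pj_path("/arcadia/frontend/some_app"))
    print(pj.get_name(), pj.get_nodejs_version())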
diff --git a/build/plugins/lib/nots/package_manager/base/constants.py b/build/plugins/lib/nots/package_manager/base/constants.py
new file mode 100644
index 0000000000..d03df2a570
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/constants.py
@@ -0,0 +1,7 @@
+BUILD_DIRNAME = "build"
+BUNDLE_DIRNAME = "bundle"
+NODE_MODULES_BUNDLE_FILENAME = "node_modules.tar"
+NODE_MODULES_DIRNAME = "node_modules"
+NODE_MODULES_WORKSPACE_BUNDLE_FILENAME = "workspace_node_modules.tar"
+NPM_REGISTRY_URL = "http://npm.yandex-team.ru"
+PACKAGE_JSON_FILENAME = "package.json"
diff --git a/build/plugins/lib/nots/package_manager/base/lockfile.py b/build/plugins/lib/nots/package_manager/base/lockfile.py
new file mode 100644
index 0000000000..1d7cc6ad3e
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/lockfile.py
@@ -0,0 +1,69 @@
+import os
+
+from abc import ABCMeta, abstractmethod
+from six import add_metaclass
+
+
+class LockfilePackageMeta(object):
+ """
+ Basic struct representing package meta from lockfile.
+ """
+
+ __slots__ = ("name", "version", "sky_id", "integrity", "integrity_algorithm", "tarball_path")
+
+ @staticmethod
+ def from_str(s):
+ return LockfilePackageMeta(*s.strip().split(" "))
+
+ def __init__(self, name, version, sky_id, integrity, integrity_algorithm):
+ self.name = name
+ self.version = version
+ self.sky_id = sky_id
+ self.integrity = integrity
+ self.integrity_algorithm = integrity_algorithm
+ self.tarball_path = "{}-{}.tgz".format(name, version)
+
+ def to_str(self):
+ return " ".join([self.name, self.version, self.sky_id, self.integrity, self.integrity_algorithm])
+
+
+class LockfilePackageMetaInvalidError(RuntimeError):
+ pass
+
+
+@add_metaclass(ABCMeta)
+class BaseLockfile(object):
+ @classmethod
+ def load(cls, path):
+ """
+ :param path: lockfile path
+ :type path: str
+ :rtype: BaseLockfile
+ """
+ pj = cls(path)
+ pj.read()
+
+ return pj
+
+ def __init__(self, path):
+ if not os.path.isabs(path):
+ raise TypeError("Absolute path required, given: {}".format(path))
+
+ self.path = path
+ self.data = None
+
+ @abstractmethod
+ def read(self):
+ pass
+
+ @abstractmethod
+ def write(self, path=None):
+ pass
+
+ @abstractmethod
+ def get_packages_meta(self):
+ pass
+
+ @abstractmethod
+ def update_tarball_resolutions(self, fn):
+ pass
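
BaseLockfile only pins down the interface; the concrete parsers live in the per-manager packages. As a rough illustration of the four abstract methods, here is a hypothetical subclass backed by a flat JSON file (not the real implementation):

    import json

    from build.plugins.lib.nots.package_manager.base import BaseLockfile, LockfilePackageMeta


    class JsonLockfile(BaseLockfile):
        def read(self):
            with open(self.path) as f:
                self.data = json.load(f)

        def write(self, path=None):
            with open(path or self.path, "w") as f:
                json.dump(self.data, f, indent=2)

        def get_packages_meta(self):
            # Assumes entries formatted as "name version sky_id integrity integrity_algorithm".
            return [LockfilePackageMeta.from_str(s) for s in self.data.get("packages", [])]

        def update_tarball_resolutions(self, fn):
            # fn maps a LockfilePackageMeta to a new resolution string.
            self.data["resolutions"] = {m.name: fn(m) for m in self.get_packages_meta()}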
diff --git a/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py b/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py
new file mode 100644
index 0000000000..c835c4d7ca
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py
@@ -0,0 +1,66 @@
+import os
+import tarfile
+
+from io import BytesIO
+
+from .utils import build_nm_path
+
+
+PEERS_DIR = ".peers"
+PEERS_INDEX = "index"
+
+
+def bundle_node_modules(build_root, peers, node_modules_path, bundle_path):
+ """
+ Creates node_modules bundle.
+ The bundle contains the node_modules directory, the peers' node_modules directories,
+ and an index file with the list of added peers (\\n delimited).
+ :param build_root: arcadia build root
+ :type build_root: str
+ :param peers: list of peers (arcadia root related)
+ :type peers: list of str
+ :param node_modules_path: node_modules path
+ :type node_modules_path: str
+ :param bundle_path: tarball path
+ :type bundle_path: str
+ """
+ with tarfile.open(bundle_path, "w") as tf:
+ tf.add(node_modules_path, arcname=".")
+
+ # Peers' node_modules.
+ added_peers = []
+ for p in peers:
+ peer_nm_path = build_nm_path(os.path.join(build_root, p))
+ peer_bundled_nm_path = build_nm_path(os.path.join(PEERS_DIR, p))
+ if not os.path.isdir(peer_nm_path):
+ continue
+ tf.add(peer_nm_path, arcname=peer_bundled_nm_path)
+ added_peers.append(p)
+
+ # Peers index.
+ peers_index = "\n".join(added_peers)
+ ti = tarfile.TarInfo(name=os.path.join(PEERS_DIR, PEERS_INDEX))
+ ti.size = len(peers_index)
+ tf.addfile(ti, BytesIO(peers_index.encode()))
+
+
+def extract_node_modules(build_root, node_modules_path, bundle_path):
+ """
+ Extracts node_modules bundle.
+ :param build_root: arcadia build root
+ :type build_root: str
+ :param node_modules_path: node_modules path
+ :type node_modules_path: str
+ :param bundle_path: tarball path
+ :type bundle_path: str
+ """
+ with tarfile.open(bundle_path) as tf:
+ tf.extractall(node_modules_path)
+
+ peers = open(os.path.join(node_modules_path, PEERS_DIR, PEERS_INDEX)).read().split("\n")
+ for p in peers:
+ if not p:
+ continue
+ bundled_nm_path = build_nm_path(os.path.join(node_modules_path, PEERS_DIR, p))
+ nm_path = build_nm_path(os.path.join(build_root, p))
+ os.rename(bundled_nm_path, nm_path)
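
A hedged usage sketch for the two functions above; the build root, module and peer paths are hypothetical:

    from build.plugins.lib.nots.package_manager.base import bundle_node_modules, extract_node_modules

    build_root = "/build_root"
    peers = ["frontend/libs/a", "frontend/libs/b"]  # paths relative to the arcadia root

    # Pack the module's node_modules plus the peers' node_modules into one tarball.
    bundle_node_modules(
        build_root=build_root,
        peers=peers,
        node_modules_path="/build_root/frontend/app/node_modules",
        bundle_path="/build_root/frontend/app/node_modules.tar",
    )

    # Later, unpack the tarball and move each peer's node_modules back under the build root.
    extract_node_modules(
        build_root=build_root,
        node_modules_path="/build_root/frontend/app/node_modules",
        bundle_path="/build_root/frontend/app/node_modules.tar",
    )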
diff --git a/build/plugins/lib/nots/package_manager/base/package_json.py b/build/plugins/lib/nots/package_manager/base/package_json.py
new file mode 100644
index 0000000000..d99b1e8254
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/package_json.py
@@ -0,0 +1,198 @@
+import json
+import logging
+import os
+
+from six import iteritems
+
+from .utils import build_pj_path
+
+logger = logging.getLogger(__name__)
+
+
+class PackageJsonWorkspaceError(RuntimeError):
+ pass
+
+
+class PackageJson(object):
+ DEP_KEY = "dependencies"
+ DEV_DEP_KEY = "devDependencies"
+ PEER_DEP_KEY = "peerDependencies"
+ OPT_DEP_KEY = "optionalDependencies"
+ DEP_KEYS = (DEP_KEY, DEV_DEP_KEY, PEER_DEP_KEY, OPT_DEP_KEY)
+
+ WORKSPACE_SCHEMA = "workspace:"
+
+ @classmethod
+ def load(cls, path):
+ """
+ :param path: package.json path
+ :type path: str
+ :rtype: PackageJson
+ """
+ pj = cls(path)
+ pj.read()
+
+ return pj
+
+ def __init__(self, path):
+ if not os.path.isabs(path):
+ raise TypeError("Absolute path required, given: {}".format(path))
+
+ self.path = path
+ self.data = None
+
+ def read(self):
+ with open(self.path) as f:
+ self.data = json.load(f)
+
+ def write(self, path=None):
+ """
+ :param path: path to store package.json, defaults to original path
+ :type path: str
+ """
+ if path is None:
+ path = self.path
+
+ directory = os.path.dirname(path)
+ if not os.path.exists(directory):
+ os.mkdir(directory)
+
+ with open(path, "w") as f:
+ json.dump(self.data, f, indent=2, ensure_ascii=False)
+ f.write('\n')  # a trailing newline is friendlier to the diff algorithm in arc
+ logger.debug("Written {}".format(path))
+
+ def get_name(self):
+ name = self.data.get("name")
+
+ if not name:
+ name = self.path.replace("/", "-")
+
+ return name
+
+ def get_version(self):
+ return self.data["version"]
+
+ def get_description(self):
+ return self.data.get("description")
+
+ def get_nodejs_version(self):
+ return self.data.get("engines", {}).get("node")
+
+ def get_dep_specifier(self, dep_name):
+ for name, spec in self.dependencies_iter():
+ if dep_name == name:
+ return spec
+ return None
+
+ def dependencies_iter(self):
+ for key in self.DEP_KEYS:
+ deps = self.data.get(key)
+ if not deps:
+ continue
+
+ for name, spec in iteritems(deps):
+ yield (name, spec)
+
+ def has_dependencies(self):
+ first_dep = next(self.dependencies_iter(), None)
+ return first_dep is not None
+
+ def bins_iter(self):
+ bins = self.data.get("bin")
+ if isinstance(bins, str):
+ yield bins
+ elif isinstance(bins, dict):
+ for bin in bins.values():
+ yield bin
+
+ def get_workspace_dep_spec_paths(self):
+ """
+ Returns names and paths from specifiers of the defined workspace dependencies.
+ :rtype: list of (str, str)
+ """
+ spec_paths = []
+ schema = self.WORKSPACE_SCHEMA
+ schema_len = len(schema)
+
+ for name, spec in self.dependencies_iter():
+ if not spec.startswith(schema):
+ continue
+
+ spec_path = spec[schema_len:]
+ if not (spec_path.startswith(".") or spec_path.startswith("..")):
+ raise PackageJsonWorkspaceError(
+ "Expected relative path specifier for workspace dependency, but got '{}' for {} in {}".format(
+ spec, name, self.path
+ )
+ )
+
+ spec_paths.append((name, spec_path))
+
+ return spec_paths
+
+ def get_workspace_dep_paths(self, base_path=None):
+ """
+ Returns paths of the defined workspace dependencies.
+ :param base_path: base path to resolve relative dep paths
+ :type base_path: str
+ :rtype: list of str
+ """
+ if base_path is None:
+ base_path = os.path.dirname(self.path)
+
+ return [os.path.normpath(os.path.join(base_path, p)) for _, p in self.get_workspace_dep_spec_paths()]
+
+ def get_workspace_deps(self):
+ """
+ :rtype: list of PackageJson
+ """
+ ws_deps = []
+ pj_dir = os.path.dirname(self.path)
+
+ for name, rel_path in self.get_workspace_dep_spec_paths():
+ dep_path = os.path.normpath(os.path.join(pj_dir, rel_path))
+ dep_pj = PackageJson.load(build_pj_path(dep_path))
+
+ if name != dep_pj.get_name():
+ raise PackageJsonWorkspaceError(
+ "Workspace dependency name mismatch, found '{}' instead of '{}' in {}".format(
+ name, dep_pj.get_name(), self.path
+ )
+ )
+
+ ws_deps.append(dep_pj)
+
+ return ws_deps
+
+ def get_workspace_map(self, ignore_self=False):
+ """
+ Returns absolute paths of the workspace dependencies (including transitive) mapped to package.json and depth.
+ :param ignore_self: whether path of the current module will be excluded
+ :type ignore_self: bool
+ :rtype: dict of (PackageJson, int)
+ """
+ ws_deps = {}
+ # list of (pj, depth)
+ pj_queue = [(self, 0)]
+
+ while len(pj_queue):
+ (pj, depth) = pj_queue.pop()
+ pj_dir = os.path.dirname(pj.path)
+ if pj_dir in ws_deps:
+ continue
+
+ if not ignore_self or pj != self:
+ ws_deps[pj_dir] = (pj, depth)
+
+ for dep_pj in pj.get_workspace_deps():
+ pj_queue.append((dep_pj, depth + 1))
+
+ return ws_deps
+
+ def get_dep_paths_by_names(self):
+ """
+ Returns dict of {dependency_name: dependency_path}
+ """
+ ws_map = self.get_workspace_map()
+ return {pj.get_name(): path for path, (pj, _) in ws_map.items()}
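
A short usage sketch of the PackageJson helpers; the project layout and dependency names are hypothetical:

    from build.plugins.lib.nots.package_manager.base import PackageJson

    pj = PackageJson.load("/arcadia/frontend/app/package.json")

    # Iterate every dependency section (dependencies, devDependencies, peer/optional).
    for name, spec in pj.dependencies_iter():
        print(name, spec)

    # Workspace map: absolute package dir -> (PackageJson, depth), transitive deps included.
    for path, (dep_pj, depth) in pj.get_workspace_map().items():
        print(depth, path, dep_pj.get_name())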
diff --git a/build/plugins/lib/nots/package_manager/base/package_manager.py b/build/plugins/lib/nots/package_manager/base/package_manager.py
new file mode 100644
index 0000000000..d594d4ea92
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/package_manager.py
@@ -0,0 +1,153 @@
+import os
+import sys
+import subprocess
+
+from abc import ABCMeta, abstractmethod
+from six import add_metaclass
+
+from .constants import NPM_REGISTRY_URL
+from .package_json import PackageJson
+from .utils import build_nm_path, build_pj_path
+
+
+class PackageManagerError(RuntimeError):
+ pass
+
+
+class PackageManagerCommandError(PackageManagerError):
+ def __init__(self, cmd, code, stdout, stderr):
+ self.cmd = cmd
+ self.code = code
+ self.stdout = stdout
+ self.stderr = stderr
+
+ msg = "package manager exited with code {} while running {}:\n{}\n{}".format(code, cmd, stdout, stderr)
+ super(PackageManagerCommandError, self).__init__(msg)
+
+
+@add_metaclass(ABCMeta)
+class BasePackageManager(object):
+ def __init__(
+ self,
+ build_root,
+ build_path,
+ sources_path,
+ nodejs_bin_path,
+ script_path,
+ contribs_path,
+ module_path=None,
+ sources_root=None,
+ ):
+ self.module_path = build_path[len(build_root) + 1 :] if module_path is None else module_path
+ self.build_path = build_path
+ self.sources_path = sources_path
+ self.build_root = build_root
+ self.sources_root = sources_path[: -len(self.module_path) - 1] if sources_root is None else sources_root
+ self.nodejs_bin_path = nodejs_bin_path
+ self.script_path = script_path
+ self.contribs_path = contribs_path
+
+ @classmethod
+ def load_package_json(cls, path):
+ """
+ :param path: path to package.json
+ :type path: str
+ :rtype: PackageJson
+ """
+ return PackageJson.load(path)
+
+ @classmethod
+ def load_package_json_from_dir(cls, dir_path):
+ """
+ :param dir_path: path to directory with package.json
+ :type dir_path: str
+ :rtype: PackageJson
+ """
+ return cls.load_package_json(build_pj_path(dir_path))
+
+ @classmethod
+ @abstractmethod
+ def load_lockfile(cls, path):
+ pass
+
+ @classmethod
+ @abstractmethod
+ def load_lockfile_from_dir(cls, dir_path):
+ pass
+
+ @abstractmethod
+ def create_node_modules(self):
+ pass
+
+ @abstractmethod
+ def calc_node_modules_inouts(self):
+ pass
+
+ @abstractmethod
+ def extract_packages_meta_from_lockfiles(self, lf_paths):
+ pass
+
+ def get_local_peers_from_package_json(self):
+ """
+ Returns paths of direct workspace dependencies (source root related).
+ :rtype: list of str
+ """
+ return self.load_package_json_from_dir(self.sources_path).get_workspace_dep_paths(base_path=self.module_path)
+
+ def get_peers_from_package_json(self):
+ """
+ Returns paths of workspace dependencies (source root related).
+ :rtype: list of str
+ """
+ pj = self.load_package_json_from_dir(self.sources_path)
+ prefix_len = len(self.sources_root) + 1
+
+ return [p[prefix_len:] for p in pj.get_workspace_map(ignore_self=True).keys()]
+
+ def _exec_command(self, args, include_defaults=True):
+ if not self.nodejs_bin_path:
+ raise PackageManagerError("Unable to execute command: nodejs_bin_path is not configured")
+
+ cmd = (
+ [self.nodejs_bin_path, self.script_path] + args + (self._get_default_options() if include_defaults else [])
+ )
+ p = subprocess.Popen(
+ cmd,
+ cwd=self.build_path,
+ stdin=None,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+ stdout, stderr = p.communicate()
+
+ if p.returncode != 0:
+ self._dump_debug_log()
+
+ raise PackageManagerCommandError(cmd, p.returncode, stdout.decode("utf-8"), stderr.decode("utf-8"))
+
+ def _nm_path(self, *parts):
+ return os.path.join(build_nm_path(self.build_path), *parts)
+
+ def _contrib_tarball_path(self, pkg):
+ return os.path.join(self.contribs_path, pkg.tarball_path)
+
+ def _contrib_tarball_url(self, pkg):
+ return "file:" + self._contrib_tarball_path(pkg)
+
+ def _get_default_options(self):
+ return ["--registry", NPM_REGISTRY_URL]
+
+ def _get_debug_log_path(self):
+ return None
+
+ def _dump_debug_log(self):
+ log_path = self._get_debug_log_path()
+
+ if not log_path:
+ return
+
+ try:
+ with open(log_path) as f:
+ sys.stderr.write("Package manager log {}:\n{}\n".format(log_path, f.read()))
+ except Exception:
+ sys.stderr.write("Failed to dump package manager log {}.\n".format(log_path))
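
Concrete managers (npm/pnpm) subclass BasePackageManager in sibling packages; the skeleton below is only a sketch of what a subclass has to supply, with the lockfile class, lockfile name and CLI arguments invented for illustration:

    import os

    from build.plugins.lib.nots.package_manager.base import BasePackageManager


    class ExamplePackageManager(BasePackageManager):
        @classmethod
        def load_lockfile(cls, path):
            # ExampleLockfile would be a BaseLockfile subclass defined elsewhere (hypothetical).
            return ExampleLockfile.load(path)

        @classmethod
        def load_lockfile_from_dir(cls, dir_path):
            return cls.load_lockfile(os.path.join(dir_path, "example-lock.json"))

        def create_node_modules(self):
            # Runs the bundled package manager script via node; the "install" argument is illustrative.
            self._exec_command(["install"])

        def calc_node_modules_inouts(self):
            return [], []

        def extract_packages_meta_from_lockfiles(self, lf_paths):
            for lf_path in lf_paths:
                for meta in self.load_lockfile(lf_path).get_packages_meta():
                    yield meta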
diff --git a/build/plugins/lib/nots/package_manager/base/tests/package_json.py b/build/plugins/lib/nots/package_manager/base/tests/package_json.py
new file mode 100644
index 0000000000..ccf7d4f607
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/package_json.py
@@ -0,0 +1,201 @@
+import os
+import pytest
+
+from build.plugins.lib.nots.package_manager.base.package_json import PackageJson, PackageJsonWorkspaceError
+
+
+def test_get_name_exist():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "name": "package-name",
+ }
+
+ name = pj.get_name()
+
+ assert name == "package-name"
+
+
+def test_get_name_none():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {}
+
+ name = pj.get_name()
+
+ assert name == "packages-foo"
+
+
+def test_get_workspace_dep_spec_paths_ok():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@yandex-int/baz": "workspace:../baz",
+ },
+ }
+
+ ws_dep_spec_paths = pj.get_workspace_dep_spec_paths()
+
+ assert ws_dep_spec_paths == [
+ ("@yandex-int/bar", "../bar"),
+ ("@yandex-int/baz", "../baz"),
+ ]
+
+
+def test_get_workspace_dep_spec_paths_invalid_path():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:*",
+ },
+ }
+
+ with pytest.raises(PackageJsonWorkspaceError) as e:
+ pj.get_workspace_dep_spec_paths()
+
+ assert (
+ str(e.value)
+ == "Expected relative path specifier for workspace dependency, but got 'workspace:*' for @yandex-int/bar in /packages/foo/package.json"
+ )
+
+
+def test_get_workspace_dep_paths_ok():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@yandex-int/baz": "workspace:../baz",
+ },
+ }
+
+ ws_dep_paths = pj.get_workspace_dep_paths()
+
+ assert ws_dep_paths == [
+ "/packages/bar",
+ "/packages/baz",
+ ]
+
+
+def test_get_dep_specifier():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "jestify": "0.0.1",
+ "eslint": ">= 7.27.0",
+ },
+ "devDependencies": {
+ "jest": "27.1.0",
+ "eslinting": "0.0.2",
+ },
+ }
+
+ jest_spec = pj.get_dep_specifier("jest")
+ assert jest_spec == "27.1.0", "Got unexpected jest specifier: {}".format(jest_spec)
+
+ eslint_spec = pj.get_dep_specifier("eslint")
+ assert eslint_spec == ">= 7.27.0", "Got unexpected eslint specifier: {}".format(eslint_spec)
+
+
+def test_get_workspace_dep_paths_with_custom_base_path():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@yandex-int/baz": "workspace:../baz",
+ },
+ }
+
+ ws_dep_paths = pj.get_workspace_dep_paths(base_path="custom/dir")
+
+ assert ws_dep_paths == [
+ "custom/bar",
+ "custom/baz",
+ ]
+
+
+def test_get_workspace_deps_ok():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@yandex-int/baz": "workspace:../baz",
+ },
+ }
+
+ def load_mock(cls, path):
+ p = PackageJson(path)
+ p.data = {
+ "name": "@yandex-int/{}".format(os.path.basename(os.path.dirname(path))),
+ }
+ return p
+
+ PackageJson.load = classmethod(load_mock)
+
+ ws_deps = pj.get_workspace_deps()
+
+ assert len(ws_deps) == 2
+ assert ws_deps[0].path == "/packages/bar/package.json"
+ assert ws_deps[1].path == "/packages/baz/package.json"
+
+
+def test_get_workspace_deps_with_wrong_name():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ }
+
+ def load_mock(cls, path):
+ p = PackageJson(path)
+ p.data = {
+ "name": "@shouldbe/{}".format(os.path.basename(os.path.dirname(path))),
+ }
+ return p
+
+ PackageJson.load = classmethod(load_mock)
+
+ with pytest.raises(PackageJsonWorkspaceError) as e:
+ pj.get_workspace_deps()
+
+ assert (
+ str(e.value)
+ == "Workspace dependency name mismatch, found '@yandex-int/bar' instead of '@shouldbe/bar' in /packages/foo/package.json"
+ )
+
+
+def test_get_workspace_map_ok():
+ pj = PackageJson("/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@yandex-int/bar": "workspace:../bar",
+ },
+ }
+
+ def load_mock(cls, path):
+ name = os.path.basename(os.path.dirname(path))
+ p = PackageJson(path)
+ p.data = {
+ "name": "@yandex-int/{}".format(name),
+ "dependencies": ({"@yandex-int/qux": "workspace:../qux"} if name == "bar" else {}),
+ }
+ return p
+
+ PackageJson.load = classmethod(load_mock)
+
+ ws_map = pj.get_workspace_map()
+
+ assert len(ws_map) == 3
+ assert ws_map["/packages/foo"][0].path == "/packages/foo/package.json"
+ assert ws_map["/packages/foo"][1] == 0
+ assert ws_map["/packages/bar"][0].path == "/packages/bar/package.json"
+ assert ws_map["/packages/bar"][1] == 1
+ assert ws_map["/packages/qux"][0].path == "/packages/qux/package.json"
+ assert ws_map["/packages/qux"][1] == 2
diff --git a/build/plugins/lib/nots/package_manager/base/tests/utils.py b/build/plugins/lib/nots/package_manager/base/tests/utils.py
new file mode 100644
index 0000000000..4287beec47
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/utils.py
@@ -0,0 +1,15 @@
+from build.plugins.lib.nots.package_manager.base import utils
+
+
+def test_extract_package_name_from_path():
+ happy_checklist = [
+ ("@yandex-int/foo-bar-baz/some/path/inside/the/package", "@yandex-int/foo-bar-baz"),
+ ("@yandex-int/foo-bar-buzz", "@yandex-int/foo-bar-buzz"),
+ ("package-wo-scope", "package-wo-scope"),
+ ("p", "p"),
+ ("", ""),
+ ]
+
+ for item in happy_checklist:
+ package_name = utils.extract_package_name_from_path(item[0])
+ assert package_name == item[1]
diff --git a/build/plugins/lib/nots/package_manager/base/tests/ya.make b/build/plugins/lib/nots/package_manager/base/tests/ya.make
new file mode 100644
index 0000000000..1bece69c33
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/ya.make
@@ -0,0 +1,14 @@
+PY23_TEST()
+
+OWNER(g:frontend-build-platform)
+
+TEST_SRCS(
+ package_json.py
+ utils.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+)
+
+END()
diff --git a/build/plugins/lib/nots/package_manager/base/utils.py b/build/plugins/lib/nots/package_manager/base/utils.py
new file mode 100644
index 0000000000..017bf4ca41
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/utils.py
@@ -0,0 +1,29 @@
+import os
+
+from .constants import PACKAGE_JSON_FILENAME, NODE_MODULES_DIRNAME, NODE_MODULES_BUNDLE_FILENAME
+
+
+def s_rooted(p):
+ return os.path.join("$S", p)
+
+
+def b_rooted(p):
+ return os.path.join("$B", p)
+
+
+def build_pj_path(p):
+ return os.path.join(p, PACKAGE_JSON_FILENAME)
+
+
+def build_nm_path(p):
+ return os.path.join(p, NODE_MODULES_DIRNAME)
+
+
+def build_nm_bundle_path(p):
+ return os.path.join(p, NODE_MODULES_BUNDLE_FILENAME)
+
+
+def extract_package_name_from_path(p):
+ # scoped packages ("@scope/name") keep the first two path tokens; unscoped ones keep only the first
+ parts = p.split("/", 2)
+ return "/".join(parts[:2]) if p.startswith("@") else parts[0]
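
The scope handling is easiest to see on concrete inputs (adapted from the happy-path cases in tests/utils.py earlier in this diff):

    from build.plugins.lib.nots.package_manager.base.utils import extract_package_name_from_path

    assert extract_package_name_from_path("@yandex-int/foo-bar-baz/some/path") == "@yandex-int/foo-bar-baz"
    assert extract_package_name_from_path("package-wo-scope/lib/index.js") == "package-wo-scope"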
diff --git a/build/plugins/lib/nots/package_manager/base/ya.make b/build/plugins/lib/nots/package_manager/base/ya.make
new file mode 100644
index 0000000000..4b7f22f05a
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/ya.make
@@ -0,0 +1,23 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+ constants.py
+ lockfile.py
+ node_modules_bundler.py
+ package_json.py
+ package_manager.py
+ utils.py
+)
+
+PEERDIR(
+ contrib/python/six
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)