summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorzaverden <[email protected]>2025-08-12 20:01:32 +0300
committerzaverden <[email protected]>2025-08-12 21:09:42 +0300
commitc72e97e9fc6fa23f0167dbe81e80e3484976bc5d (patch)
treee3db041105fe8e14adf377210de2e010c8dee0c4
parent6eaa7546be3bf7b9cbed2de5167fce47223839e1 (diff)
feat(TS_PROTO): TS_PROTO_AUTO, package generation
commit_hash:9960194d44a237d2a0a946d4c19be42e59164732
-rw-r--r--build/conf/proto.conf8
-rw-r--r--build/conf/ts/node_modules.conf2
-rw-r--r--build/conf/ts/ts.conf2
-rw-r--r--build/conf/ts/ts_proto.conf66
-rw-r--r--build/plugins/lib/nots/package_manager/__init__.py8
-rw-r--r--build/plugins/lib/nots/package_manager/base/package_json.py12
-rw-r--r--build/plugins/lib/nots/package_manager/base/package_manager.py23
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/constants.py1
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/package_manager.py32
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/utils.py11
-rw-r--r--build/plugins/nots.py39
11 files changed, 170 insertions, 34 deletions
diff --git a/build/conf/proto.conf b/build/conf/proto.conf
index 823d717a7c9..77c6eaf9c5b 100644
--- a/build/conf/proto.conf
+++ b/build/conf/proto.conf
@@ -925,6 +925,14 @@ multimodule PROTO_LIBRARY {
.EPILOGUE=_TS_CONFIG_EPILOGUE
}
+ module TS_PROTO_AUTO: _TS_PROTO_AUTO_IMPL {
+ .SEM=_SEM_IGNORED
+ # opt-in. We don't want to have TS_PROTO_AUTO by default
+    # To include TS_PROTO_AUTO users have to set INCLUDE_TAGS(TS_PROTO_AUTO TS_PREPARE_DEPS) in ya.make
+ .INCLUDE_TAG=no
+ .PEERDIRSELF=TS_PREPARE_DEPS
+ }
+
module TS_PREPARE_DEPS: _TS_PROTO_PREPARE_DEPS_IMPL {
.SEM=_SEM_IGNORED
.INCLUDE_TAG=no
diff --git a/build/conf/ts/node_modules.conf b/build/conf/ts/node_modules.conf
index 7c2fdd91450..30face6c637 100644
--- a/build/conf/ts/node_modules.conf
+++ b/build/conf/ts/node_modules.conf
@@ -33,12 +33,14 @@ _TARBALLS_STORE=__tarballs__
_PREPARE_DEPS_INOUTS=
_PREPARE_DEPS_RESOURCES=
_PREPARE_DEPS_USE_RESOURCES_FLAG=
+_PREPARE_DEPS_TS_PROTO_AUTO_FLAG=
_PREPARE_DEPS_CMD=$TOUCH_UNIT \
&& $NOTS_TOOL $NOTS_TOOL_BASE_ARGS prepare-deps \
--tarballs-store $_TARBALLS_STORE \
$_PREPARE_DEPS_INOUTS \
$_PREPARE_DEPS_RESOURCES \
$_PREPARE_DEPS_USE_RESOURCES_FLAG \
+ $_PREPARE_DEPS_TS_PROTO_AUTO_FLAG \
${hide;kv:"pc magenta"} ${hide;kv:"p TS_DEP"}
# In case of no deps we need to create empty outputs for graph connectivity
diff --git a/build/conf/ts/ts.conf b/build/conf/ts/ts.conf
index 50710c96d56..c9629aaacbf 100644
--- a/build/conf/ts/ts.conf
+++ b/build/conf/ts/ts.conf
@@ -107,7 +107,7 @@ module _TS_BASE_UNIT: _BARE_UNIT {
SET(MODULE_TAG TS)
SET(MODULE_LANG TS)
# TS should peer to TS
- SET(PEERDIR_TAGS TS TS_PROTO TS_PROTO_FROM_SCHEMA)
+ SET(PEERDIR_TAGS TS TS_PROTO TS_PROTO_AUTO TS_PROTO_FROM_SCHEMA)
SET(MODULE_SUFFIX .output.tar)
diff --git a/build/conf/ts/ts_proto.conf b/build/conf/ts/ts_proto.conf
index 57fe5c9a0fe..6af44799728 100644
--- a/build/conf/ts/ts_proto.conf
+++ b/build/conf/ts/ts_proto.conf
@@ -51,11 +51,77 @@ module _TS_PROTO_IMPL: _TS_BASE_UNIT {
_TS_ADD_NODE_MODULES_FOR_BUILDER()
}
+_TS_PROTO_AUTO_PACKAGE_NAME=@yandex-proto/*
+_TS_PROTO_AUTO_DEPS=library/typescript/ts-proto-deps
+
+_TS_PROTO_AUTO_IMPL_CMD=\
+ $NOTS_TOOL $NOTS_TOOL_BASE_ARGS build-ts-proto $NOTS_TOOL_COMMON_BUILDER_ARGS \
+ --auto-package-name $_TS_PROTO_AUTO_PACKAGE_NAME \
+ --auto-deps-path $_TS_PROTO_AUTO_DEPS \
+ ${_TS_PROTO_OPT} \
+ --protoc-bin $PROTOC \
+ --proto-srcs $_TS_PROTO_SRCS_FILES \
+ --proto-paths ./$PROTO_NAMESPACE $ARCADIA_ROOT/$PROTO_NAMESPACE $_PROTO__INCLUDE $ARCADIA_BUILD_ROOT $PROTOBUF_INCLUDE_PATH \
+ ${hide;output:"package.json"} ${hide;output:"pnpm-lock.yaml"} \
+ ${hide:PROTO_FAKEID} \
+    ${hide;kv:"pc magenta"} ${hide;kv:"p TS_PROTO_AUTO"}
+
+### # internal
+module _TS_PROTO_AUTO_IMPL: _TS_BASE_UNIT {
+ .CMD=$_TS_PROTO_AUTO_IMPL_CMD
+ .STRUCT_CMD=yes
+ .SEM=_SEM_IGNORED
+ .IGNORED=GENERATE_ENUM_SERIALIZATION GENERATE_ENUM_SERIALIZATION_WITH_HEADER USE_SKIFF CPP_PROTO_PLUGIN2 PY_PROTO_PLUGIN YMAPS_SPROTO RESOURCE PY_SRCS
+ # We need to collect SRCS paths into list to pass to command
+ .ALIASES=SRCS=_TS_PROTO_SRCS
+
+ # see r14294210
+ DISABLE(_NEED_SBOM_INFO)
+
+ # support for macro USE_COMMON_GOOGLE_APIS()
+ when ($_COMMON_GOOGLE_APIS != "None") {
+ PEERDIR += contrib/libs/googleapis-common-protos
+ }
+
+ # PEERDIR should be resolved to DESC_PROTO submodule
+ SET(PEERDIR_TAGS DESC_PROTO TS)
+ # For users to add some conditional config for TS_PROTO, like
+ # IF (TS_PROTO)
+ # MESSAGE(Building TS_PROTO!!!)
+ ENABLE(TS_PROTO)
+ ENABLE(TS_PROTO_AUTO)
+ # Disable linting
+ NO_LINT()
+
+ SET(MODULE_LANG TS)
+
+ DISABLE(TS_CONFIG_DEDUCE_OUT)
+ PEERDIR($_TS_PROTO_AUTO_DEPS)
+}
+
+
module _TS_PROTO_PREPARE_DEPS_IMPL: _PREPARE_DEPS_BASE {
.IGNORED=PEERDIR SRCS GENERATE_ENUM_SERIALIZATION GENERATE_ENUM_SERIALIZATION_WITH_HEADER USE_SKIFF CPP_PROTO_PLUGIN2 PY_PROTO_PLUGIN YMAPS_SPROTO RESOURCE PY_SRCS
# see r14294210
DISABLE(_NEED_SBOM_INFO)
+
+ ENABLE(TS_PROTO_PREPARE_DEPS)
+}
+
+### @usage: TS_PROTO_PACKAGE_NAME(@scope/pkg)
+###
+### Sets package name for `TS_PROTO_AUTO`.
+### Use `@scope/*` to set the package scope with an autogenerated name.
+###
+### Documentation: https://docs.yandex-team.ru/frontend-in-arcadia/references/PROTO_LIBRARY#ts_proto_package_name
+###
+### @example
+###
+### TS_PROTO_PACKAGE_NAME(@yandex-proto/ci-tasklet-sidecar)
+### TS_PROTO_PACKAGE_NAME(@yandex-proto/*)
+macro TS_PROTO_PACKAGE_NAME(NAME) {
+ SET(_TS_PROTO_AUTO_PACKAGE_NAME $NAME)
}
diff --git a/build/plugins/lib/nots/package_manager/__init__.py b/build/plugins/lib/nots/package_manager/__init__.py
index 20f47fffa18..9827164bda7 100644
--- a/build/plugins/lib/nots/package_manager/__init__.py
+++ b/build/plugins/lib/nots/package_manager/__init__.py
@@ -12,10 +12,9 @@ from .base import (
)
from .base.package_json import PackageJsonWorkspaceError
from .pnpm import PnpmPackageManager
-from .npm import NpmPackageManager
-type PackageManagerType = Literal["pnpm", "npm"]
+type PackageManagerType = Literal["pnpm"]
manager = PnpmPackageManager
@@ -23,8 +22,8 @@ manager = PnpmPackageManager
def get_package_manager_type(key: PackageManagerType) -> type[BasePackageManager]:
if key == "pnpm":
return PnpmPackageManager
- if key == "npm":
- return NpmPackageManager
+ # if key == "npm":
+ # return NpmPackageManager
raise ValueError(f"Invalid package manager key: {key}")
@@ -32,7 +31,6 @@ __all__ = [
"BaseLockfile",
"BasePackageManager",
"PnpmPackageManager",
- "NpmPackageManager",
"PackageJson",
"PackageJsonWorkspaceError",
"PackageManagerCommandError",
diff --git a/build/plugins/lib/nots/package_manager/base/package_json.py b/build/plugins/lib/nots/package_manager/base/package_json.py
index 6789134656d..fd4240b8432 100644
--- a/build/plugins/lib/nots/package_manager/base/package_json.py
+++ b/build/plugins/lib/nots/package_manager/base/package_json.py
@@ -134,9 +134,7 @@ class PackageJson(object):
return None
- # TODO: FBP-1254
- # def get_workspace_dep_spec_paths(self) -> list[tuple[str, str]]:
- def get_workspace_dep_spec_paths(self):
+ def get_workspace_dep_spec_paths(self) -> list[tuple[str, str]]:
"""
Returns names and paths from specifiers of the defined workspace dependencies.
:rtype: list[tuple[str, str]]
@@ -182,7 +180,13 @@ class PackageJson(object):
for name, rel_path in self.get_workspace_dep_spec_paths():
dep_path = os.path.normpath(os.path.join(pj_dir, rel_path))
- dep_pj = PackageJson.load(build_pj_path(dep_path))
+ dep_pj_path = build_pj_path(dep_path)
+ try:
+ dep_pj = PackageJson.load(dep_pj_path)
+ except IOError as e:
+ logger.debug(f"{self.path}: cannot load {name}: {e}. Process dependency as empty package.")
+ dep_pj = PackageJson(dep_pj_path)
+ dep_pj.data = {"name": name}
if name != dep_pj.get_name():
raise PackageJsonWorkspaceError(
diff --git a/build/plugins/lib/nots/package_manager/base/package_manager.py b/build/plugins/lib/nots/package_manager/base/package_manager.py
index ae1a705e765..1ae27a1d242 100644
--- a/build/plugins/lib/nots/package_manager/base/package_manager.py
+++ b/build/plugins/lib/nots/package_manager/base/package_manager.py
@@ -53,13 +53,18 @@ class BasePackageManager(object, metaclass=ABCMeta):
return PackageJson.load(path)
@classmethod
- def load_package_json_from_dir(cls, dir_path):
+ def load_package_json_from_dir(cls, dir_path, empty_if_missing=False):
"""
:param dir_path: path to directory with package.json
:type dir_path: str
:rtype: PackageJson
"""
- return cls.load_package_json(build_pj_path(dir_path))
+ pj_path = build_pj_path(dir_path)
+ if empty_if_missing and not os.path.exists(pj_path):
+ pj = PackageJson(pj_path)
+ pj.data = {}
+ return pj
+ return cls.load_package_json(pj_path)
def _build_package_json(self):
"""
@@ -107,6 +112,10 @@ class BasePackageManager(object, metaclass=ABCMeta):
def build_workspace(self, tarballs_store: str):
pass
+ @abstractmethod
+ def build_ts_proto_auto_workspace(self, deps_mod: str):
+ pass
+
def get_local_peers_from_package_json(self):
"""
Returns paths of direct workspace dependencies (source root related).
@@ -114,16 +123,6 @@ class BasePackageManager(object, metaclass=ABCMeta):
"""
return self.load_package_json_from_dir(self.sources_path).get_workspace_dep_paths(base_path=self.module_path)
- def get_peers_from_package_json(self):
- """
- Returns paths of workspace dependencies (source root related).
- :rtype: list of str
- """
- pj = self.load_package_json_from_dir(self.sources_path)
- prefix_len = len(self.sources_root) + 1
-
- return [p[prefix_len:] for p in pj.get_workspace_map(ignore_self=True).keys()]
-
@timeit
def _exec_command(self, args, cwd: str, include_defaults=True, script_path=None, env={}):
if not self.nodejs_bin_path:
diff --git a/build/plugins/lib/nots/package_manager/pnpm/constants.py b/build/plugins/lib/nots/package_manager/pnpm/constants.py
index 55c05d97326..06afebc84ae 100644
--- a/build/plugins/lib/nots/package_manager/pnpm/constants.py
+++ b/build/plugins/lib/nots/package_manager/pnpm/constants.py
@@ -5,6 +5,7 @@ PNPM_LOCKFILE_FILENAME = "pnpm-lock.yaml"
# This file has a structure same to pnpm-lock.yaml, but all tarballs
# a set relative to the build root.
PNPM_PRE_LOCKFILE_FILENAME = "pre.pnpm-lock.yaml"
+PNPM_BUILD_BACKUP_LOCKFILE_FILENAME = "build.bkp.pnpm-lock.yaml"
# File is to store the last install status hash to avoid installing the same thing
LOCAL_PNPM_INSTALL_HASH_FILENAME = ".__install_hash__"
diff --git a/build/plugins/lib/nots/package_manager/pnpm/package_manager.py b/build/plugins/lib/nots/package_manager/pnpm/package_manager.py
index 84ae84722c2..d321d2adfb6 100644
--- a/build/plugins/lib/nots/package_manager/pnpm/package_manager.py
+++ b/build/plugins/lib/nots/package_manager/pnpm/package_manager.py
@@ -9,7 +9,7 @@ from .constants import (
LOCAL_PNPM_INSTALL_MUTEX_FILENAME,
)
from .lockfile import PnpmLockfile
-from .utils import build_lockfile_path, build_pre_lockfile_path, build_ws_config_path
+from .utils import build_lockfile_path, build_build_backup_lockfile_path, build_pre_lockfile_path, build_ws_config_path
from .workspace import PnpmWorkspace
from ..base import BasePackageManager, PackageManagerError
from ..base.constants import (
@@ -212,7 +212,7 @@ class PnpmPackageManager(BasePackageManager):
json.dump({PNPM_PRE_LOCKFILE_FILENAME: {"hash": pre_pnpm_lockfile_hash}}, f)
@timeit
- def create_node_modules(self, yatool_prebuilder_path=None, local_cli=False, nm_bundle=False):
+ def create_node_modules(self, yatool_prebuilder_path=None, local_cli=False, nm_bundle=False, original_lf_path=None):
"""
Creates node_modules directory according to the lockfile.
"""
@@ -238,7 +238,7 @@ class PnpmPackageManager(BasePackageManager):
self._run_pnpm_install(store_dir, virtual_store_dir, self.build_path, local_cli)
self._run_apply_addons_if_need(yatool_prebuilder_path, virtual_store_dir)
- self._replace_internal_lockfile_with_original(virtual_store_dir)
+ self._restore_original_lockfile(virtual_store_dir, original_lf_path)
if nm_bundle:
bundle_node_modules(
@@ -408,6 +408,22 @@ class PnpmPackageManager(BasePackageManager):
return ws
@timeit
+ def build_ts_proto_auto_workspace(self, deps_mod: str):
+ """
+ :rtype: PnpmWorkspace
+ """
+
+ ws = PnpmWorkspace(build_ws_config_path(self.build_path))
+ ws.packages.add(".")
+ ws.write()
+
+ deps_pre_lockfile_path = build_pre_lockfile_path(os.path.join(self.build_root, deps_mod))
+ pre_lockfile_path = build_pre_lockfile_path(self.build_path)
+ shutil.copyfile(deps_pre_lockfile_path, pre_lockfile_path)
+
+ return ws
+
+ @timeit
def _build_merged_pre_lockfile(self, tarballs_store, dep_paths, local_cli: bool):
"""
:type dep_paths: list of str
@@ -457,15 +473,19 @@ class PnpmPackageManager(BasePackageManager):
)
@timeit
- def _replace_internal_lockfile_with_original(self, virtual_store_dir):
- original_lf_path = build_lockfile_path(self.sources_path)
+ def _restore_original_lockfile(self, virtual_store_dir: str, original_lf_path: str = None):
+ original_lf_path = original_lf_path or build_lockfile_path(self.sources_path)
vs_lf_path = os.path.join(virtual_store_dir, "lock.yaml")
+ build_lf_path = build_lockfile_path(self.build_path)
+ build_bkp_lf_path = build_build_backup_lockfile_path(self.build_path)
shutil.copyfile(original_lf_path, vs_lf_path)
+ shutil.copyfile(build_lf_path, build_bkp_lf_path)
+ shutil.copyfile(original_lf_path, build_lf_path)
@timeit
def _copy_pnpm_patches(self):
- pj = self.load_package_json_from_dir(self.sources_path)
+ pj = self.load_package_json_from_dir(self.build_path)
patched_dependencies: dict[str, str] = pj.data.get("pnpm", {}).get("patchedDependencies", {})
for p in patched_dependencies.values():
diff --git a/build/plugins/lib/nots/package_manager/pnpm/utils.py b/build/plugins/lib/nots/package_manager/pnpm/utils.py
index 9b6b9d80db6..00f76d4e8df 100644
--- a/build/plugins/lib/nots/package_manager/pnpm/utils.py
+++ b/build/plugins/lib/nots/package_manager/pnpm/utils.py
@@ -1,12 +1,21 @@
import os
-from .constants import PNPM_PRE_LOCKFILE_FILENAME, PNPM_LOCKFILE_FILENAME, PNPM_WS_FILENAME
+from .constants import (
+ PNPM_BUILD_BACKUP_LOCKFILE_FILENAME,
+ PNPM_PRE_LOCKFILE_FILENAME,
+ PNPM_LOCKFILE_FILENAME,
+ PNPM_WS_FILENAME,
+)
def build_pre_lockfile_path(p):
return os.path.join(p, PNPM_PRE_LOCKFILE_FILENAME)
+def build_build_backup_lockfile_path(p):
+ return os.path.join(p, PNPM_BUILD_BACKUP_LOCKFILE_FILENAME)
+
+
def build_lockfile_path(p):
return os.path.join(p, PNPM_LOCKFILE_FILENAME)
diff --git a/build/plugins/nots.py b/build/plugins/nots.py
index 14f64d589aa..5ec9f138bfa 100644
--- a/build/plugins/nots.py
+++ b/build/plugins/nots.py
@@ -151,6 +151,11 @@ class NotsUnitType(UnitType):
Setup test recipe to extract peer's output before running tests
"""
+ def on_ts_proto_auto_prepare_deps_configure(self) -> None:
+ """
+ Configure prepare deps for TS_PROTO_AUTO
+ """
+
TS_TEST_FIELDS_BASE = (
df.BinaryPath.normalized,
@@ -317,6 +322,10 @@ def _get_var_name(s: str) -> tuple[bool, str]:
return False, ""
+def _is_real_file(path: str) -> bool:
+ return os.path.isfile(path) and not os.path.islink(path)
+
+
def _build_directives(flags: list[str] | tuple[str], paths: list[str]) -> str:
parts = [p for p in (flags or []) if p]
parts_str = ";".join(parts)
@@ -434,7 +443,7 @@ def _check_nodejs_version(unit: NotsUnitType, major: int) -> None:
def on_peerdir_ts_resource(unit: NotsUnitType, *resources: str) -> None:
from lib.nots.package_manager import BasePackageManager
- pj = BasePackageManager.load_package_json_from_dir(unit.resolve(_get_source_path(unit)))
+ pj = BasePackageManager.load_package_json_from_dir(unit.resolve(_get_source_path(unit)), empty_if_missing=True)
erm_json = _create_erm_json(unit)
dirs = []
@@ -613,7 +622,7 @@ def _setup_eslint(unit: NotsUnitType) -> None:
from lib.nots.package_manager import constants
- peers = _create_pm(unit).get_peers_from_package_json()
+ peers = _create_pm(unit).get_local_peers_from_package_json()
deps = df.CustomDependencies.nots_with_recipies(unit, (peers,), {})[df.CustomDependencies.KEY].split()
if deps:
@@ -678,7 +687,7 @@ def _setup_tsc_typecheck(unit: NotsUnitType) -> None:
from lib.nots.package_manager import constants
- peers = _create_pm(unit).get_peers_from_package_json()
+ peers = _create_pm(unit).get_local_peers_from_package_json()
deps = df.CustomDependencies.nots_with_recipies(unit, (peers,), {})[df.CustomDependencies.KEY].split()
if deps:
@@ -728,7 +737,7 @@ def _setup_stylelint(unit: NotsUnitType) -> None:
test_type = TsTestType.TS_STYLELINT
- peers = _create_pm(unit).get_peers_from_package_json()
+ peers = _create_pm(unit).get_local_peers_from_package_json()
deps = df.CustomDependencies.nots_with_recipies(unit, (peers,), {})[df.CustomDependencies.KEY].split()
if deps:
@@ -804,7 +813,15 @@ def _select_matching_version(
@_with_report_configure_error
def on_prepare_deps_configure(unit: NotsUnitType) -> None:
+ from lib.nots.package_manager.base.utils import build_pj_path
+
pm = _create_pm(unit)
+
+ if not _is_real_file(build_pj_path(pm.sources_path)) and unit.get("TS_PROTO_PREPARE_DEPS") == "yes":
+ # if this is a PREPARE_DEPS for TS_PROTO and there is no package.json - this is TS_PROTO_AUTO
+ unit.on_ts_proto_auto_prepare_deps_configure()
+ return
+
pj = pm.load_package_json_from_dir(pm.sources_path)
has_deps = pj.has_dependencies()
local_cli = unit.get("TS_LOCAL_CLI") == "yes"
@@ -822,6 +839,18 @@ def on_prepare_deps_configure(unit: NotsUnitType) -> None:
unit.set(["_PREPARE_DEPS_CMD", "$_PREPARE_NO_DEPS_CMD"])
+@_with_report_configure_error
+def on_ts_proto_auto_prepare_deps_configure(unit: NotsUnitType) -> None:
+ deps_path = unit.get("_TS_PROTO_AUTO_DEPS")
+ unit.onpeerdir([deps_path])
+
+ pm = _create_pm(unit)
+ local_cli = unit.get("TS_LOCAL_CLI") == "yes"
+ _, outs, _ = pm.calc_prepare_deps_inouts_and_resources(store_path="", has_deps=False, local_cli=local_cli)
+ __set_append(unit, "_PREPARE_DEPS_INOUTS", _build_directives(["hide", "output"], sorted(outs)))
+ unit.set(["_PREPARE_DEPS_TS_PROTO_AUTO_FLAG", f"--ts-proto-auto-deps-path {deps_path}"])
+
+
def _node_modules_bundle_needed(unit: NotsUnitType, arc_path: str) -> bool:
if unit.get("_WITH_NODE_MODULES") == "yes":
return True
@@ -931,7 +960,7 @@ def on_ts_test_for_configure(
from lib.nots.package_manager import constants
- peers = _create_pm(unit).get_peers_from_package_json()
+ peers = _create_pm(unit).get_local_peers_from_package_json()
deps = df.CustomDependencies.nots_with_recipies(unit, (peers,), {})[df.CustomDependencies.KEY].split()
if deps: