aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorzaverden <zaverden@yandex-team.com>2024-02-08 07:21:59 +0300
committerAlexander Smirnov <alex@ydb.tech>2024-02-09 19:18:48 +0300
commitcddbfcc9622e34652d736b18ff4ddc37e900ed61 (patch)
tree50fb987cad27f90315162cb0d562c9e5c83fa335
parentdd72a9a4ac2bed957bbdba2b1daf415a6aeffdb8 (diff)
downloadydb-cddbfcc9622e34652d736b18ff4ddc37e900ed61.tar.gz
feat(conf): move nots from internal
-rw-r--r--build/conf/ts/node_modules.conf113
-rw-r--r--build/conf/ts/ts.conf125
-rw-r--r--build/conf/ts/ts_next.conf59
-rw-r--r--build/conf/ts/ts_package.conf36
-rw-r--r--build/conf/ts/ts_test.conf159
-rw-r--r--build/conf/ts/ts_tsc.conf49
-rw-r--r--build/conf/ts/ts_vite.conf63
-rw-r--r--build/conf/ts/ts_webpack.conf62
-rw-r--r--build/plugins/lib/nots/__init__.py0
-rw-r--r--build/plugins/lib/nots/a.yaml2
-rw-r--r--build/plugins/lib/nots/erm_json_lite.py122
-rw-r--r--build/plugins/lib/nots/package_manager/__init__.py16
-rw-r--r--build/plugins/lib/nots/package_manager/base/__init__.py20
-rw-r--r--build/plugins/lib/nots/package_manager/base/constants.py9
-rw-r--r--build/plugins/lib/nots/package_manager/base/lockfile.py75
-rw-r--r--build/plugins/lib/nots/package_manager/base/node_modules_bundler.py66
-rw-r--r--build/plugins/lib/nots/package_manager/base/package_json.py222
-rw-r--r--build/plugins/lib/nots/package_manager/base/package_manager.py155
-rw-r--r--build/plugins/lib/nots/package_manager/base/tests/package_json.py238
-rw-r--r--build/plugins/lib/nots/package_manager/base/tests/utils.py15
-rw-r--r--build/plugins/lib/nots/package_manager/base/tests/ya.make14
-rw-r--r--build/plugins/lib/nots/package_manager/base/utils.py52
-rw-r--r--build/plugins/lib/nots/package_manager/base/ya.make23
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/__init__.py14
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/constants.py2
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/lockfile.py174
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/package_manager.py262
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/tests/test_lockfile.py404
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/tests/test_workspace.py68
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/tests/ya.make15
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/utils.py11
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/workspace.py81
-rw-r--r--build/plugins/lib/nots/package_manager/pnpm/ya.make24
-rw-r--r--build/plugins/lib/nots/package_manager/ya.make19
-rw-r--r--build/plugins/lib/nots/semver/__init__.py7
-rw-r--r--build/plugins/lib/nots/semver/semver.py244
-rw-r--r--build/plugins/lib/nots/semver/tests/test_version.py269
-rw-r--r--build/plugins/lib/nots/semver/tests/test_version_range.py106
-rw-r--r--build/plugins/lib/nots/semver/tests/ya.make14
-rw-r--r--build/plugins/lib/nots/semver/ya.make14
-rw-r--r--build/plugins/lib/nots/typescript/__init__.py10
-rw-r--r--build/plugins/lib/nots/typescript/tests/test_ts_config.py321
-rw-r--r--build/plugins/lib/nots/typescript/tests/test_ts_glob.py118
-rw-r--r--build/plugins/lib/nots/typescript/tests/ya.make14
-rw-r--r--build/plugins/lib/nots/typescript/ts_config.py282
-rw-r--r--build/plugins/lib/nots/typescript/ts_errors.py10
-rw-r--r--build/plugins/lib/nots/typescript/ts_glob.py79
-rw-r--r--build/plugins/lib/nots/typescript/ya.make26
-rw-r--r--build/plugins/lib/nots/ya.make22
-rw-r--r--build/plugins/nots.py611
-rw-r--r--build/plugins/ya.make2
-rw-r--r--build/ymake.core.conf1
52 files changed, 4919 insertions, 0 deletions
diff --git a/build/conf/ts/node_modules.conf b/build/conf/ts/node_modules.conf
new file mode 100644
index 0000000000..dd4aaaf0e4
--- /dev/null
+++ b/build/conf/ts/node_modules.conf
@@ -0,0 +1,113 @@
+PNPM_ROOT=
+PNPM_SCRIPT=$PNPM_ROOT/node_modules/pnpm/dist/pnpm.cjs
+NPM_CONTRIBS_PATH=contrib/typescript
+# inputs list, just paths, deprecated (use _NODE_MODULES_INOUTS instead), used only for eslint/jest/hermione
+_NODE_MODULES_INS=
+# outputs list, just paths, deprecated (use _NODE_MODULES_INOUTS instead), used only for eslint/jest/hermione
+_NODE_MODULES_OUTS=
+# combined input/outputs records as list of directives ${input;hide:<path>} ${output;hide:<path>}, used in builders
+_NODE_MODULES_INOUTS=
+_YATOOL_PREBUILDER_ARG=
+
+# TOUCH_UNIT is required to create module identity file.
+# We can "call" macro as `$_GET_NODE_MODULES_INS_OUTS(...)`. in this case we will get .CMD from it.
+# This is the only way to process a variable data as an array.
+# ${output;hide:_NODE_MODULES_OUTS} does not produce list of paths, but a single value (space-separated paths)
+_NODE_MODULES_CMD=$TOUCH_UNIT \
+ && $NOTS_TOOL $NOTS_TOOL_BASE_ARGS create-node-modules \
+ $_GET_NODE_MODULES_INS_OUTS(IN $_NODE_MODULES_INS OUT $_NODE_MODULES_OUTS) \
+ ${kv;hide:"pc magenta"} ${kv;hide:"p TS_NM"}
+
+
+module _NODE_MODULES_BASE: _BARE_UNIT {
+ .CMD=_NODE_MODULES_CMD
+ # ignore SRCS macro, use TS_FILES instead of FILES
+ .ALIASES=SRCS=_NOOP_MACRO FILES=TS_FILES
+ # Propagates peers to related modules
+ .PEERDIR_POLICY=as_build_from
+ .NODE_TYPE=Bundle
+
+ # TODO: remove this. YMAKE-1096 / FBP-1184
+ _NEVERCACHE()
+
+ # we have several modules in the same dir (.PEERDIRSELF=NODE_MODULES in BUILD)
+ # we need different names for module identity file
+ # .fake tells builder to not materialize it in results
+ SET(MODULE_SUFFIX .n_m.fake)
+    # .NODE_TYPE=Bundle is required for peers propagation, but it also affects
+    # how pic/nopic graphs are merged. Here we can override this merging behaviour
+ SET(MODULE_TYPE LIBRARY)
+ # define own tag
+ SET(MODULE_TAG NODE_MODULES)
+ # what modules it can PEERDIR to
+ SET(PEERDIR_TAGS TS NPM_CONTRIBS)
+ # do not include it into "results" of graph
+ DISABLE(START_TARGET)
+
+ # we read package.json and erm-packages.json during configuration
+ SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${CURDIR}/pnpm-lock.yaml ${CURDIR}/package.json ${ARCADIA_ROOT}/$ERM_PACKAGES_PATH)
+
+ PEERDIR($NPM_CONTRIBS_PATH)
+ # PEERDIR to the right version of nodejs and pnpm
+ _PEERDIR_TS_RESOURCE(nodejs pnpm)
+
+ # run py logic
+ _NODE_MODULES_CONFIGURE()
+}
+
+# called in on_node_modules_configure
+macro _SET_NODE_MODULES_INS_OUTS(IN{input}[], OUT{output}[]) {
+ SET(_NODE_MODULES_INS $IN)
+ SET(_NODE_MODULES_OUTS $OUT)
+}
+
+macro _GET_NODE_MODULES_INS_OUTS(IN{input}[], OUT{output}[]) {
+ .CMD=${input;hide:IN} ${output;hide:OUT}
+}
+
+
+### @usage: NPM_CONTRIBS() # internal
+###
+### Defines special module that provides contrib tarballs from internal npm registry.
+###
+### @see [FROM_NPM_LOCKFILES()](#macro_FROM_NPM_LOCKFILES)
+module NPM_CONTRIBS: _BARE_UNIT {
+ .CMD=TOUCH_UNIT
+ .PEERDIR_POLICY=as_build_from
+ .FINAL_TARGET=no
+ .ALLOWED=FROM_NPM_LOCKFILES
+ .RESTRICTED=PEERDIR
+ .EXTS=_ # Ignore all files, so module is not affected by FROM_NPM output (.EXTS=* is inherited from _BARE_UNIT)
+
+ SET(MODULE_TAG NPM_CONTRIBS)
+
+ # .fake tells builder to not materialize it in results
+ SET(MODULE_SUFFIX .fake)
+}
+
+### @usage: FROM_NPM_LOCKFILES(LOCKFILES...) # internal
+###
+### Defines lockfile list for `NPM_CONTRIBS` module.
+###
+### @see [NPM_CONTRIBS()](#module_NPM_CONTRIBS)
+macro FROM_NPM_LOCKFILES(LOCKFILES...) {
+ SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS $LOCKFILES)
+ # See implementation in build/plugins/nots.py
+ _FROM_NPM_LOCKFILES($LOCKFILES)
+}
+
+FROM_NPM_CWD=$ARCADIA_BUILD_ROOT/$NPM_CONTRIBS_PATH
+macro _FROM_NPM(TARBALL_URL, SKY_ID, INTEGRITY, INTEGRITY_ALGO, TARBALL_PATH) {
+ .CMD=${cwd:FROM_NPM_CWD} $YMAKE_PYTHON ${input:"build/scripts/fetch_from_npm.py"} ${input;hide:"build/scripts/fetch_from.py"} ${input;hide:"build/scripts/sky.py"} --tarball-url $TARBALL_URL --sky-id $SKY_ID --integrity $INTEGRITY --integrity-algorithm $INTEGRITY_ALGO --copy-to ${output;noauto:TARBALL_PATH} ${requirements;hide:"network:full"} ${kv;hide:"p TS_FNPM"} ${kv;hide:"pc magenta"}
+ # we want output to be available for other modules without affecting NPM_CONTRIBS
+ # we need to expose it (some details in https://st.yandex-team.ru/YMAKE-34)
+ _EXPOSE($TARBALL_PATH)
+}
+
+macro _TS_ADD_NODE_MODULES_FOR_BUILDER() {
+ # Provide downloaded dependencies in `/contrib/typescript/-`
+ PEERDIR($NPM_CONTRIBS_PATH)
+
+ # Calculate inputs and outputs of node_modules, fill `_NODE_MODULES_INOUTS` variable
+ _NODE_MODULES_CONFIGURE()
+}
diff --git a/build/conf/ts/ts.conf b/build/conf/ts/ts.conf
new file mode 100644
index 0000000000..bd8e45cd81
--- /dev/null
+++ b/build/conf/ts/ts.conf
@@ -0,0 +1,125 @@
+NODEJS_ROOT=
+NODEJS_BIN=$NODEJS_ROOT/node
+
+TS_TRACE=no
+TS_LOCAL_CLI=no
+# Use outdir defined in tsconfig
+TS_CONFIG_USE_OUTDIR=
+
+NOTS_TOOL=${tool:"devtools/frontend_build_platform/nots/builder"}
+
+# Arguments for the all commands of the `nots/builder`, passed before the command
+NOTS_TOOL_BASE_ARGS=\
+ --arcadia-root $ARCADIA_ROOT \
+ --arcadia-build-root $ARCADIA_BUILD_ROOT \
+ --moddir $MODDIR \
+ --local-cli $TS_LOCAL_CLI \
+ --nodejs-bin $NODEJS_BIN \
+ --pnpm-script $PNPM_SCRIPT \
+ --contribs $NPM_CONTRIBS_PATH \
+ --trace $TS_TRACE \
+ --verbose $TS_LOG \
+ $_YATOOL_PREBUILDER_ARG
+
+# Arguments for builders' commands, passed after the command
+NOTS_TOOL_COMMON_BUILDER_ARGS=\
+ --output-file ${output:TS_OUTPUT_FILE} \
+ --tsconfigs $TS_CONFIG_PATH \
+ --vcs-info "${VCS_INFO_FILE}"
+
+ERM_PACKAGES_PATH=devtools/frontend_build_platform/erm/erm-packages.json
+
+TS_CONFIG_PATH=tsconfig.json
+TS_OUTPUT_FILE=output.tar
+TS_EXCLUDE_DIR_GLOB=(.idea|.vscode|node_modules)/**/*
+TS_COMMON_OUTDIR_GLOB=(build|dist|bundle|$WEBPACK_OUTPUT_DIR|$TS_NEXT_OUTPUT_DIR|$VITE_OUTPUT_DIR)/**/*
+TS_GLOB_EXCLUDE_ADDITIONAL=
+
+module _TS_BASE_UNIT: _BARE_UNIT {
+ # Propagates peers to related modules
+ .PEERDIR_POLICY=as_build_from
+ .NODE_TYPE=Bundle
+ # Needed for DEPENDS in tests to choose right submodule from multimodule
+ .FINAL_TARGET=yes
+ # use TS_FILES instead of FILES
+ .ALIASES=FILES=TS_FILES
+
+ # TODO: remove this. YMAKE-1096 / FBP-1184
+ _NEVERCACHE()
+
+    # .NODE_TYPE=Bundle is required for peers propagation, but it also affects
+    # how pic/nopic graphs are merged. Here we can override this merging behaviour
+ SET(MODULE_TYPE LIBRARY)
+ # Include processor works only for TS tag
+ SET(MODULE_TAG TS)
+ # TS should peer to TS
+ SET(PEERDIR_TAGS TS)
+ # .fake tells builder to not materialize it in results
+ SET(MODULE_SUFFIX .ts.fake)
+
+ # We read erm-packages.json during configuration, so we have to include it to configuration cache key
+ SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${ARCADIA_ROOT}/$ERM_PACKAGES_PATH)
+
+ # PEERDIR that reads required version of tool from package.json
+ _PEERDIR_TS_RESOURCE(nodejs pnpm)
+}
+
+
+# tag:test
+ESLINT_CONFIG_PATH=.eslintrc.js
+_TS_LINT_SRCS_VALUE=
+### _TS_CONFIG_EPILOGUE() # internal
+###
+### This macro executes macros which should be invoked after all user specified macros in the ya.make file
+macro _TS_CONFIG_EPILOGUE() {
+ ### Fill $TS_GLOB_FILES with potential inputs.
+ ### It will be reduced later in _TS_CONFIGURE based on `tsconfig.json` rules.
+ _GLOB(TS_GLOB_FILES $TS_GLOB_INCLUDE EXCLUDE $TS_GLOB_EXCLUDE)
+
+ _GLOB(_TS_LINT_SRCS_VALUE **/*.(ts|tsx|js|jsx) EXCLUDE $TS_EXCLUDE_DIR_GLOB $TS_COMMON_OUTDIR_GLOB $TS_GLOB_EXCLUDE_ADDITIONAL)
+
+ _SETUP_EXTRACT_NODE_MODULES_RECIPE(${MODDIR})
+}
+
+# Used as inputs in TS_COMPILE through `$_AS_HIDDEN_INPUTS(IN $TS_INPUT_FILES)`
+TS_INPUT_FILES=
+
+# List of the files, filled in _TS_CONFIG_EPILOGUE. Will be reduced in _TS_CONFIGURE macro to TS_INPUT_FILES.
+TS_GLOB_FILES=
+
+# Hardcoded "include" list (all other files will be ignored)
+TS_GLOB_INCLUDE=**/*
+
+# Hardcoded "exclude" list (reasonable default).
+TS_GLOB_EXCLUDE=$TS_CONFIG_PATH \
+ ya.make a.yaml \
+ $TS_EXCLUDE_DIR_GLOB \
+ $TS_COMMON_OUTDIR_GLOB \
+ $TS_GLOB_EXCLUDE_ADDITIONAL \
+ package.json pnpm-lock.yaml .* \
+ tests/**/* **/*.(test|spec).(ts|tsx|js|jsx)
+
+
+# Ugly hack for using inputs from the variable
+macro _AS_HIDDEN_INPUTS(IN{input}[]) {
+ # "context=TEXT" exclude file from the "include processing"
+ .CMD=${input;hide;context=TEXT:IN}
+}
+
+
+_TS_FILES_COPY_CMD=
+
+### TS_FILES(Files...)
+###
+### Adds files to output as is. Similar to FILES but works for TS build modules
+macro TS_FILES(Files...) {
+ _TS_FILES($Files)
+}
+
+@import "${CONF_ROOT}/conf/ts/node_modules.conf"
+@import "${CONF_ROOT}/conf/ts/ts_next.conf"
+@import "${CONF_ROOT}/conf/ts/ts_package.conf"
+@import "${CONF_ROOT}/conf/ts/ts_test.conf"
+@import "${CONF_ROOT}/conf/ts/ts_tsc.conf"
+@import "${CONF_ROOT}/conf/ts/ts_vite.conf"
+@import "${CONF_ROOT}/conf/ts/ts_webpack.conf"
diff --git a/build/conf/ts/ts_next.conf b/build/conf/ts/ts_next.conf
new file mode 100644
index 0000000000..0bf65e3e2d
--- /dev/null
+++ b/build/conf/ts/ts_next.conf
@@ -0,0 +1,59 @@
+TS_NEXT_OUTPUT_DIR=.next
+TS_NEXT_CONFIG_PATH=next.config.js
+
+TS_NEXT_CMD=$TOUCH_UNIT \
+ && $_TS_FILES_COPY_CMD \
+ && $ADD_VCS_INFO_FILE_CMD \
+ && $NOTS_TOOL $NOTS_TOOL_BASE_ARGS build-next $NOTS_TOOL_COMMON_BUILDER_ARGS \
+ --bundler-config-path ${input:TS_NEXT_CONFIG_PATH} \
+ --output-dir ${TS_NEXT_OUTPUT_DIR} \
+ $_NODE_MODULES_INOUTS ${hide:PEERS} \
+ ${input;hide:"package.json"} ${TS_CONFIG_FILES} $_AS_HIDDEN_INPUTS(IN $TS_INPUT_FILES) \
+ ${output;hide:"package.json"} \
+ ${kv;hide:"pc magenta"} ${kv;hide:"p TS_NXT"}
+
+### @usage: TS_NEXT()
+###
+### NextJS app, built with `next build`. Requires sources to be under /src folder.
+### /pages and /app on the root level are not supported.
+### Build results are output.tar.
+###
+### @example
+###
+### TS_NEXT()
+### END()
+###
+multimodule TS_NEXT {
+ module BUILD: _TS_BASE_UNIT {
+ .CMD=TS_NEXT_CMD
+ .EPILOGUE=_TS_CONFIG_EPILOGUE
+
+ # by default multimodule overrides inherited MODULE_TAG to submodule name (BUILD in this case)
+ # but we have to set it to TS for include processor to work
+ SET(MODULE_TAG TS)
+
+ _PEERDIR_TS_RESOURCE(next)
+
+ DISABLE(TS_CONFIG_DEDUCE_OUT)
+ DISABLE(TS_CONFIG_USE_OUTDIR)
+
+ _TS_CONFIGURE($TS_CONFIG_PATH)
+
+ SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${CURDIR}/package.json ${CURDIR}/pnpm-lock.yaml ${CURDIR}/${TS_CONFIG_PATH})
+ _TS_ADD_NODE_MODULES_FOR_BUILDER()
+ }
+}
+
+macro TS_NEXT_CONFIG(Path) {
+ SET(TS_NEXT_CONFIG_PATH $Path)
+}
+
+
+### @usage: TS_NEXT_OUTPUT(DirName)
+###
+### Macro sets the output directory name for TS_NEXT module.
+###
+### - DirName - output directory name (".next" by default).
+macro TS_NEXT_OUTPUT(DirName) {
+ SET(TS_NEXT_OUTPUT_DIR $DirName)
+}
diff --git a/build/conf/ts/ts_package.conf b/build/conf/ts/ts_package.conf
new file mode 100644
index 0000000000..bad2085a62
--- /dev/null
+++ b/build/conf/ts/ts_package.conf
@@ -0,0 +1,36 @@
+
+TS_PACK=$TOUCH_UNIT \
+ && $NOTS_TOOL $NOTS_TOOL_BASE_ARGS build-package $_NODE_MODULES_INOUTS \
+ && $COPY_CMD ${input:"package.json"} ${output:"package.json"} \
+ && $_TS_FILES_COPY_CMD \
+ ${kv;hide:"p TS_PKG"} ${kv;hide:"pc magenta"}
+
+### @usage: TS_PACKAGE()
+###
+### The TypeScript/JavaScript library module, that does not need any compilation,
+### and is just a set of files and NPM dependencies. List required files in TS_FILES macro.
+### `package.json` is included by default.
+###
+### @example
+###
+### TS_PACKAGE()
+### TS_FILES(
+### eslint.config.json
+### prettierrc.json
+### )
+### END()
+###
+multimodule TS_PACKAGE {
+ module BUILD: _TS_BASE_UNIT {
+ .CMD=TS_PACK
+ .ALLOWED=TS_FILES
+ .ALIASES=FILES=TS_FILES SRCS=TS_FILES
+
+ # by default multimodule overrides inherited MODULE_TAG to submodule name (BUILD in this case)
+ # but we have to set it to TS for include processor to work
+ SET(MODULE_TAG TS)
+
+ SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${CURDIR}/package.json ${CURDIR}/pnpm-lock.yaml)
+ _TS_ADD_NODE_MODULES_FOR_BUILDER()
+ }
+}
diff --git a/build/conf/ts/ts_test.conf b/build/conf/ts/ts_test.conf
new file mode 100644
index 0000000000..86ab3431c8
--- /dev/null
+++ b/build/conf/ts/ts_test.conf
@@ -0,0 +1,159 @@
+TS_TEST_EXTENSION=
+TS_TEST_EXTRA_SRCS_MASK=
+
+TS_TEST_CONFIG_PATH=
+TS_TEST_NM=
+
+# We have to rename node_modules.tar to workspace_node_modules.tar,
+# so TS_TEST_JEST module has it's own unique output.
+# TS_TEST_JEST_FOR module has to output all files required for test run.
+TS_TEST_JEST_CMD=$TOUCH_UNIT \
+ && $NOTS_TOOL $NOTS_TOOL_BASE_ARGS create-node-modules --moddir $TS_TEST_FOR_PATH \
+ $_NODE_MODULES_INOUTS ${hide:PEERS} \
+ && ${cwd:BINDIR} $MOVE_FILE $TS_TEST_NM ${output:"workspace_node_modules.tar"} \
+ ${kv;hide:"p TS_JST"} ${kv;hide:"pc magenta"}
+
+### @usage: TS_TEST_JEST_FOR(Path)
+###
+### Defines testing module with jest test runner.
+###
+### @example
+###
+### TS_TEST_JEST_FOR(path/to/module)
+### TS_TEST_SRCS(../src)
+### TS_TEST_CONFIG(../jest.config.js)
+### END()
+###
+module TS_TEST_JEST_FOR: _TS_TEST_BASE {
+ .CMD=TS_TEST_JEST_CMD
+
+ # for multimodule peers we should choose NODE_MODULES
+ SET(PEERDIR_TAGS NODE_MODULES)
+
+ # compatibility with old TS_TEST_SRCS
+ SET(TS_TEST_EXTENSION test.(ts|tsx|js|jsx))
+ SET(TS_TEST_EXTRA_SRCS_MASK /**/__mocks__/*)
+
+ _PEERDIR_TS_RESOURCE(nodejs pnpm jest)
+ _TS_TEST_FOR_CONFIGURE(jest jest.config.js workspace_node_modules.tar)
+
+ _TS_ADD_NODE_MODULES_FOR_BUILDER()
+}
+
+TS_TEST_HERMIONE_CMD=$TOUCH_UNIT \
+ && ${cwd:BINDIR} $MOVE_FILE ${input:TS_TEST_NM} ${output:"workspace_node_modules.tar"} \
+ ${kv;hide:"p TSHRM"} ${kv;hide:"pc magenta"}
+
+### @usage: TS_TEST_HERMIONE_FOR(Path)
+###
+### Defines testing module with hermione test runner.
+###
+### @example
+###
+### TS_TEST_HERMIONE_FOR(path/to/module)
+### TS_TEST_SRCS(../src)
+### TS_TEST_CONFIG(../hermione.conf.js)
+### END()
+###
+module TS_TEST_HERMIONE_FOR: _TS_TEST_BASE {
+ .CMD=TS_TEST_HERMIONE_CMD
+
+ # for multimodule peers we should choose TS
+ SET(PEERDIR_TAGS TS)
+
+ # compatibility with old TS_TEST_SRCS
+ SET(TS_TEST_EXTENSION hermione.(ts|js))
+
+ _DEPENDS_ON_MOD()
+ _PEERDIR_TS_RESOURCE(nodejs pnpm typescript hermione)
+ _TS_TEST_FOR_CONFIGURE(hermione .hermione.conf.js workspace_node_modules.tar)
+}
+
+module _TS_TEST_BASE: _BARE_UNIT {
+ # ignore SRCS macro
+ .ALIASES=SRCS=_NOOP_MACRO
+ # use this parser to get module args in $MODULE_ARGS_RAW
+ .ARGS_PARSER=Raw
+ .NODE_TYPE=Program
+
+ # TODO: remove this. YMAKE-1096 / FBP-1184
+ _NEVERCACHE()
+
+ # .fake tells builder to not materialize it in results
+ SET(MODULE_SUFFIX .ts_test.fake)
+ # include processor works only for TS tag
+ SET(MODULE_TAG TS)
+ # we read erm-packages.json during configuration, so we have to include it to configuration cache key
+ SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${ARCADIA_ROOT}/$ERM_PACKAGES_PATH)
+
+ # parse module args
+ _TS_TEST_FOR_ARGS($MODULE_ARGS_RAW)
+
+ # we don't want to have TS outputs for tests
+ DISABLE(TS_CONFIG_DEDUCE_OUT)
+}
+
+macro _TS_TEST_FOR_ARGS(FOR_MOD, RELATIVE?"${CURDIR}":"${ARCADIA_ROOT}") {
+ # we read testing modules' package.json during configuration,
+ # so we have to include it to configuration cache key
+ SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS $RELATIVE/$FOR_MOD/package.json)
+ SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS $RELATIVE/$FOR_MOD/pnpm-lock.yaml)
+ _VALIDATE_TS_TEST_FOR_ARGS($FOR_MOD $RELATIVE)
+ _SET_TS_TEST_FOR_VARS($FOR_MOD)
+}
+
+macro _SETUP_EXTRACT_NODE_MODULES_RECIPE(FOR_PATH) {
+ DEPENDS(devtools/frontend_build_platform/nots/recipes/extract_node_modules)
+ USE_RECIPE(devtools/frontend_build_platform/nots/recipes/extract_node_modules/recipe $FOR_PATH workspace_node_modules.tar)
+}
+
+macro _SETUP_EXTRACT_PEER_TARS_RECIPE(FOR_PATH) {
+ DEPENDS(devtools/frontend_build_platform/nots/recipes/extract_peer_tars)
+ USE_RECIPE(devtools/frontend_build_platform/nots/recipes/extract_peer_tars/recipe $FOR_PATH)
+}
+
+
+### @usage: TS_TEST_CONFIG(Path)
+###
+### Macro sets the path to configuration file of the test runner.
+###
+### - Path - path to the config file.
+macro TS_TEST_CONFIG(Path) {
+ SET(TS_TEST_CONFIG_PATH $Path)
+}
+
+
+_TS_TEST_SRCS_VALUE=
+_TS_TEST_EXTRA_SRCS_VALUE=
+### @usage: TS_TEST_SRCS(DIRS...)
+###
+### Macro to define directories where the test source files should be located.
+###
+### - DIRS... - directories.
+macro TS_TEST_SRCS(DIRS...) {
+ _GLOB(_TS_TEST_SRCS_VALUE ${suf=/**/*.$TS_TEST_EXTENSION:DIRS})
+ SRCS($_TS_TEST_SRCS_VALUE)
+
+ _GLOB(_TS_TEST_EXTRA_SRCS_VALUE ${suf=$TS_TEST_EXTRA_SRCS_MASK:DIRS})
+ SRCS($_TS_TEST_EXTRA_SRCS_VALUE)
+}
+
+
+_TS_TEST_DATA_VALUE=
+_TS_TEST_DATA_DIRS_RENAME_VALUE=
+### @usage: TS_TEST_DATA([RENAME] GLOBS...)
+###
+### Macro to add tests data (i.e. snapshots) used in testing to a bindir from curdir.
+### Creates symbolic links to directories of files found by the specified globs.
+###
+### Parameters:
+### - RENAME - adds ability to rename paths for tests data from curdir to bindir.
+### For example if your tested module located on "module" path and tests data in "module/tests_data".
+### Then you can be able to rename "tests_data" folder to something else - `RENAME tests_data:example`.
+### As a result in your bindir will be created folder - "module/example" which is a symbolic link on "module/tests_data" in curdir.
+### It is possible to specify multiple renaming rules in the following format "dir1:dir2;dir3/foo:dir4/bar", where "dir1" and "dir3" folders in curdir.
+### - GLOBS... - globs to tests data files, symbolic links will be created to their folders. For example - "tests_data/**/*".
+macro TS_TEST_DATA(RENAME="", GLOBS...) {
+ _GLOB(_TS_TEST_DATA_VALUE $GLOBS)
+ SET(_TS_TEST_DATA_DIRS_RENAME_VALUE $RENAME)
+}
diff --git a/build/conf/ts/ts_tsc.conf b/build/conf/ts/ts_tsc.conf
new file mode 100644
index 0000000000..d76b5088c8
--- /dev/null
+++ b/build/conf/ts/ts_tsc.conf
@@ -0,0 +1,49 @@
+TS_TSC_CMD=$TOUCH_UNIT \
+ && $_TS_FILES_COPY_CMD \
+ && $NOTS_TOOL $NOTS_TOOL_BASE_ARGS build-tsc $NOTS_TOOL_COMMON_BUILDER_ARGS \
+ $_NODE_MODULES_INOUTS ${hide:PEERS} \
+ ${input;hide:"package.json"} ${TS_CONFIG_FILES} $_AS_HIDDEN_INPUTS(IN $TS_INPUT_FILES) \
+ ${output;hide:"package.json"} \
+ ${kv;hide:"pc magenta"} ${kv;hide:"p TS_TSC"}
+
+### @usage: TS_TSC([name])
+###
+### The TypeScript/JavaScript library module, compiles TypeScript sources to JavaScript using tsc.
+### Build results are JavaScript files, typings and source mappings (depending on local tsconfig.json settings).
+###
+### @example
+###
+### TS_TSC()
+###
+### END()
+###
+multimodule TS_TSC {
+ module BUILD: _TS_BASE_UNIT {
+ .CMD=TS_TSC_CMD
+ .EPILOGUE=_TS_CONFIG_EPILOGUE
+
+ # by default multimodule overrides inherited MODULE_TAG to submodule name (BUILD in this case)
+ # but we have to set it to TS for include processor to work
+ SET(MODULE_TAG TS)
+
+ _PEERDIR_TS_RESOURCE(typescript)
+
+ DISABLE(TS_CONFIG_DEDUCE_OUT)
+ ENABLE(TS_CONFIG_USE_OUTDIR)
+
+ _TS_CONFIGURE($TS_CONFIG_PATH)
+
+ SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${CURDIR}/package.json ${CURDIR}/pnpm-lock.yaml ${CURDIR}/${TS_CONFIG_PATH})
+ _TS_ADD_NODE_MODULES_FOR_BUILDER()
+ }
+}
+
+### @usage: TS_LIBRARY([name])
+###
+### deprecated, use TS_TSC instead
+module TS_LIBRARY: _BARE_UNIT {
+ MESSAGE(TS_LIBRARY has been renamed to TS_TSC)
+    MESSAGE(To update the project edit "ya.make" manually or run:)
+ MESSAGE(FATAL_ERROR ya project macro replace "TS_LIBRARY=TS_TSC")
+}
+
diff --git a/build/conf/ts/ts_vite.conf b/build/conf/ts/ts_vite.conf
new file mode 100644
index 0000000000..f5f675d8ee
--- /dev/null
+++ b/build/conf/ts/ts_vite.conf
@@ -0,0 +1,63 @@
+VITE_OUTPUT_DIR=dist
+VITE_CONFIG_PATH=vite.config.ts
+
+TS_VITE_CMD=$TOUCH_UNIT \
+ && $_TS_FILES_COPY_CMD \
+ && $ADD_VCS_INFO_FILE_CMD \
+ && $NOTS_TOOL $NOTS_TOOL_BASE_ARGS build-vite $NOTS_TOOL_COMMON_BUILDER_ARGS \
+ --bundler-config-path ${input:VITE_CONFIG_PATH} \
+ --output-dir ${VITE_OUTPUT_DIR} \
+ $_NODE_MODULES_INOUTS ${hide:PEERS} \
+ ${input;hide:"package.json"} ${TS_CONFIG_FILES} $_AS_HIDDEN_INPUTS(IN $TS_INPUT_FILES) \
+ ${output;hide:"package.json"} \
+ ${kv;hide:"pc magenta"} ${kv;hide:"p TS_VIT"}
+
+### @usage: VITE_OUTPUT(DirName)
+###
+### Macro sets the output directory name for TS_VITE module.
+###
+### - DirName - output directory name ("dist" by default).
+macro VITE_OUTPUT(DirName) {
+ SET(VITE_OUTPUT_DIR $DirName)
+}
+
+### @usage: TS_VITE([name])
+###
+### The Vite bundle, bundles JavaScript code.
+### Build results are packed as `output.tar`.
+###
+### @example
+###
+### TS_VITE()
+### END()
+###
+multimodule TS_VITE {
+ module BUILD: _TS_BASE_UNIT {
+ .CMD=TS_VITE_CMD
+ .EPILOGUE=_TS_CONFIG_EPILOGUE
+
+ # by default multimodule overrides inherited MODULE_TAG to submodule name (BUILD in this case)
+ # but we have to set it to TS for include processor to work
+ SET(MODULE_TAG TS)
+
+ _PEERDIR_TS_RESOURCE(vite)
+
+ DISABLE(TS_CONFIG_DEDUCE_OUT)
+ DISABLE(TS_CONFIG_USE_OUTDIR)
+
+ _TS_CONFIGURE($TS_CONFIG_PATH)
+
+ SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${CURDIR}/package.json ${CURDIR}/pnpm-lock.yaml ${CURDIR}/${TS_CONFIG_PATH})
+ _TS_ADD_NODE_MODULES_FOR_BUILDER()
+ }
+}
+
+
+### @usage: TS_VITE_BUNDLE([name])
+###
+### deprecated, use TS_VITE instead
+module TS_VITE_BUNDLE: _BARE_UNIT {
+ MESSAGE(TS_VITE_BUNDLE has been renamed to TS_VITE)
+    MESSAGE(To update the project edit "ya.make" manually or run:)
+ MESSAGE(FATAL_ERROR ya project macro replace "TS_VITE_BUNDLE=TS_VITE")
+}
diff --git a/build/conf/ts/ts_webpack.conf b/build/conf/ts/ts_webpack.conf
new file mode 100644
index 0000000000..36a1bcebf8
--- /dev/null
+++ b/build/conf/ts/ts_webpack.conf
@@ -0,0 +1,62 @@
+WEBPACK_OUTPUT_DIR=bundle
+WEBPACK_CONFIG_PATH=webpack.config.js
+
+TS_WEBPACK_CMD=$TOUCH_UNIT \
+ && $_TS_FILES_COPY_CMD \
+ && $ADD_VCS_INFO_FILE_CMD \
+ && $NOTS_TOOL $NOTS_TOOL_BASE_ARGS build-webpack $NOTS_TOOL_COMMON_BUILDER_ARGS \
+ --bundler-config-path ${input:WEBPACK_CONFIG_PATH} \
+ --output-dir ${WEBPACK_OUTPUT_DIR} \
+ $_NODE_MODULES_INOUTS ${hide:PEERS} \
+ ${input;hide:"package.json"} ${TS_CONFIG_FILES} $_AS_HIDDEN_INPUTS(IN $TS_INPUT_FILES) \
+ ${output;hide:"package.json"} \
+ ${kv;hide:"pc magenta"} ${kv;hide:"p TS_WPK"}
+
+### @usage: WEBPACK_OUTPUT(DirName)
+###
+### Macro sets the output directory name for TS_WEBPACK module.
+###
+### - DirName - output directory name ("bundle" by default).
+macro WEBPACK_OUTPUT(DirName) {
+ SET(WEBPACK_OUTPUT_DIR $DirName)
+}
+
+### @usage: TS_WEBPACK([name])
+###
+### The Webpack bundle, bundles JavaScript code.
+### Build results are packed as `output.tar`.
+###
+### @example
+###
+### TS_WEBPACK()
+### END()
+###
+multimodule TS_WEBPACK {
+ module BUILD: _TS_BASE_UNIT {
+ .CMD=TS_WEBPACK_CMD
+ .EPILOGUE=_TS_CONFIG_EPILOGUE
+
+ # by default multimodule overrides inherited MODULE_TAG to submodule name (BUILD in this case)
+ # but we have to set it to TS for include processor to work
+ SET(MODULE_TAG TS)
+
+ _PEERDIR_TS_RESOURCE(webpack webpack-cli)
+
+ DISABLE(TS_CONFIG_DEDUCE_OUT)
+ DISABLE(TS_CONFIG_USE_OUTDIR)
+
+ _TS_CONFIGURE($TS_CONFIG_PATH)
+
+ SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${CURDIR}/package.json ${CURDIR}/pnpm-lock.yaml ${CURDIR}/${TS_CONFIG_PATH})
+ _TS_ADD_NODE_MODULES_FOR_BUILDER()
+ }
+}
+
+### @usage: TS_BUNDLE([name])
+###
+### deprecated, use TS_WEBPACK instead
+module TS_BUNDLE: _BARE_UNIT {
+ MESSAGE(TS_BUNDLE has been renamed to TS_WEBPACK)
+    MESSAGE(To update the project edit "ya.make" manually or run:)
+ MESSAGE(FATAL_ERROR ya project macro replace "TS_BUNDLE=TS_WEBPACK")
+}
diff --git a/build/plugins/lib/nots/__init__.py b/build/plugins/lib/nots/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/build/plugins/lib/nots/__init__.py
diff --git a/build/plugins/lib/nots/a.yaml b/build/plugins/lib/nots/a.yaml
new file mode 100644
index 0000000000..fefc8876d8
--- /dev/null
+++ b/build/plugins/lib/nots/a.yaml
@@ -0,0 +1,2 @@
+service: frontend_build_platform
+title: Frontend build platform lib/nots
diff --git a/build/plugins/lib/nots/erm_json_lite.py b/build/plugins/lib/nots/erm_json_lite.py
new file mode 100644
index 0000000000..e2927f8a98
--- /dev/null
+++ b/build/plugins/lib/nots/erm_json_lite.py
@@ -0,0 +1,122 @@
+import json
+import re
+from functools import cmp_to_key
+
+from .semver import Version, VersionRange
+
+
+class ErmJsonLite(object):
+ """
+ Basic implementation to read `erm-packages.json`.
+
+ It doesn't use any models, works with only raw JSON types: lists, dicts, strings
+ """
+
+ class ResourceType(object):
+ NPM_PACKAGE = "NPM_PACKAGE"
+ NODE_JS = "NODE_JS"
+
+ data = None
+
+ @staticmethod
+ def get_versions_of(er_resource):
+ # type: (dict) -> list[Version]
+ """
+ Return all versions of the resource in ASC order (from older to latest)
+ """
+ unsorted = er_resource.get("versions").keys()
+ # We have to sort because in python 2 the order of keys in a dict is not guaranteed
+ versions = sorted(unsorted, key=cmp_to_key(Version.cmp))
+
+ return [Version.from_str(v) for v in versions]
+
+ @classmethod
+ def load(cls, path):
+ # type: (str) -> ErmJsonLite
+ erm_json = cls()
+
+ with open(path, encoding='utf-8') as f:
+ erm_json.data = dict()
+ for k, v in json.load(f).items():
+ # Ignore comments (when key starts with `_`), used for banner
+ if not k.startswith("_"):
+ erm_json.data[k] = v
+
+ return erm_json
+
+ @staticmethod
+ def canonize_name(resource_name):
+ # type: (str) -> str
+ """
+ Canonize resource name
+
+ For example:
+ hermione -> hermione
+ super-package -> super_package
+ @yatool/nots -> yatool_nots
+ """
+ return re.sub(r"\W+", "_", resource_name).strip("_")
+
+ def get_resource(self, resource_name):
+ # type: (str) -> dict
+ """
+        Return resource by its name
+ """
+ er_resource = self.data.get(resource_name)
+ if not er_resource:
+ raise Exception("Requested resource {} is not a toolchain item".format(resource_name))
+
+ return er_resource
+
+ def get_sb_resources(self, resource_name, version):
+ # type: (str, Version) -> list[dict]
+ """
+ Return a list of SB resources for ER version
+ """
+ er_resource = self.get_resource(resource_name)
+
+ return er_resource.get("versions").get(str(version)).get("resources")
+
+ def is_resource_multiplatform(self, resource_name):
+ # type: (str) -> bool
+ """
+ Return True if resource is multiplatform, False otherwise
+ """
+ er_resource = self.get_resource(resource_name)
+
+ return er_resource.get("multiplatform", False)
+
+ def list_npm_packages(self):
+ # type: () -> list[str]
+ """
+ Returns a list of the names of the npm tools used in the toolchain
+ """
+ result = []
+ for resource_name, resource in self.data.items():
+ if resource.get("type") == self.ResourceType.NPM_PACKAGE:
+ result.append(resource_name)
+
+ return result
+
+ def select_version_of(self, resource_name, range_str=None):
+ # type: (str, str|None) -> Version|None
+ er_resource = self.get_resource(resource_name)
+
+ if range_str is None:
+ return Version.from_str(er_resource.get("default"))
+
+ version_range = VersionRange.from_str(range_str)
+
+ # assuming the version list is sorted from the lowest to the highest version,
+ # we stop the loop as early as possible and hence return the lowest compatible version
+ for version in self.get_versions_of(er_resource):
+ if version_range.is_satisfied_by(version):
+ return version
+
+ return None
+
+ def use_resource_directly(self, resource_name):
+ # type: (str) -> bool
+ er_resource = self.get_resource(resource_name)
+
+ return er_resource.get("useDirectly", False)
diff --git a/build/plugins/lib/nots/package_manager/__init__.py b/build/plugins/lib/nots/package_manager/__init__.py
new file mode 100644
index 0000000000..11387ec27a
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/__init__.py
@@ -0,0 +1,16 @@
+from .base import bundle_node_modules, constants, extract_node_modules, PackageJson, utils, PackageManagerCommandError
+from .base.package_json import PackageJsonWorkspaceError
+from .pnpm import PnpmPackageManager
+
+manager = PnpmPackageManager
+
+__all__ = [
+ "PackageJson",
+ "PackageJsonWorkspaceError",
+ "PackageManagerCommandError",
+ "bundle_node_modules",
+ "constants",
+ "extract_node_modules",
+ "manager",
+ "utils",
+]
diff --git a/build/plugins/lib/nots/package_manager/base/__init__.py b/build/plugins/lib/nots/package_manager/base/__init__.py
new file mode 100644
index 0000000000..022d4a960e
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/__init__.py
@@ -0,0 +1,20 @@
+from . import constants, utils
+from .lockfile import BaseLockfile, LockfilePackageMeta, LockfilePackageMetaInvalidError
+from .package_json import PackageJson
+from .package_manager import BasePackageManager, PackageManagerError, PackageManagerCommandError
+from .node_modules_bundler import bundle_node_modules, extract_node_modules
+
+
# Public API of the base package manager package.
__all__ = [
    "constants",
    "utils",
    "BaseLockfile",
    "LockfilePackageMeta",
    "LockfilePackageMetaInvalidError",
    "BasePackageManager",
    "PackageManagerError",
    "PackageManagerCommandError",
    "PackageJson",
    "bundle_node_modules",
    "extract_node_modules",
]
diff --git a/build/plugins/lib/nots/package_manager/base/constants.py b/build/plugins/lib/nots/package_manager/base/constants.py
new file mode 100644
index 0000000000..cd90b78b53
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/constants.py
@@ -0,0 +1,9 @@
# Well-known file and directory names shared by the nots package managers.
BUILD_DIRNAME = "build"
BUNDLE_DIRNAME = "bundle"
NODE_MODULES_BUNDLE_FILENAME = "node_modules.tar"  # tarball with the module's node_modules
NODE_MODULES_DIRNAME = "node_modules"
NODE_MODULES_WORKSPACE_BUNDLE_FILENAME = "workspace_node_modules.tar"  # bundle including workspace peers
NPM_REGISTRY_URL = "http://npm.yandex-team.ru"  # internal npm registry mirror
OUTPUT_TAR_FILENAME = "output.tar"
PACKAGE_JSON_FILENAME = "package.json"
PNPM_LOCKFILE = "pnpm-lock.yaml"
diff --git a/build/plugins/lib/nots/package_manager/base/lockfile.py b/build/plugins/lib/nots/package_manager/base/lockfile.py
new file mode 100644
index 0000000000..f13168b320
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/lockfile.py
@@ -0,0 +1,75 @@
+import os
+
+from abc import ABCMeta, abstractmethod
+from six import add_metaclass
+
+
class LockfilePackageMeta(object):
    """
    Basic struct representing package meta from lockfile.
    """

    __slots__ = ("tarball_url", "sky_id", "integrity", "integrity_algorithm", "tarball_path")

    @staticmethod
    def from_str(s):
        """Parse the space-separated serialized form produced by `to_str`."""
        fields = s.strip().split(" ")
        return LockfilePackageMeta(*fields)

    def __init__(self, tarball_url, sky_id, integrity, integrity_algorithm):
        # Example url: http://npm.yandex-team.ru/@scope%2fname/-/name-0.0.1.tgz
        self.tarball_url = tarball_url
        self.sky_id = sky_id
        self.integrity = integrity
        self.integrity_algorithm = integrity_algorithm
        # Keep only the trailing three segments: @scope%2fname/-/name-0.0.1.tgz
        self.tarball_path = "/".join(tarball_url.split("/")[-3:])

    def to_str(self):
        """Serialize to the space-separated form accepted by `from_str`."""
        fields = (self.tarball_url, self.sky_id, self.integrity, self.integrity_algorithm)
        return " ".join(fields)
+
+
class LockfilePackageMetaInvalidError(RuntimeError):
    """Raised when a lockfile package entry cannot be parsed into LockfilePackageMeta."""

    pass
+
+
@add_metaclass(ABCMeta)
class BaseLockfile(object):
    """Abstract base for lockfile readers/writers (see PnpmLockfile for a concrete one)."""

    @classmethod
    def load(cls, path):
        """
        :param path: lockfile path
        :type path: str
        :rtype: BaseLockfile
        """
        pj = cls(path)
        pj.read()

        return pj

    def __init__(self, path):
        # type: (str) -> None
        # All lockfile handling assumes absolute paths; reject anything else early.
        if not os.path.isabs(path):
            raise TypeError("Absolute path required, given: {}".format(path))

        self.path = path
        self.data = None  # parsed lockfile content, filled in by read()

    @abstractmethod
    def read(self):
        # Parse the file at self.path into self.data.
        pass

    @abstractmethod
    def write(self, path=None):
        # Serialize self.data back to disk (to `path`, or self.path by default).
        pass

    @abstractmethod
    def get_packages_meta(self):
        # Return LockfilePackageMeta entries for all packages in the lockfile.
        pass

    @abstractmethod
    def update_tarball_resolutions(self, fn):
        # Rewrite each package's resolution.tarball via fn(meta).
        pass

    @abstractmethod
    def validate_has_addons_flags(self):
        # Check that packages marked "requiresBuild" also carry the "hasAddons" flag.
        pass
diff --git a/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py b/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py
new file mode 100644
index 0000000000..aae54c0be6
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/node_modules_bundler.py
@@ -0,0 +1,66 @@
+import os
+import tarfile
+
+from io import BytesIO
+
+from .utils import build_nm_path
+
# Location of the peers metadata inside the node_modules bundle tarball.
PEERS_DIR = ".peers"
PEERS_INDEX = "index"
+
+
def bundle_node_modules(build_root, peers, node_modules_path, bundle_path):
    """
    Creates node_modules bundle.
    Bundle contains node_modules directory, peers' node_modules directories,
    and index file with the list of added peers (\\n delimited).
    :param build_root: arcadia build root
    :type build_root: str
    :param peers: list of peers (arcadia root related)
    :type peers: list of str
    :param node_modules_path: node_modules path
    :type node_modules_path: str
    :param bundle_path: tarball path
    :type bundle_path: str
    """
    with tarfile.open(bundle_path, "w") as tf:
        tf.add(node_modules_path, arcname=".")

        # Peers' node_modules.
        added_peers = []
        for p in peers:
            peer_nm_path = build_nm_path(os.path.join(build_root, p))
            peer_bundled_nm_path = build_nm_path(os.path.join(PEERS_DIR, p))
            if not os.path.isdir(peer_nm_path):
                # Peer has no node_modules of its own — nothing to bundle.
                continue
            tf.add(peer_nm_path, arcname=peer_bundled_nm_path)
            added_peers.append(p)

        # Peers index.
        # Encode first and size the tar entry by the *byte* length: sizing by
        # len() of the unencoded text truncates the entry when peer paths
        # contain non-ASCII characters.
        peers_index = "\n".join(added_peers).encode()
        ti = tarfile.TarInfo(name=os.path.join(PEERS_DIR, PEERS_INDEX))
        ti.size = len(peers_index)
        tf.addfile(ti, BytesIO(peers_index))
+
+
def extract_node_modules(build_root, node_modules_path, bundle_path):
    """
    Extracts node_modules bundle.
    :param build_root: arcadia build root
    :type build_root: str
    :param node_modules_path: node_modules path
    :type node_modules_path: str
    :param bundle_path: tarball path
    :type bundle_path: str
    """
    # NOTE(review): extractall() without a member filter trusts the archive not
    # to contain path-traversal entries; presumably the bundle always comes from
    # bundle_node_modules above — confirm no untrusted tarballs reach here.
    with tarfile.open(bundle_path) as tf:
        tf.extractall(node_modules_path)

    # Move each peer's node_modules from the bundle's .peers area back to its
    # build location, as recorded in the index written by bundle_node_modules.
    with open(os.path.join(node_modules_path, PEERS_DIR, PEERS_INDEX)) as peers_file:
        peers = peers_file.read().split("\n")
        for p in peers:
            if not p:
                # An empty index yields a single empty line — skip it.
                continue
            bundled_nm_path = build_nm_path(os.path.join(node_modules_path, PEERS_DIR, p))
            nm_path = build_nm_path(os.path.join(build_root, p))
            os.rename(bundled_nm_path, nm_path)
diff --git a/build/plugins/lib/nots/package_manager/base/package_json.py b/build/plugins/lib/nots/package_manager/base/package_json.py
new file mode 100644
index 0000000000..cc498b33f7
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/package_json.py
@@ -0,0 +1,222 @@
+import json
+import logging
+import os
+
+from six import iteritems
+
+from .utils import build_pj_path
+
+logger = logging.getLogger(__name__)
+
+
class PackageJsonWorkspaceError(RuntimeError):
    """Raised for invalid or inconsistent "workspace:" dependency specifiers."""

    pass
+
+
class PackageJson(object):
    """In-memory representation of a package.json file, with helpers for
    dependency introspection and pnpm-style "workspace:" resolution."""

    DEP_KEY = "dependencies"
    DEV_DEP_KEY = "devDependencies"
    PEER_DEP_KEY = "peerDependencies"
    OPT_DEP_KEY = "optionalDependencies"
    DEP_KEYS = (DEP_KEY, DEV_DEP_KEY, PEER_DEP_KEY, OPT_DEP_KEY)

    WORKSPACE_SCHEMA = "workspace:"

    @classmethod
    def load(cls, path):
        """
        :param path: package.json path
        :type path: str
        :rtype: PackageJson
        """
        pj = cls(path)
        pj.read()

        return pj

    def __init__(self, path):
        # type: (str) -> None
        if not os.path.isabs(path):
            raise TypeError("Absolute path required, given: {}".format(path))

        self.path = path
        self.data = None  # parsed package.json content, filled in by read()

    def read(self):
        """Parse the file at self.path into self.data."""
        with open(self.path, 'rb') as f:
            self.data = json.load(f)

    def write(self, path=None):
        """
        :param path: path to store package.json, defaults to original path
        :type path: str
        """
        if path is None:
            path = self.path

        directory = os.path.dirname(path)
        if not os.path.exists(directory):
            # makedirs (not mkdir) so that writing into a nested,
            # not-yet-created directory tree does not fail with ENOENT.
            os.makedirs(directory)

        with open(path, "w") as f:
            json.dump(self.data, f, indent=2, ensure_ascii=False)
            f.write('\n')  # it's better for diff algorithm in arc
            logger.debug("Written {}".format(path))

    def get_name(self):
        # type: () -> str
        """Package name; falls back to a name derived from the directory path."""
        name = self.data.get("name")

        if not name:
            name = os.path.dirname(self.path).replace("/", "-").strip("-")

        return name

    def get_version(self):
        """Package version; raises KeyError when the field is absent."""
        return self.data["version"]

    def get_description(self):
        """Package description, or None."""
        return self.data.get("description")

    def get_use_prebuilder(self):
        """Value of the "usePrebuilder" flag, defaulting to False."""
        return self.data.get("usePrebuilder", False)

    def get_nodejs_version(self):
        """Value of engines.node, or None."""
        return self.data.get("engines", {}).get("node")

    def get_dep_specifier(self, dep_name):
        """Version specifier of `dep_name` from any dependency section, or None."""
        for name, spec in self.dependencies_iter():
            if dep_name == name:
                return spec
        return None

    def dependencies_iter(self):
        """Yield (name, specifier) pairs from all dependency sections."""
        for key in self.DEP_KEYS:
            deps = self.data.get(key)
            if not deps:
                continue

            for name, spec in iteritems(deps):
                yield (name, spec)

    def has_dependencies(self):
        # type: () -> bool
        """True when any dependency section is non-empty."""
        first_dep = next(self.dependencies_iter(), None)
        return first_dep is not None

    def bins_iter(self):
        """Yield bin entry paths; "bin" may be a single string or a name->path dict."""
        bins = self.data.get("bin")
        if isinstance(bins, str):
            yield bins
        elif isinstance(bins, dict):
            # `bin_path` rather than `bin` to avoid shadowing the builtin.
            for bin_path in bins.values():
                yield bin_path

    def get_bin_path(self, bin_name=None):
        # type: (str|None) -> str|None
        """Path of the bin entry named `bin_name` (package name by default), or None."""
        actual_bin_name = bin_name or self.get_name()  # type: str

        bins = self.data.get("bin")

        if isinstance(bins, str):
            if bin_name is not None:
                logger.warning("bin_name is unused, because 'bin' is a string")

            return bins

        if isinstance(bins, dict):
            for name, path in bins.items():
                if name == actual_bin_name:
                    return path

        return None

    def get_workspace_dep_spec_paths(self):
        """
        Returns names and paths from specifiers of the defined workspace dependencies.
        :rtype: list of (str, str)
        :raises PackageJsonWorkspaceError: for non-relative workspace specifiers
        """
        spec_paths = []
        schema = self.WORKSPACE_SCHEMA
        schema_len = len(schema)

        for name, spec in self.dependencies_iter():
            if not spec.startswith(schema):
                continue

            spec_path = spec[schema_len:]
            # startswith(".") already covers "..", so a single check suffices.
            if not spec_path.startswith("."):
                raise PackageJsonWorkspaceError(
                    "Expected relative path specifier for workspace dependency, but got '{}' for {} in {}".format(
                        spec, name, self.path
                    )
                )

            spec_paths.append((name, spec_path))

        return spec_paths

    def get_workspace_dep_paths(self, base_path=None):
        """
        Returns paths of the defined workspace dependencies.
        :param base_path: base path to resolve relative dep paths
        :type base_path: str
        :rtype: list of str
        """
        if base_path is None:
            base_path = os.path.dirname(self.path)

        return [os.path.normpath(os.path.join(base_path, p)) for _, p in self.get_workspace_dep_spec_paths()]

    def get_workspace_deps(self):
        """
        Loads the package.json of every direct workspace dependency.
        :rtype: list of PackageJson
        :raises PackageJsonWorkspaceError: when a dependency name does not match its package.json
        """
        ws_deps = []
        pj_dir = os.path.dirname(self.path)

        for name, rel_path in self.get_workspace_dep_spec_paths():
            dep_path = os.path.normpath(os.path.join(pj_dir, rel_path))
            dep_pj = PackageJson.load(build_pj_path(dep_path))

            if name != dep_pj.get_name():
                raise PackageJsonWorkspaceError(
                    "Workspace dependency name mismatch, found '{}' instead of '{}' in {}".format(
                        name, dep_pj.get_name(), self.path
                    )
                )

            ws_deps.append(dep_pj)

        return ws_deps

    def get_workspace_map(self, ignore_self=False):
        """
        Returns absolute paths of the workspace dependencies (including transitive) mapped to package.json and depth.
        :param ignore_self: whether path of the current module will be excluded
        :type ignore_self: bool
        :rtype: dict of (PackageJson, int)
        """
        ws_deps = {}
        # list of (pj, depth)
        pj_queue = [(self, 0)]

        while len(pj_queue):
            (pj, depth) = pj_queue.pop()
            pj_dir = os.path.dirname(pj.path)
            if pj_dir in ws_deps:
                # Already visited via another dependency path.
                continue

            if not ignore_self or pj != self:
                ws_deps[pj_dir] = (pj, depth)

            for dep_pj in pj.get_workspace_deps():
                pj_queue.append((dep_pj, depth + 1))

        return ws_deps

    def get_dep_paths_by_names(self):
        """
        Returns dict of {dependency_name: dependency_path}
        """
        ws_map = self.get_workspace_map()
        return {pj.get_name(): path for path, (pj, _) in ws_map.items()}
diff --git a/build/plugins/lib/nots/package_manager/base/package_manager.py b/build/plugins/lib/nots/package_manager/base/package_manager.py
new file mode 100644
index 0000000000..6b9faa56e8
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/package_manager.py
@@ -0,0 +1,155 @@
+import os
+import sys
+import subprocess
+
+from abc import ABCMeta, abstractmethod
+from six import add_metaclass
+
+from .constants import NPM_REGISTRY_URL
+from .package_json import PackageJson
+from .utils import build_nm_path, build_pj_path
+
+
class PackageManagerError(RuntimeError):
    """Base error for package manager operations."""

    pass
+
+
class PackageManagerCommandError(PackageManagerError):
    """Raised when a package manager subprocess exits with a non-zero code.

    Carries the command, exit code and captured stdout/stderr for diagnostics.
    """

    def __init__(self, cmd, code, stdout, stderr):
        # type: (list, int, str, str) -> None
        self.cmd = cmd
        self.code = code
        self.stdout = stdout
        self.stderr = stderr

        msg = "package manager exited with code {} while running {}:\n{}\n{}".format(code, cmd, stdout, stderr)
        super(PackageManagerCommandError, self).__init__(msg)
+
+
@add_metaclass(ABCMeta)
class BasePackageManager(object):
    """Abstract base for package manager wrappers (see PnpmPackageManager)."""

    def __init__(
        self,
        build_root,
        build_path,
        sources_path,
        nodejs_bin_path,
        script_path,
        contribs_path,
        module_path=None,
        sources_root=None,
    ):
        # Module path relative to the build root: derived by stripping the
        # build root prefix (plus its trailing slash) when not given explicitly.
        self.module_path = build_path[len(build_root) + 1 :] if module_path is None else module_path
        self.build_path = build_path
        self.sources_path = sources_path
        self.build_root = build_root
        # Sources root: derived by stripping the module path suffix (plus the
        # separating slash) from sources_path when not given explicitly.
        self.sources_root = sources_path[: -len(self.module_path) - 1] if sources_root is None else sources_root
        self.nodejs_bin_path = nodejs_bin_path
        self.script_path = script_path
        self.contribs_path = contribs_path

    @classmethod
    def load_package_json(cls, path):
        """
        :param path: path to package.json
        :type path: str
        :rtype: PackageJson
        """
        return PackageJson.load(path)

    @classmethod
    def load_package_json_from_dir(cls, dir_path):
        """
        :param dir_path: path to directory with package.json
        :type dir_path: str
        :rtype: PackageJson
        """
        return cls.load_package_json(build_pj_path(dir_path))

    @classmethod
    @abstractmethod
    def load_lockfile(cls, path):
        # Load a concrete lockfile implementation from `path`.
        pass

    @classmethod
    @abstractmethod
    def load_lockfile_from_dir(cls, dir_path):
        # Load a concrete lockfile implementation from a directory.
        pass

    @abstractmethod
    def create_node_modules(self):
        # Materialize node_modules according to the lockfile.
        pass

    @abstractmethod
    def calc_node_modules_inouts(self):
        # Compute build inputs/outputs for node_modules creation.
        pass

    @abstractmethod
    def extract_packages_meta_from_lockfiles(self, lf_paths):
        # Collect LockfilePackageMeta entries from the given lockfile paths.
        pass

    def get_local_peers_from_package_json(self):
        """
        Returns paths of direct workspace dependencies (source root related).
        :rtype: list of str
        """
        return self.load_package_json_from_dir(self.sources_path).get_workspace_dep_paths(base_path=self.module_path)

    def get_peers_from_package_json(self):
        """
        Returns paths of workspace dependencies (source root related).
        :rtype: list of str
        """
        pj = self.load_package_json_from_dir(self.sources_path)
        prefix_len = len(self.sources_root) + 1

        return [p[prefix_len:] for p in pj.get_workspace_map(ignore_self=True).keys()]

    def _exec_command(self, args, include_defaults=True, script_path=None):
        # Run the package manager script under node from self.build_path.
        # Raises PackageManagerCommandError on non-zero exit.
        # NOTE(review): captured stdout is not returned to the caller on success.
        if not self.nodejs_bin_path:
            raise PackageManagerError("Unable to execute command: nodejs_bin_path is not configured")

        cmd = (
            [self.nodejs_bin_path, script_path or self.script_path]
            + args
            + (self._get_default_options() if include_defaults else [])
        )
        p = subprocess.Popen(
            cmd,
            cwd=self.build_path,
            stdin=None,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = p.communicate()

        if p.returncode != 0:
            # Surface the manager's own log before raising, when available.
            self._dump_debug_log()

            raise PackageManagerCommandError(cmd, p.returncode, stdout.decode("utf-8"), stderr.decode("utf-8"))

    def _nm_path(self, *parts):
        # Path inside this module's node_modules directory.
        return os.path.join(build_nm_path(self.build_path), *parts)

    def _contrib_tarball_path(self, pkg):
        # Local path of the package tarball in the contribs store.
        return os.path.join(self.contribs_path, pkg.tarball_path)

    def _contrib_tarball_url(self, pkg):
        # "file:" url pointing at the local contrib tarball.
        return "file:" + self._contrib_tarball_path(pkg)

    def _get_default_options(self):
        # Options appended to every package manager invocation.
        return ["--registry", NPM_REGISTRY_URL]

    def _get_debug_log_path(self):
        # Subclasses may return the path of the manager's debug log.
        return None

    def _dump_debug_log(self):
        # Best-effort: print the manager's debug log to stderr, if it exists.
        log_path = self._get_debug_log_path()

        if not log_path:
            return

        try:
            with open(log_path) as f:
                sys.stderr.write("Package manager log {}:\n{}\n".format(log_path, f.read()))
        except Exception:
            sys.stderr.write("Failed to dump package manager log {}.\n".format(log_path))
diff --git a/build/plugins/lib/nots/package_manager/base/tests/package_json.py b/build/plugins/lib/nots/package_manager/base/tests/package_json.py
new file mode 100644
index 0000000000..b50f4273d5
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/package_json.py
@@ -0,0 +1,238 @@
+import os
+
+import pytest
+
+from build.plugins.lib.nots.package_manager.base.package_json import PackageJson, PackageJsonWorkspaceError
+
+
def test_get_name_exist():
    """get_name() returns the explicit "name" field when present."""
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "name": "package-name",
    }

    name = pj.get_name()

    assert name == "package-name"


def test_get_name_none():
    """get_name() falls back to a directory-derived name when "name" is absent."""
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {}

    name = pj.get_name()

    assert name == "packages-foo"
+
+
def test_get_workspace_dep_spec_paths_ok():
    """Workspace specifiers across dependency sections yield (name, rel_path) pairs."""
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:../bar",
        },
        "devDependencies": {
            "@yandex-int/baz": "workspace:../baz",
        },
    }

    ws_dep_spec_paths = pj.get_workspace_dep_spec_paths()

    assert ws_dep_spec_paths == [
        ("@yandex-int/bar", "../bar"),
        ("@yandex-int/baz", "../baz"),
    ]


def test_get_workspace_dep_spec_paths_invalid_path():
    """Non-relative workspace specifiers (e.g. "workspace:*") are rejected."""
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:*",
        },
    }

    with pytest.raises(PackageJsonWorkspaceError) as e:
        pj.get_workspace_dep_spec_paths()

    assert (
        str(e.value)
        == "Expected relative path specifier for workspace dependency, but got 'workspace:*' for @yandex-int/bar in /packages/foo/package.json"
    )


def test_get_workspace_dep_paths_ok():
    """Workspace dep paths are resolved relative to the package.json directory."""
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:../bar",
        },
        "devDependencies": {
            "@yandex-int/baz": "workspace:../baz",
        },
    }

    ws_dep_paths = pj.get_workspace_dep_paths()

    assert ws_dep_paths == [
        "/packages/bar",
        "/packages/baz",
    ]
+
+
def test_get_dep_specifier():
    """get_dep_specifier() matches exact names only, across all dependency sections."""
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "jestify": "0.0.1",
            "eslint": ">= 7.27.0",
        },
        "devDependencies": {
            "jest": "27.1.0",
            "eslinting": "0.0.2",
        },
    }

    jest_spec = pj.get_dep_specifier("jest")
    assert jest_spec == "27.1.0", "Got unexpected jest specifier: {}".format(jest_spec)

    eslint_spec = pj.get_dep_specifier("eslint")
    assert eslint_spec == ">= 7.27.0", "Got unexpected eslint specifier: {}".format(eslint_spec)


def test_get_workspace_dep_paths_with_custom_base_path():
    """An explicit base_path overrides the package.json directory for resolution."""
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:../bar",
        },
        "devDependencies": {
            "@yandex-int/baz": "workspace:../baz",
        },
    }

    ws_dep_paths = pj.get_workspace_dep_paths(base_path="custom/dir")

    assert ws_dep_paths == [
        "custom/bar",
        "custom/baz",
    ]
+
+
def test_get_workspace_deps_ok():
    """get_workspace_deps() loads each dependency's package.json."""
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:../bar",
        },
        "devDependencies": {
            "@yandex-int/baz": "workspace:../baz",
        },
    }

    def load_mock(cls, path):
        p = PackageJson(path)
        p.data = {
            "name": "@yandex-int/{}".format(os.path.basename(os.path.dirname(path))),
        }
        return p

    # NOTE(review): patching the class attribute mutates global state and is
    # never restored, so it leaks into subsequent tests — consider pytest's
    # monkeypatch fixture instead.
    PackageJson.load = classmethod(load_mock)

    ws_deps = pj.get_workspace_deps()

    assert len(ws_deps) == 2
    assert ws_deps[0].path == "/packages/bar/package.json"
    assert ws_deps[1].path == "/packages/baz/package.json"


def test_get_workspace_deps_with_wrong_name():
    """A name mismatch between specifier and dependency package.json raises."""
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:../bar",
        },
    }

    def load_mock(cls, path):
        p = PackageJson(path)
        p.data = {
            "name": "@shouldbe/{}".format(os.path.basename(os.path.dirname(path))),
        }
        return p

    # NOTE(review): same unrestored global patch as above.
    PackageJson.load = classmethod(load_mock)

    with pytest.raises(PackageJsonWorkspaceError) as e:
        pj.get_workspace_deps()

    assert (
        str(e.value)
        == "Workspace dependency name mismatch, found '@yandex-int/bar' instead of '@shouldbe/bar' in /packages/foo/package.json"
    )


def test_get_workspace_map_ok():
    """get_workspace_map() walks transitive deps and records their depth."""
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "dependencies": {
            "@yandex-int/bar": "workspace:../bar",
        },
    }

    def load_mock(cls, path):
        name = os.path.basename(os.path.dirname(path))
        p = PackageJson(path)
        p.data = {
            "name": "@yandex-int/{}".format(name),
            "dependencies": ({"@yandex-int/qux": "workspace:../qux"} if name == "bar" else {}),
        }
        return p

    # NOTE(review): same unrestored global patch as above.
    PackageJson.load = classmethod(load_mock)

    ws_map = pj.get_workspace_map()

    assert len(ws_map) == 3
    assert ws_map["/packages/foo"][0].path == "/packages/foo/package.json"
    assert ws_map["/packages/foo"][1] == 0
    assert ws_map["/packages/bar"][0].path == "/packages/bar/package.json"
    assert ws_map["/packages/bar"][1] == 1
    assert ws_map["/packages/qux"][0].path == "/packages/qux/package.json"
    assert ws_map["/packages/qux"][1] == 2
+
+
def test_get_bin_path_string():
    """When "bin" is a plain string it is returned as-is."""
    # arrange
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "name": "next",
        "bin": "./dist/bin/next",
    }

    # act + assert
    assert pj.get_bin_path() == "./dist/bin/next"


def test_get_bin_path_same_name():
    """The package name is used as the default bin name for dict-form "bin"."""
    # arrange
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "name": "next",
        "bin": {"next": "./dist/bin/next"},
    }

    # act + assert
    assert pj.get_bin_path() == "./dist/bin/next"


def test_get_bin_path_custom_bin_name():
    """An explicit bin name selects the matching dict entry."""
    # arrange
    pj = PackageJson("/packages/foo/package.json")
    pj.data = {
        "name": "typescript",
        "bin": {"tsc": "./bin/tsc"},
    }

    # act + assert
    assert pj.get_bin_path("tsc") == "./bin/tsc"
diff --git a/build/plugins/lib/nots/package_manager/base/tests/utils.py b/build/plugins/lib/nots/package_manager/base/tests/utils.py
new file mode 100644
index 0000000000..4287beec47
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/utils.py
@@ -0,0 +1,15 @@
+from build.plugins.lib.nots.package_manager.base import utils
+
+
def test_extract_package_name_from_path():
    """Scoped names keep two path segments; unscoped names keep one."""
    happy_checklist = [
        ("@yandex-int/foo-bar-baz/some/path/inside/the/package", "@yandex-int/foo-bar-baz"),
        ("@yandex-int/foo-bar-buzz", "@yandex-int/foo-bar-buzz"),
        ("package-wo-scope", "package-wo-scope"),
        ("p", "p"),
        ("", ""),
    ]

    for item in happy_checklist:
        package_name = utils.extract_package_name_from_path(item[0])
        assert package_name == item[1]
diff --git a/build/plugins/lib/nots/package_manager/base/tests/ya.make b/build/plugins/lib/nots/package_manager/base/tests/ya.make
new file mode 100644
index 0000000000..1bece69c33
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/tests/ya.make
@@ -0,0 +1,14 @@
+PY23_TEST()
+
+OWNER(g:frontend-build-platform)
+
+TEST_SRCS(
+ package_json.py
+ utils.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+)
+
+END()
diff --git a/build/plugins/lib/nots/package_manager/base/utils.py b/build/plugins/lib/nots/package_manager/base/utils.py
new file mode 100644
index 0000000000..aee3aabdd3
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/utils.py
@@ -0,0 +1,52 @@
+import os
+
+from .constants import NODE_MODULES_DIRNAME, NODE_MODULES_WORKSPACE_BUNDLE_FILENAME, PACKAGE_JSON_FILENAME
+
+
def home_dir():
    """
    Stolen from ya (in the root of arcadia)
    """
    # $HOME can be unreliable in certain environments, so it is temporarily
    # removed to force resolving the home directory from /etc/passwd.
    saved_home = os.environ.pop("HOME", None)
    try:
        passwd_home = os.path.expanduser("~")
        if not os.path.isabs(passwd_home):
            # When python is built with musl (this is quire weird though),
            # only users from /etc/passwd will be properly resolved,
            # as musl does not have nss module for LDAP integration.
            return saved_home
        # This home dir is valid, prefer it over $HOME
        return passwd_home
    finally:
        # Always restore the original $HOME for the rest of the process.
        if saved_home is not None:
            os.environ["HOME"] = saved_home
+
+
def s_rooted(p):
    """Prefix `p` with the source root macro ($S)."""
    return os.path.join("$S", p)


def b_rooted(p):
    """Prefix `p` with the build root macro ($B)."""
    return os.path.join("$B", p)


def build_pj_path(p):
    """Path to package.json inside directory `p`."""
    return os.path.join(p, PACKAGE_JSON_FILENAME)


def build_nm_path(p):
    """Path to node_modules inside directory `p`."""
    return os.path.join(p, NODE_MODULES_DIRNAME)


def build_nm_bundle_path(p):
    """Path to the workspace node_modules bundle inside directory `p`."""
    return os.path.join(p, NODE_MODULES_WORKSPACE_BUNDLE_FILENAME)
+
+
def extract_package_name_from_path(p):
    """Return the npm package name that prefixes path `p`.

    Scoped names ("@scope/name/...") keep two segments, unscoped names keep one.
    """
    if p.startswith("@"):
        scope, _, remainder = p.partition("/")
        name = remainder.partition("/")[0]
        return "{}/{}".format(scope, name) if name else scope
    return p.partition("/")[0]
diff --git a/build/plugins/lib/nots/package_manager/base/ya.make b/build/plugins/lib/nots/package_manager/base/ya.make
new file mode 100644
index 0000000000..4b7f22f05a
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/base/ya.make
@@ -0,0 +1,23 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+ constants.py
+ lockfile.py
+ node_modules_bundler.py
+ package_json.py
+ package_manager.py
+ utils.py
+)
+
+PEERDIR(
+ contrib/python/six
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/package_manager/pnpm/__init__.py b/build/plugins/lib/nots/package_manager/pnpm/__init__.py
new file mode 100644
index 0000000000..af6de8e62a
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/__init__.py
@@ -0,0 +1,14 @@
+from . import constants
+from .lockfile import PnpmLockfile
+from .package_manager import PnpmPackageManager
+from .utils import build_ws_config_path
+from .workspace import PnpmWorkspace
+
+
# Public API of the pnpm package manager package.
__all__ = [
    "build_ws_config_path",
    "constants",
    "PnpmLockfile",
    "PnpmPackageManager",
    "PnpmWorkspace",
]
diff --git a/build/plugins/lib/nots/package_manager/pnpm/constants.py b/build/plugins/lib/nots/package_manager/pnpm/constants.py
new file mode 100644
index 0000000000..e84a78c55e
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/constants.py
@@ -0,0 +1,2 @@
# pnpm-specific well-known file names.
PNPM_WS_FILENAME = "pnpm-workspace.yaml"
PNPM_LOCKFILE_FILENAME = "pnpm-lock.yaml"
diff --git a/build/plugins/lib/nots/package_manager/pnpm/lockfile.py b/build/plugins/lib/nots/package_manager/pnpm/lockfile.py
new file mode 100644
index 0000000000..eca1e4015b
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/lockfile.py
@@ -0,0 +1,174 @@
+import base64
+import binascii
+import yaml
+import os
+import io
+import re
+
+from six.moves.urllib import parse as urlparse
+from six import iteritems
+
+from ..base import PackageJson, BaseLockfile, LockfilePackageMeta, LockfilePackageMetaInvalidError
+
+
class PnpmLockfile(BaseLockfile):
    """Reader/writer for pnpm-lock.yaml (lockfileVersion 5.x/6.x only)."""

    IMPORTER_KEYS = PackageJson.DEP_KEYS + ("specifiers",)

    def read(self):
        """Load and validate the lockfile; only lockfileVersion 5.x/6.x is accepted."""
        with io.open(self.path, "rb") as f:
            self.data = yaml.load(f, Loader=yaml.CSafeLoader)

        lockfileVersion = "lockfileVersion"
        version_in_data = lockfileVersion in self.data
        r = re.compile('^[56]\\.\\d$')
        if not version_in_data or not r.match(str(self.data[lockfileVersion])):
            raise Exception(
                'Error of project configuration: {} has lockfileVersion: {}. '.format(
                    self.path, self.data[lockfileVersion] if version_in_data else "<no-version>"
                )
                + 'This version is not supported. Please, delete pnpm-lock.yaml and regenerate it using "ya tool nots --clean update-lockfile"'
            )

    def write(self, path=None):
        """
        :param path: path to store lockfile, defaults to original path
        :type path: str
        """
        if path is None:
            path = self.path

        with open(path, "w") as f:
            yaml.dump(self.data, f, Dumper=yaml.CSafeDumper)

    def get_packages_meta(self):
        """
        Extracts packages meta from lockfile.
        :rtype: list of LockfilePackageMeta
        """
        packages = self.data.get("packages", {})

        # Materialized as a list so the documented rtype holds on Python 3 as
        # well (a bare map() there is a one-shot iterator, not a list).
        return [_parse_package_meta(*item) for item in iteritems(packages)]

    def update_tarball_resolutions(self, fn):
        """
        :param fn: maps `LockfilePackageMeta` instance to new `resolution.tarball` value
        :type fn: lambda
        """
        packages = self.data.get("packages", {})

        for key, meta in iteritems(packages):
            meta["resolution"]["tarball"] = fn(_parse_package_meta(key, meta, allow_file_protocol=True))
            packages[key] = meta

    def get_importers(self):
        """
        Returns "importers" section from the lockfile or creates similar structure from "dependencies" and "specifiers".
        :rtype: dict of dict of dict of str
        """
        importers = self.data.get("importers")
        if importers is not None:
            return importers

        importer = {k: self.data[k] for k in self.IMPORTER_KEYS if k in self.data}

        return {".": importer} if importer else {}

    def merge(self, lf):
        """
        Merges two lockfiles:
        1. Converts the lockfile to monorepo-like lockfile with "importers" section instead of "dependencies" and "specifiers".
        2. Merges `lf`'s dependencies and specifiers to importers.
        3. Merges `lf`'s packages to the lockfile.
        :param lf: lockfile to merge
        :type lf: PnpmLockfile
        """
        importers = self.get_importers()
        build_path = os.path.dirname(self.path)

        for [importer, imports] in iteritems(lf.get_importers()):
            # Importer paths are re-based from lf's directory onto this lockfile's directory.
            importer_path = os.path.normpath(os.path.join(os.path.dirname(lf.path), importer))
            importer_rel_path = os.path.relpath(importer_path, build_path)
            importers[importer_rel_path] = imports

        self.data["importers"] = importers

        for k in self.IMPORTER_KEYS:
            self.data.pop(k, None)

        # Existing package entries win over merged-in ones.
        packages = self.data.get("packages", {})
        for k, v in iteritems(lf.data.get("packages", {})):
            if k not in packages:
                packages[k] = v
        self.data["packages"] = packages

    def validate_has_addons_flags(self):
        """
        Returns (ok, invalid_keys): packages marked "requiresBuild" must also carry "hasAddons".
        :rtype: (bool, list of str)
        """
        packages = self.data.get("packages", {})
        invalid_keys = []

        for key, meta in iteritems(packages):
            if meta.get("requiresBuild") and "hasAddons" not in meta:
                invalid_keys.append(key)

        return (not invalid_keys, invalid_keys)
+
+
def _parse_package_meta(key, meta, allow_file_protocol=False):
    """
    :param key: uniq package key from lockfile
    :type key: string
    :param meta: package meta dict from lockfile
    :type meta: dict
    :param allow_file_protocol: whether "file:" tarball urls are accepted
    :type allow_file_protocol: bool
    :rtype: LockfilePackageMeta
    :raises TypeError: when required keys are missing or the meta cannot be parsed
    """
    try:
        tarball_url = _parse_tarball_url(meta["resolution"]["tarball"], allow_file_protocol)
        sky_id = _parse_sky_id_from_tarball_url(meta["resolution"]["tarball"])
        integrity_algorithm, integrity = _parse_package_integrity(meta["resolution"]["integrity"])
    except KeyError as e:
        raise TypeError("Invalid package meta for key {}, missing {} key".format(key, e))
    except LockfilePackageMetaInvalidError as e:
        raise TypeError("Invalid package meta for key {}, parse error: {}".format(key, e))

    return LockfilePackageMeta(tarball_url, sky_id, integrity, integrity_algorithm)
+
+
+def _parse_tarball_url(tarball_url, allow_file_protocol):
+ if tarball_url.startswith("file:") and not allow_file_protocol:
+ raise LockfilePackageMetaInvalidError("tarball cannot point to a file, got {}".format(tarball_url))
+ return tarball_url.split("?")[0]
+
+
def _parse_sky_id_from_tarball_url(tarball_url):
    """
    :param tarball_url: tarball url
    :type tarball_url: string
    :rtype: string
    """
    # "file:" urls carry no skynet id.
    if tarball_url.startswith("file:"):
        return ""

    query = urlparse.urlparse(tarball_url).query
    rbtorrent_values = urlparse.parse_qs(query).get("rbtorrent")

    # parse_qs never maps a present key to an empty list, so truthiness
    # is equivalent to the "is None" check.
    return "rbtorrent:{}".format(rbtorrent_values[0]) if rbtorrent_values else ""
+
+
+def _parse_package_integrity(integrity):
+ """
+ Returns tuple of algorithm and hash (hex).
+ :param integrity: package integrity in format "{algo}-{base64_of_hash}"
+ :type integrity: string
+ :rtype: (str, str)
+ """
+ algo, hash_b64 = integrity.split("-", 1)
+
+ try:
+ hash_hex = binascii.hexlify(base64.b64decode(hash_b64))
+ except TypeError as e:
+ raise LockfilePackageMetaInvalidError(
+ "Invalid package integrity encoding, integrity: {}, error: {}".format(integrity, e)
+ )
+
+ return (algo, hash_hex)
diff --git a/build/plugins/lib/nots/package_manager/pnpm/package_manager.py b/build/plugins/lib/nots/package_manager/pnpm/package_manager.py
new file mode 100644
index 0000000000..1aa4b33d11
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/package_manager.py
@@ -0,0 +1,262 @@
+import os
+import shutil
+
+from six import iteritems
+
+from .lockfile import PnpmLockfile
+from .utils import build_lockfile_path, build_ws_config_path
+from .workspace import PnpmWorkspace
+from ..base import BasePackageManager, PackageManagerError
+from ..base.constants import NODE_MODULES_WORKSPACE_BUNDLE_FILENAME
+from ..base.node_modules_bundler import bundle_node_modules
+from ..base.utils import b_rooted, build_nm_bundle_path, build_pj_path, home_dir, s_rooted
+
+
class PnpmPackageManager(BasePackageManager):
    """
    Package manager implementation on top of pnpm.

    Builds `node_modules` from the source lockfiles and workspace
    configs, and (in build mode) bundles the result for dependants.
    """

    # Store paths inside node_modules, used in isolated (`tier 0`) mode.
    _STORE_NM_PATH = os.path.join(".pnpm", "store")
    _VSTORE_NM_PATH = os.path.join(".pnpm", "virtual-store")
    _STORE_VER = "v3"

    @classmethod
    def load_lockfile(cls, path):
        """
        :param path: path to lockfile
        :type path: str
        :rtype: PnpmLockfile
        """
        return PnpmLockfile.load(path)

    @classmethod
    def load_lockfile_from_dir(cls, dir_path):
        """
        :param dir_path: path to directory with lockfile
        :type dir_path: str
        :rtype: PnpmLockfile
        """
        return cls.load_lockfile(build_lockfile_path(dir_path))

    def create_node_modules(self, yatool_prebuilder_path=None, local_cli=False):
        """
        Creates node_modules directory according to the lockfile.

        :param yatool_prebuilder_path: path to the prebuilder; addons are applied only when given
        :type yatool_prebuilder_path: str|None
        :param local_cli: True when run locally via `ya tool nots` (shared store, no bundling)
        :type local_cli: bool
        """
        ws = self._prepare_workspace()

        # Pure `tier 0` logic - isolated stores in the `build_root` (works in `distbuild` and `CI autocheck`)
        store_dir = self._nm_path(self._STORE_NM_PATH)
        virtual_store_dir = self._nm_path(self._VSTORE_NM_PATH)

        # Local mode optimizations (run from the `ya tool nots`)
        if local_cli:
            # Use single CAS for all the projects built locally
            store_dir = os.path.join(home_dir(), ".cache", "pnpm-store")
            # It's a default value of pnpm itself. But it should be defined explicitly for not using values from the lockfiles or from the previous installations.
            virtual_store_dir = self._nm_path('.pnpm')

        install_cmd = [
            "install",
            "--frozen-lockfile",
            "--ignore-pnpmfile",
            "--ignore-scripts",
            "--no-verify-store-integrity",
            "--offline",
            "--package-import-method",
            "hardlink",
            # "--registry" will be set later inside self._exec_command()
            "--store-dir",
            store_dir,
            "--strict-peer-dependencies",
            "--virtual-store-dir",
            virtual_store_dir,
        ]

        # Lockfile format v6 requires an explicit opt-in flag and auto-installed peers.
        lockfile_version = self.load_lockfile_from_dir(self.sources_path).data["lockfileVersion"]
        if lockfile_version == '6.0':
            install_cmd.append("--use-lockfile-v6")
            os.environ['npm_config_auto_install_peers'] = 'true'

        self._exec_command(install_cmd)

        self._run_apply_addons_if_need(yatool_prebuilder_path, virtual_store_dir)
        self._replace_internal_lockfile_with_original(virtual_store_dir)

        # In build mode the resulting node_modules is packed for reuse by dependants.
        if not local_cli:
            bundle_node_modules(
                build_root=self.build_root,
                node_modules_path=self._nm_path(),
                peers=ws.get_paths(base_path=self.module_path, ignore_self=True),
                bundle_path=os.path.join(self.build_path, NODE_MODULES_WORKSPACE_BUNDLE_FILENAME),
            )

    def calc_node_modules_inouts(self, local_cli=False):
        """
        Returns input and output paths for command that creates `node_modules` bundle.
        Errors: errors caught while processing lockfiles
        Inputs:
            - source package.json and lockfile,
            - built package.jsons of all deps,
            - merged lockfiles and workspace configs of direct non-leave deps,
            - tarballs.
        Outputs:
            - merged lockfile,
            - generated workspace config,
            - created node_modules bundle.
        :param local_cli: True when run locally (the node_modules bundle output is omitted)
        :type local_cli: bool
        :rtype: (list of errors, list of str, list of str)
        """
        ins = [
            s_rooted(build_pj_path(self.module_path)),
        ]
        outs = []

        pj = self.load_package_json_from_dir(self.sources_path)
        if pj.has_dependencies():
            ins.extend(
                [
                    s_rooted(build_lockfile_path(self.module_path)),
                ]
            )
            outs.extend(
                [
                    b_rooted(build_lockfile_path(self.module_path)),
                    b_rooted(build_ws_config_path(self.module_path)),
                ]
            )
            if not local_cli:
                outs.extend([b_rooted(build_nm_bundle_path(self.module_path))])

        # Source lockfiles are used only to get tarballs info.
        src_lf_paths = [build_lockfile_path(self.sources_path)]

        for [dep_src_path, (_, depth)] in iteritems(pj.get_workspace_map(ignore_self=True)):
            # Module path is the source path relative to the sources root.
            dep_mod_path = dep_src_path[len(self.sources_root) + 1:]
            # pnpm requires all package.jsons.
            ins.append(b_rooted(build_pj_path(dep_mod_path)))

            dep_lf_src_path = build_lockfile_path(dep_src_path)
            if not os.path.isfile(dep_lf_src_path):
                # It is ok for leaves.
                continue
            src_lf_paths.append(dep_lf_src_path)

            # Direct deps also contribute their merged lockfile and workspace config.
            if depth == 1:
                ins.append(b_rooted(build_ws_config_path(dep_mod_path)))
                ins.append(b_rooted(build_lockfile_path(dep_mod_path)))

        errors = []
        try:
            for pkg in self.extract_packages_meta_from_lockfiles(src_lf_paths):
                ins.append(b_rooted(self._contrib_tarball_path(pkg)))
        except Exception as e:
            # Lockfile problems are reported to the caller instead of aborting the graph calculation.
            errors.append(e)
            pass

        return errors, ins, outs

    def extract_packages_meta_from_lockfiles(self, lf_paths):
        """
        Yields tarball metas from the given lockfiles, deduplicated by tarball path.

        :param lf_paths: paths to lockfiles
        :type lf_paths: iterable of str
        :rtype: iterable of LockfilePackageMeta
        :raises PackageManagerError: when any of the lockfiles fails to parse
        """
        tarballs = set()
        errors = []

        for lf_path in lf_paths:
            try:
                for pkg in self.load_lockfile(lf_path).get_packages_meta():
                    if pkg.tarball_path not in tarballs:
                        tarballs.add(pkg.tarball_path)
                        yield pkg
            except Exception as e:
                errors.append("{}: {}".format(lf_path, e))

        if errors:
            raise PackageManagerError("Unable to process some lockfiles:\n{}".format("\n".join(errors)))

    def _prepare_workspace(self):
        """
        Writes the output package.json and the merged workspace config and lockfile.

        :rtype: PnpmWorkspace
        """
        pj = self._build_package_json()

        ws = PnpmWorkspace(build_ws_config_path(self.build_path))
        ws.set_from_package_json(pj)

        dep_paths = ws.get_paths(ignore_self=True)
        self._build_merged_workspace_config(ws, dep_paths)
        self._build_merged_lockfile(dep_paths)

        return ws

    def _build_package_json(self):
        """
        Copies the source package.json into the build directory.

        :rtype: PackageJson
        """
        pj = self.load_package_json_from_dir(self.sources_path)

        if not os.path.exists(self.build_path):
            os.makedirs(self.build_path, exist_ok=True)

        pj.path = build_pj_path(self.build_path)
        pj.write()

        return pj

    def _build_merged_lockfile(self, dep_paths):
        """
        Merges the dependency lockfiles into the source one and writes the result to the build directory.

        :type dep_paths: list of str
        :rtype: PnpmLockfile
        """
        lf = self.load_lockfile_from_dir(self.sources_path)
        # Change to the output path for correct path calcs on merging.
        lf.path = build_lockfile_path(self.build_path)

        for dep_path in dep_paths:
            lf_path = build_lockfile_path(dep_path)
            if os.path.isfile(lf_path):
                lf.merge(self.load_lockfile(lf_path))

        # Point tarball resolutions at the contrib copies instead of the registry.
        lf.update_tarball_resolutions(lambda p: self._contrib_tarball_url(p))
        lf.write()

    def _build_merged_workspace_config(self, ws, dep_paths):
        """
        NOTE: This method mutates `ws`.
        :type ws: PnpmWorkspaceConfig
        :type dep_paths: list of str
        """
        for dep_path in dep_paths:
            ws_config_path = build_ws_config_path(dep_path)
            if os.path.isfile(ws_config_path):
                ws.merge(PnpmWorkspace.load(ws_config_path))

        ws.write()

    def _run_apply_addons_if_need(self, yatool_prebuilder_path, virtual_store_dir):
        # Prebuilder addons are optional; do nothing when no prebuilder is provided.
        if not yatool_prebuilder_path:
            return

        self._exec_command(
            [
                "apply-addons",
                "--virtual-store",
                virtual_store_dir,
            ],
            include_defaults=False,
            script_path=os.path.join(yatool_prebuilder_path, "build", "bin", "prebuilder.js"),
        )

    def _replace_internal_lockfile_with_original(self, virtual_store_dir):
        # pnpm keeps its own copy of the lockfile inside the virtual store;
        # overwrite it with the original so that subsequent runs see the source state.
        original_lf_path = build_lockfile_path(self.sources_path)
        vs_lf_path = os.path.join(virtual_store_dir, "lock.yaml")

        shutil.copyfile(original_lf_path, vs_lf_path)

    def _get_default_options(self):
        # Non-interactive, plain-text output suitable for build logs.
        return super(PnpmPackageManager, self)._get_default_options() + [
            "--stream",
            "--reporter",
            "append-only",
            "--no-color",
        ]

    def _get_debug_log_path(self):
        # pnpm writes its debug log inside node_modules.
        return self._nm_path(".pnpm-debug.log")
diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/test_lockfile.py b/build/plugins/lib/nots/package_manager/pnpm/tests/test_lockfile.py
new file mode 100644
index 0000000000..d696f4d53b
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/tests/test_lockfile.py
@@ -0,0 +1,404 @@
+import pytest
+import io
+
+from build.plugins.lib.nots.package_manager.pnpm.lockfile import PnpmLockfile
+
+
+@pytest.fixture()
+def patch_open_correct_version(monkeypatch):
+ def mock_open(a, b):
+ file_like = io.BytesIO(b'lockfileVersion: 5.4')
+ return io.BufferedReader(file_like)
+
+ monkeypatch.setattr(io, "open", mock_open)
+
+
+@pytest.fixture()
+def patch_open_v6(monkeypatch):
+ def mock_open(a, b):
+ file_like = io.BytesIO(b'lockfileVersion: "6.0"')
+ return io.BufferedReader(file_like)
+
+ monkeypatch.setattr(io, "open", mock_open)
+
+
+@pytest.fixture()
+def patch_open_incorrect_version(monkeypatch):
+ def mock_open(a, b):
+ file_like = io.BytesIO(b'lockfileVersion: 0')
+ return io.BufferedReader(file_like)
+
+ monkeypatch.setattr(io, "open", mock_open)
+
+
+@pytest.fixture()
+def patch_open_no_version(monkeypatch):
+ def mock_open(a, b):
+ file_like = io.BytesIO(b'some text')
+ return io.BufferedReader(file_like)
+
+ monkeypatch.setattr(io, "open", mock_open)
+
+
+def test_lockfile_read_yaml_ok(patch_open_correct_version):
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+
+ lf.read()
+
+ assert lf.data == {"lockfileVersion": 5.4}
+
+
+def test_lockfile_read_v6(patch_open_v6):
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+
+ lf.read()
+
+ assert lf.data == {"lockfileVersion": '6.0'}
+
+
+def test_lockfile_read_yaml_error_incorrect_lockfile_version(patch_open_incorrect_version):
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+
+ with pytest.raises(Exception) as e:
+ lf.read()
+
+ assert str(e.value) == (
+ 'Error of project configuration: /pnpm-lock.yaml has lockfileVersion: 0. '
+ + 'This version is not supported. Please, delete pnpm-lock.yaml and regenerate it using "ya tool nots --clean update-lockfile"'
+ )
+
+
+def test_lockfile_read_yaml_error_no_lockfile_version(patch_open_no_version):
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+
+ with pytest.raises(Exception) as e:
+ lf.read()
+
+ assert str(e.value) == (
+ 'Error of project configuration: /pnpm-lock.yaml has lockfileVersion: <no-version>. '
+ + 'This version is not supported. Please, delete pnpm-lock.yaml and regenerate it using "ya tool nots --clean update-lockfile"'
+ )
+
+
+def test_lockfile_get_packages_meta_ok():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/@babel/cli/7.6.2_@babel+core@7.6.2": {
+ "resolution": {
+ "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==",
+ "tarball": "@babel%2fcli/-/cli-7.6.2.tgz?rbtorrent=cb1849da3e4947e56a8f6bde6a1ec42703ddd187",
+ },
+ },
+ },
+ }
+
+ packages = list(lf.get_packages_meta())
+ pkg = packages[0]
+
+ assert len(packages) == 1
+ assert pkg.tarball_url == "@babel%2fcli/-/cli-7.6.2.tgz"
+ assert pkg.sky_id == "rbtorrent:cb1849da3e4947e56a8f6bde6a1ec42703ddd187"
+ assert (
+ pkg.integrity
+ == b"24367e4ff6ebf693df4f696600c272a490d34d31ccf5e3c3fc40f5d13463473255744572f89077891961cd8993b796243601efc561a55159cbb5dbfaaee883ad"
+ )
+ assert pkg.integrity_algorithm == "sha512"
+
+
+def test_lockfile_get_packages_empty():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {}
+
+ assert len(list(lf.get_packages_meta())) == 0
+
+
+def test_package_meta_invalid_key():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "in/valid": {},
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key in/valid, missing 'resolution' key"
+
+
+def test_package_meta_missing_resolution():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/valid/1.2.3": {},
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key /valid/1.2.3, missing 'resolution' key"
+
+
+def test_package_meta_missing_tarball():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/valid/1.2.3": {
+ "resolution": {},
+ },
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert str(e.value) == "Invalid package meta for key /valid/1.2.3, missing 'tarball' key"
+
+
+def test_package_meta_missing_rbtorrent():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/valid/1.2.3": {
+ "resolution": {
+ "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==",
+ "tarball": "valid-without-rbtorrent-1.2.3.tgz",
+ },
+ },
+ },
+ }
+
+ packages = list(lf.get_packages_meta())
+ pkg = packages[0]
+
+ assert len(packages) == 1
+ assert pkg.sky_id == ""
+
+
+def test_lockfile_meta_file_tarball_prohibits_file_protocol():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/@babel/cli/7.6.2": {
+ "resolution": {
+ "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==",
+ "tarball": "file:/some/abs/path.tgz",
+ },
+ },
+ },
+ }
+
+ with pytest.raises(TypeError) as e:
+ list(lf.get_packages_meta())
+
+ assert (
+ str(e.value)
+ == "Invalid package meta for key /@babel/cli/7.6.2, parse error: tarball cannot point to a file, got file:/some/abs/path.tgz"
+ )
+
+
+def test_lockfile_update_tarball_resolutions_ok():
+ lf = PnpmLockfile(path="/pnpm-lock.yaml")
+ lf.data = {
+ "packages": {
+ "/@babel/cli/7.6.2_@babel+core@7.6.2": {
+ "resolution": {
+ "integrity": "sha512-JDZ+T/br9pPfT2lmAMJypJDTTTHM9ePD/ED10TRjRzJVdEVy+JB3iRlhzYmTt5YkNgHvxWGlUVnLtdv6ruiDrQ==",
+ "tarball": "@babel%2fcli/-/cli-7.6.2.tgz?rbtorrent=cb1849da3e4947e56a8f6bde6a1ec42703ddd187",
+ },
+ },
+ },
+ }
+
+ lf.update_tarball_resolutions(lambda p: p.tarball_url)
+
+ assert (
+ lf.data["packages"]["/@babel/cli/7.6.2_@babel+core@7.6.2"]["resolution"]["tarball"]
+ == "@babel%2fcli/-/cli-7.6.2.tgz"
+ )
+
+
+def test_lockfile_merge():
+ lf1 = PnpmLockfile(path="/foo/pnpm-lock.yaml")
+ lf1.data = {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ },
+ }
+
+ lf2 = PnpmLockfile(path="/bar/pnpm-lock.yaml")
+ lf2.data = {
+ "dependencies": {
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "b": "1.0.0",
+ },
+ "packages": {
+ "/b/1.0.0": {},
+ },
+ }
+
+ lf3 = PnpmLockfile(path="/another/baz/pnpm-lock.yaml")
+ lf3.data = {
+ "importers": {
+ ".": {
+ "dependencies": {
+ "@a/qux": "link:../qux",
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "@a/qux": "workspace:../qux",
+ "a": "1.0.0",
+ },
+ },
+ "../qux": {
+ "dependencies": {
+ "b": "1.0.1",
+ },
+ "specifiers": {
+ "b": "1.0.1",
+ },
+ },
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ "/b/1.0.1": {},
+ },
+ }
+
+ lf4 = PnpmLockfile(path="/another/quux/pnpm-lock.yaml")
+ lf4.data = {
+ "dependencies": {
+ "@a/bar": "link:../../bar",
+ },
+ "specifiers": {
+ "@a/bar": "workspace:../../bar",
+ },
+ }
+
+ lf1.merge(lf2)
+ lf1.merge(lf3)
+ lf1.merge(lf4)
+
+ assert lf1.data == {
+ "importers": {
+ ".": {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ },
+ "../bar": {
+ "dependencies": {
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "b": "1.0.0",
+ },
+ },
+ "../another/baz": {
+ "dependencies": {
+ "@a/qux": "link:../qux",
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "@a/qux": "workspace:../qux",
+ "a": "1.0.0",
+ },
+ },
+ "../another/qux": {
+ "dependencies": {
+ "b": "1.0.1",
+ },
+ "specifiers": {
+ "b": "1.0.1",
+ },
+ },
+ "../another/quux": {
+ "dependencies": {
+ "@a/bar": "link:../../bar",
+ },
+ "specifiers": {
+ "@a/bar": "workspace:../../bar",
+ },
+ },
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ "/b/1.0.0": {},
+ "/b/1.0.1": {},
+ },
+ }
+
+
+def test_lockfile_merge_dont_overrides_packages():
+ lf1 = PnpmLockfile(path="/foo/pnpm-lock.yaml")
+ lf1.data = {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ },
+ }
+
+ lf2 = PnpmLockfile(path="/bar/pnpm-lock.yaml")
+ lf2.data = {
+ "dependencies": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ "packages": {
+ "/a/1.0.0": {
+ "overriden": True,
+ },
+ "/b/1.0.0": {},
+ },
+ }
+
+ lf1.merge(lf2)
+
+ assert lf1.data == {
+ "importers": {
+ ".": {
+ "dependencies": {
+ "a": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ },
+ },
+ "../bar": {
+ "dependencies": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ "specifiers": {
+ "a": "1.0.0",
+ "b": "1.0.0",
+ },
+ },
+ },
+ "packages": {
+ "/a/1.0.0": {},
+ "/b/1.0.0": {},
+ },
+ }
diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/test_workspace.py b/build/plugins/lib/nots/package_manager/pnpm/tests/test_workspace.py
new file mode 100644
index 0000000000..ffc010de88
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/tests/test_workspace.py
@@ -0,0 +1,68 @@
+from build.plugins.lib.nots.package_manager.base import PackageJson
+from build.plugins.lib.nots.package_manager.pnpm.workspace import PnpmWorkspace
+
+
+def test_workspace_get_paths():
+ ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ ws.packages = set([".", "../bar", "../../another/baz"])
+
+ assert sorted(ws.get_paths()) == [
+ "/another/baz",
+ "/packages/bar",
+ "/packages/foo",
+ ]
+
+
+def test_workspace_get_paths_with_custom_base_path_without_self():
+ ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ ws.packages = set([".", "../bar", "../../another/baz"])
+
+ assert sorted(ws.get_paths(base_path="some/custom/dir", ignore_self=True)) == [
+ "some/another/baz",
+ "some/custom/bar",
+ ]
+
+
+def test_workspace_set_from_package_json():
+ ws = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ pj = PackageJson(path="/packages/foo/package.json")
+ pj.data = {
+ "dependencies": {
+ "@a/bar": "workspace:../bar",
+ },
+ "devDependencies": {
+ "@a/baz": "workspace:../../another/baz",
+ },
+ "peerDependencies": {
+ "@a/qux": "workspace:../../another/qux",
+ },
+ "optionalDependencies": {
+ "@a/quux": "workspace:../../another/quux",
+ },
+ }
+
+ ws.set_from_package_json(pj)
+
+ assert sorted(ws.get_paths()) == [
+ "/another/baz",
+ "/another/quux",
+ "/another/qux",
+ "/packages/bar",
+ "/packages/foo",
+ ]
+
+
+def test_workspace_merge():
+ ws1 = PnpmWorkspace(path="/packages/foo/pnpm-workspace.yaml")
+ ws1.packages = set([".", "../bar", "../../another/baz"])
+ ws2 = PnpmWorkspace(path="/another/baz/pnpm-workspace.yaml")
+ ws2.packages = set([".", "../qux"])
+
+ ws1.merge(ws2)
+
+ assert sorted(ws1.get_paths()) == [
+ "/another/baz",
+ "/another/qux",
+ "/packages/bar",
+ "/packages/foo",
+ ]
diff --git a/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make b/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make
new file mode 100644
index 0000000000..dd9ba0c946
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/tests/ya.make
@@ -0,0 +1,15 @@
+PY23_TEST()
+
+OWNER(g:frontend-build-platform)
+
+TEST_SRCS(
+ test_lockfile.py
+ test_workspace.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+ build/plugins/lib/nots/package_manager/pnpm
+)
+
+END()
diff --git a/build/plugins/lib/nots/package_manager/pnpm/utils.py b/build/plugins/lib/nots/package_manager/pnpm/utils.py
new file mode 100644
index 0000000000..1fa4291b9d
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/utils.py
@@ -0,0 +1,11 @@
+import os
+
+from .constants import PNPM_LOCKFILE_FILENAME, PNPM_WS_FILENAME
+
+
def build_lockfile_path(p):
    """
    Returns the path to the pnpm lockfile inside directory `p`.

    :param p: directory path
    :type p: str
    :rtype: str
    """
    return os.path.join(p, PNPM_LOCKFILE_FILENAME)
+
+
def build_ws_config_path(p):
    """
    Returns the path to the pnpm workspace config inside directory `p`.

    :param p: directory path
    :type p: str
    :rtype: str
    """
    return os.path.join(p, PNPM_WS_FILENAME)
diff --git a/build/plugins/lib/nots/package_manager/pnpm/workspace.py b/build/plugins/lib/nots/package_manager/pnpm/workspace.py
new file mode 100644
index 0000000000..e596e20a18
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/workspace.py
@@ -0,0 +1,81 @@
+import os
+import yaml
+
+
class PnpmWorkspace(object):
    """In-memory representation of a pnpm-workspace.yaml file."""

    @classmethod
    def load(cls, path):
        """
        Reads a workspace config from the given path.
        :param path: absolute path to pnpm-workspace.yaml
        :type path: str
        :rtype: PnpmWorkspace
        """
        workspace = cls(path)
        workspace.read()

        return workspace

    def __init__(self, path):
        if not os.path.isabs(path):
            raise TypeError("Absolute path required, given: {}".format(path))

        self.path = path
        # NOTE: pnpm requires relative workspace paths.
        self.packages = set()

    def read(self):
        """Loads the package set from the config file."""
        with open(self.path) as f:
            parsed = yaml.load(f, Loader=yaml.CSafeLoader)
            self.packages = set(parsed.get("packages", []))

    def write(self, path=None):
        """
        Dumps the package set to the given path (defaults to `self.path`).
        :type path: str|None
        """
        target_path = path or self.path

        with open(target_path, "w") as f:
            yaml.dump({"packages": list(self.packages)}, f, Dumper=yaml.CSafeDumper)

    def get_paths(self, base_path=None, ignore_self=False):
        """
        Returns absolute paths of the workspace packages.
        :param base_path: base path to resolve relative dep paths
        :type base_path: str
        :param ignore_self: whether path of the current module will be excluded (if present)
        :type ignore_self: bool
        :rtype: list of str
        """
        root = os.path.dirname(self.path) if base_path is None else base_path

        paths = []
        for pkg_path in self.packages:
            if ignore_self and pkg_path == ".":
                continue
            paths.append(os.path.normpath(os.path.join(root, pkg_path)))

        return paths

    def set_from_package_json(self, package_json):
        """
        Sets packages to "workspace" deps from given package.json.
        :param package_json: package.json of workspace
        :type package_json: PackageJson
        """
        ws_dir = os.path.dirname(self.path)
        if os.path.dirname(package_json.path) != ws_dir:
            raise TypeError(
                "package.json should be in workspace directory {}, given: {}".format(
                    ws_dir, package_json.path
                )
            )

        self.packages = {dep_path for _, dep_path in package_json.get_workspace_dep_spec_paths()}
        # The workspace always includes its own directory.
        self.packages.add(".")

    def merge(self, ws):
        """
        Adds `ws`'s packages to the workspace.
        :param ws: workspace to merge
        :type ws: PnpmWorkspace
        """
        own_dir = os.path.dirname(self.path)
        other_dir = os.path.dirname(ws.path)

        for rel_path in ws.packages:
            abs_path = os.path.normpath(os.path.join(other_dir, rel_path))
            self.packages.add(os.path.relpath(abs_path, own_dir))
diff --git a/build/plugins/lib/nots/package_manager/pnpm/ya.make b/build/plugins/lib/nots/package_manager/pnpm/ya.make
new file mode 100644
index 0000000000..f57ae4a2ba
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/pnpm/ya.make
@@ -0,0 +1,24 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+ constants.py
+ lockfile.py
+ package_manager.py
+ workspace.py
+ utils.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+ contrib/python/PyYAML
+ contrib/python/six
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/package_manager/ya.make b/build/plugins/lib/nots/package_manager/ya.make
new file mode 100644
index 0000000000..f001bd5494
--- /dev/null
+++ b/build/plugins/lib/nots/package_manager/ya.make
@@ -0,0 +1,19 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager/base
+ build/plugins/lib/nots/package_manager/pnpm
+)
+
+END()
+
+RECURSE(
+ base
+ pnpm
+)
diff --git a/build/plugins/lib/nots/semver/__init__.py b/build/plugins/lib/nots/semver/__init__.py
new file mode 100644
index 0000000000..be4319f9f3
--- /dev/null
+++ b/build/plugins/lib/nots/semver/__init__.py
@@ -0,0 +1,7 @@
from .semver import Version, Operator, VersionRange

# Public API of the semver helper package.
__all__ = [
    "Version",
    "Operator",
    "VersionRange",
]
diff --git a/build/plugins/lib/nots/semver/semver.py b/build/plugins/lib/nots/semver/semver.py
new file mode 100644
index 0000000000..1398da8586
--- /dev/null
+++ b/build/plugins/lib/nots/semver/semver.py
@@ -0,0 +1,244 @@
+import re
+
+
class Version:
    """
    This class is intended to provide utility methods to work with semver ranges.
    Right now it is limited to the simplest case: a ">=" operator followed by an exact version with no prerelease or build specification.
    Example: ">= 1.2.3"
    """

    @classmethod
    def from_str(cls, input):
        """
        :param str input: save exact formatted version e.g. 1.2.3
        :rtype: Version
        :raises: ValueError
        """
        parts = input.strip().split(".", 2)
        return cls(int(parts[0]), int(parts[1]), int(parts[2]))

    STABLE_VERSION_RE = re.compile(r'^\d+\.\d+\.\d+$')

    @classmethod
    def is_stable(cls, v):
        """
        Verifies that the version is in a supported format.

        :param v:string with the version
        :return: bool
        """
        return cls.STABLE_VERSION_RE.match(v) is not None

    @classmethod
    def cmp(cls, a, b):
        """
        Compare two versions. Should be used with "cmp_to_key" wrapper in sorted(), min(), max()...

        For example:
            sorted(["1.2.3", "2.4.2", "1.2.7"], key=cmp_to_key(Version.cmp))

        :param a:string with version or Version instance
        :param b:string with version or Version instance
        :return: int
        :raises: ValueError
        """
        lhs = a if isinstance(a, cls) else cls.from_str(a)
        rhs = b if isinstance(b, cls) else cls.from_str(b)

        if lhs < rhs:
            return -1
        if lhs > rhs:
            return 1
        return 0

    __slots__ = ("_values",)

    def __init__(self, major, minor, patch):
        """
        :param int major
        :param int minor
        :param int patch
        :raises ValueError
        """
        values = []
        # Validate in declaration order so the first negative part is reported.
        for name, raw in (("major", major), ("minor", minor), ("patch", patch)):
            value = int(raw)
            if value < 0:
                raise ValueError("{!r} is negative. A version can only be positive.".format(name))
            values.append(value)

        self._values = tuple(values)

    def __str__(self):
        return ".".join(str(part) for part in self._values)

    def __repr__(self):
        return '<Version({})>'.format(self)

    def __eq__(self, other):
        """
        :param Version|str other
        :rtype: bool
        """
        if isinstance(other, str):
            # Non-parseable strings can never be equal to a stable version.
            if not self.is_stable(other):
                return False
            other = self.from_str(other)

        return self.as_tuple() == other.as_tuple()

    def __ne__(self, other):
        return not self == other

    def __gt__(self, other):
        """
        :param Version other
        :rtype: bool
        """
        return self.as_tuple() > other.as_tuple()

    def __ge__(self, other):
        """
        :param Version other
        :rtype: bool
        """
        return self.as_tuple() >= other.as_tuple()

    def __lt__(self, other):
        """
        :param Version other
        :rtype: bool
        """
        return self.as_tuple() < other.as_tuple()

    def __le__(self, other):
        """
        :param Version other
        :rtype: bool
        """
        return self.as_tuple() <= other.as_tuple()

    @property
    def major(self):
        """The major part of the version (read-only)."""
        return self._values[0]

    @major.setter
    def major(self, value):
        raise AttributeError("Attribute 'major' is readonly")

    @property
    def minor(self):
        """The minor part of the version (read-only)."""
        return self._values[1]

    @minor.setter
    def minor(self, value):
        raise AttributeError("Attribute 'minor' is readonly")

    @property
    def patch(self):
        """The patch part of the version (read-only)."""
        return self._values[2]

    @patch.setter
    def patch(self, value):
        raise AttributeError("Attribute 'patch' is readonly")

    def as_tuple(self):
        """
        :rtype: tuple
        """
        return self._values
+
+
class Operator:
    """Supported semver comparison operators."""

    EQ = "="
    GT = ">"
    GE = ">="
    LT = "<"
    LE = "<="
+
+
class VersionRange:
    """
    A simple semver range: an optional operator (">=" or "=", defaulting to "=")
    paired with an exact stable version.
    """

    @classmethod
    def operator_is_ok(cls, operator):
        """
        Checks whether the operator is supported (GE, EQ or None).

        :param str|None operator
        :rtype: bool
        """
        # Membership test instead of list.count(): returns a proper bool
        # and the first parameter of a classmethod is `cls`, not `self`.
        return operator in (Operator.GE, Operator.EQ, None)

    @classmethod
    def from_str(cls, input):
        """
        :param str input
        :rtype: VersionRange
        :raises: ValueError
        """
        m = re.match(r"^\s*([<>=]+)?\s*(\d+\.\d+\.\d+)\s*$", input)
        res = m.groups() if m else None
        if not res or not cls.operator_is_ok(res[0]):
            raise ValueError(
                "Unsupported version range: '{}'. Currently we only support ranges with stable versions and GE / EQ: '>= 1.2.3' / '= 1.2.3' / '1.2.3'".format(
                    input
                )
            )

        version = Version.from_str(res[1])

        return cls(res[0], version)

    __slots__ = ("_operator", "_version")

    def __init__(self, operator, version):
        """
        :param str|None operator
        :param Version version
        :raises: ValueError
        """
        if not self.operator_is_ok(operator):
            raise ValueError("Unsupported range operator '{}'".format(operator))

        # None defaults to Operator.EQ
        self._operator = operator or Operator.EQ
        self._version = version

    @property
    def operator(self):
        """The comparison operator to be used (read-only)."""
        return self._operator

    @operator.setter
    def operator(self, value):
        raise AttributeError("Attribute 'operator' is readonly")

    @property
    def version(self):
        """Version to be used with the operator (read-only)."""
        return self._version

    @version.setter
    def version(self, value):
        raise AttributeError("Attribute 'version' is readonly")

    def is_satisfied_by(self, version):
        """
        :param Version version
        :rtype: bool
        :raises: ValueError
        """
        if self._operator == Operator.GE:
            return version >= self._version

        if self._operator == Operator.EQ:
            return version == self._version

        raise ValueError("Unsupported operator '{}'".format(self._operator))
diff --git a/build/plugins/lib/nots/semver/tests/test_version.py b/build/plugins/lib/nots/semver/tests/test_version.py
new file mode 100644
index 0000000000..e6c0e44225
--- /dev/null
+++ b/build/plugins/lib/nots/semver/tests/test_version.py
@@ -0,0 +1,269 @@
+from functools import cmp_to_key
+
+from build.plugins.lib.nots.semver import Version
+
+
+def test_from_str():
+ # arrange
+ version_str = "1.2.3"
+
+ # act
+ version = Version.from_str(version_str)
+
+ # assert
+ assert version.major == 1
+ assert version.minor == 2
+ assert version.patch == 3
+
+
+def test_from_str_bad_version():
+ # arrange
+ version_str = "best version imaginable"
+ error = None
+
+ # act
+ try:
+ Version.from_str(version_str)
+ except Exception as exception:
+ error = exception
+
+ # assert
+ assert error is not None
+
+
+def test_is_stable_true():
+ # arrange
+ version_str = "1.2.3"
+
+ # act + assert
+ assert Version.is_stable(version_str)
+
+
+def test_is_stable_false():
+ # arrange
+ version_str = "1.2.3-beta1"
+
+ # act + assert
+ assert not Version.is_stable(version_str)
+
+
+def test_is_stable_incorrect():
+ # arrange
+ version_str = "v1.2.3"
+
+ # act + assert
+ assert not Version.is_stable(version_str)
+
+
+def test_cmp_lt():
+ # arrange
+ a = Version.from_str("1.2.3")
+ b = Version.from_str("1.2.5")
+
+ # act + assert
+ assert Version.cmp(a, b) == -1
+
+
+def test_cmp_gt():
+ # arrange
+ a = Version.from_str("1.2.3")
+ b = Version.from_str("1.2.2")
+
+ # act + assert
+ assert Version.cmp(a, b) == 1
+
+
+def test_cmp_eq():
+ # arrange
+ a = Version.from_str("1.2.3")
+ b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert Version.cmp(a, b) == 0
+
+
+def test_cmp_lt_str():
+ # arrange
+ a = "1.2.3"
+ b = "1.2.5"
+
+ # act + assert
+ assert Version.cmp(a, b) == -1
+
+
+def test_cmp_gt_str():
+ # arrange
+ a = "1.2.3"
+ b = "1.2.2"
+
+ # act + assert
+ assert Version.cmp(a, b) == 1
+
+
+def test_cmp_eq_str():
+ # arrange
+ a = "1.2.3"
+ b = "1.2.3"
+
+ # act + assert
+ assert Version.cmp(a, b) == 0
+
+
+def test_cmp_usage_in_sorted_asc():
+ # arrange
+ unsorted = ["1.2.3", "2.4.2", "1.2.7"]
+
+ # act + assert
+ assert sorted(unsorted, key=cmp_to_key(Version.cmp)) == ["1.2.3", "1.2.7", "2.4.2"]
+
+
+def test_cmp_usage_in_sorted_desc():
+ # arrange
+ unsorted = ["1.2.3", "2.4.2", "1.2.7"]
+
+ # act + assert
+ assert sorted(unsorted, key=cmp_to_key(Version.cmp), reverse=True) == ["2.4.2", "1.2.7", "1.2.3"]
+
+
+def test_init_negative_numbers():
+ # arrange
+ major = 1
+ minor = -2
+ patch = 3
+
+ error = None
+
+ # act
+ try:
+ Version(major, minor, patch)
+ except Exception as exception:
+ error = exception
+
+ # assert
+ assert isinstance(error, ValueError)
+ assert str(error) == "'minor' is negative. A version can only be positive."
+
+
+def test_eq():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a == version_b
+
+
+def test_eq_negative():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("3.2.1")
+
+ # act + assert
+ assert not version_a == version_b
+
+
+def test_eq_with_str():
+ # arrange
+ version = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version == "1.2.3"
+ assert not version == "1.2.4"
+
+
+def test_eq_with_invalid_str():
+ # arrange
+ version = Version.from_str("1.2.3")
+
+ # act + assert
+ assert not version == "bla-bla"
+ assert not version == "1.2.3-beta"
+
+
+def test_ne():
+ # arrange
+ version_a = Version.from_str("3.2.1")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a != version_b
+
+
+def test_ne_negative():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert not version_a != version_b
+
+
+def test_ne_with_str():
+ # arrange
+ version = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version != "1.2.4"
+ assert not version != "1.2.3"
+
+
+def test_gt():
+ # arrange
+ version_a = Version.from_str("3.2.1")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a > version_b
+
+
+def test_ge_equals():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a >= version_b
+
+
+def test_ge_exceeds():
+ # arrange
+ version_a = Version.from_str("3.2.1")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a >= version_b
+
+
+def test_lt():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("3.2.1")
+
+ # act + assert
+ assert version_a < version_b
+
+
+def test_le_equals():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version_a <= version_b
+
+
+def test_le_is_less():
+ # arrange
+ version_a = Version.from_str("1.2.3")
+ version_b = Version.from_str("3.2.1")
+
+ # act + assert
+ assert version_a <= version_b
+
+
+def test_to_tuple():
+ # arrange
+ version = Version.from_str("1.2.3")
+
+ # act + assert
+ assert version.as_tuple() == (1, 2, 3)
diff --git a/build/plugins/lib/nots/semver/tests/test_version_range.py b/build/plugins/lib/nots/semver/tests/test_version_range.py
new file mode 100644
index 0000000000..eb36d5d598
--- /dev/null
+++ b/build/plugins/lib/nots/semver/tests/test_version_range.py
@@ -0,0 +1,106 @@
+from build.plugins.lib.nots.semver import Version, Operator, VersionRange
+
+
def test_from_str():
    """VersionRange.from_str parses GE and EQ ranges, tolerating surrounding whitespace.

    Fix: the loop variable was named `range`, shadowing the builtin.
    """
    checklist = [
        (">= 1.2.3", VersionRange, Operator.GE),
        (">=1.2.3", VersionRange, Operator.GE),
        (">=  1.2.3", VersionRange, Operator.GE),
        ("  >= 1.2.3  ", VersionRange, Operator.GE),
        ("= 1.2.3", VersionRange, Operator.EQ),
        ("=1.2.3", VersionRange, Operator.EQ),
        ("=  1.2.3", VersionRange, Operator.EQ),
        ("  = 1.2.3  ", VersionRange, Operator.EQ),
        (" 1.2.3", VersionRange, Operator.EQ),
        ("1.2.3", VersionRange, Operator.EQ),
        ("  1.2.3", VersionRange, Operator.EQ),
        ("  1.2.3  ", VersionRange, Operator.EQ),
    ]

    for range_str, expected_class, expected_operator in checklist:
        version_range = VersionRange.from_str(range_str)

        assert isinstance(version_range, expected_class), f"unexpected class for '{range_str}': '{type(version_range)}'"
        assert (
            version_range.operator == expected_operator
        ), f"unexpected operator for '{range_str}': '{version_range.operator}'"
+
+
def test_from_str_error():
    """Unsupported range syntaxes must raise ValueError with the canonical message.

    Fix: `error` was never initialized, so a non-raising first case produced a
    NameError instead of a test failure, and a non-raising later case silently
    reused the exception from the previous iteration. It is now reset per case
    and its presence is asserted explicitly.
    """
    error_template = "Unsupported version range: '{}'. Currently we only support ranges with stable versions and GE / EQ: '>= 1.2.3' / '= 1.2.3' / '1.2.3'"
    checklist = [
        (r"¯\_(ツ)_/¯", ValueError, error_template),
        ("<= 1.2.3", ValueError, error_template),
        ("<=1.2.3", ValueError, error_template),
        ("<=  1.2.3", ValueError, error_template),
        ("  <= 1.2.3  ", ValueError, error_template),
        ("< 1.2.3", ValueError, error_template),
        ("<1.2.3", ValueError, error_template),
        ("<  1.2.3", ValueError, error_template),
        ("  < 1.2.3  ", ValueError, error_template),
        ("> 1.2.3", ValueError, error_template),
        (">1.2.3", ValueError, error_template),
        (">  1.2.3", ValueError, error_template),
        ("  > 1.2.3  ", ValueError, error_template),
        ("0.0.1-beta", ValueError, error_template),
    ]

    for range_str, expected_class, expected_msg_template in checklist:
        error = None  # reset so a passing parse cannot inherit a stale exception
        try:
            VersionRange.from_str(range_str)
        except Exception as exception:
            error = exception

        assert error is not None, f"expected an exception for '{range_str}', but none was raised"
        assert isinstance(error, expected_class), f"unexpected error class for '{range_str}': '{type(error)}'"
        assert str(error) == expected_msg_template.format(
            range_str
        ), f"unexpected error message for '{range_str}': '{error}'"
+
+
def test_init():
    """VersionRange() stores the operator (defaulting None to EQ) and the parsed version.

    Fix: the loop variable was named `range`, shadowing the builtin.
    """
    checklist = [
        (Operator.GE, "1.2.3", Operator.GE, Version(1, 2, 3)),
        (Operator.GE, "  1.2.3  ", Operator.GE, Version(1, 2, 3)),
        (Operator.GE, "0.0.1", Operator.GE, Version(0, 0, 1)),
        (Operator.EQ, "1.2.3", Operator.EQ, Version(1, 2, 3)),
        (Operator.EQ, "  1.2.3  ", Operator.EQ, Version(1, 2, 3)),
        (Operator.EQ, "0.0.1", Operator.EQ, Version(0, 0, 1)),
        (None, "1.2.3", Operator.EQ, Version(1, 2, 3)),
        (None, "  1.2.3  ", Operator.EQ, Version(1, 2, 3)),
        (None, "0.0.1", Operator.EQ, Version(0, 0, 1)),
    ]

    for operator_provided, version_provided, expected_operator, expected_version in checklist:
        version_range = VersionRange(operator_provided, Version.from_str(version_provided))

        assert (
            version_range.operator == expected_operator
        ), f"unexpected operator for '{operator_provided}', '{version_provided}': '{version_range.operator}'"
        assert (
            version_range.version == expected_version
        ), f"unexpected result version for '{operator_provided}', '{version_provided}': '{version_range.version}'"
+
+
def test_is_satisfied():
    """is_satisfied_by() honors GE and EQ semantics for concrete versions.

    Fix: the loop variable was named `range`, shadowing the builtin.
    """
    checklist = [
        (">= 1.2.3", "1.2.3", True),
        (">= 1.2.3", "1.2.4", True),
        (">= 1.2.3", "1.3.0", True),
        (">= 1.2.3", "2.0.0", True),
        (">= 1.2.3", "5.8.2", True),
        (">= 1.2.3", "1.2.2", False),
        (">= 1.2.3", "0.100.200", False),
        ("= 1.2.3", "1.2.3", True),
        ("1.2.3", "1.2.3", True),
        ("1.2.3", "1.2.2", False),
        ("1.2.3", "1.3.3", False),
        ("1.2.3", "2.2.3", False),
        ("12345.45634.456234", "12345.45634.456234", True),
        ("0.0.0", "0.0.0", True),
    ]

    for range_provided, version_provided, expected_result in checklist:
        version = Version.from_str(version_provided)
        version_range = VersionRange.from_str(range_provided)

        assert (
            version_range.is_satisfied_by(version) == expected_result
        ), f"Unexpected is_satisfied_by result for '{range_provided}', '{version_provided}': {(not expected_result)}"
diff --git a/build/plugins/lib/nots/semver/tests/ya.make b/build/plugins/lib/nots/semver/tests/ya.make
new file mode 100644
index 0000000000..b7605505f3
--- /dev/null
+++ b/build/plugins/lib/nots/semver/tests/ya.make
@@ -0,0 +1,14 @@
+PY3TEST()
+
+OWNER(g:frontend-build-platform)
+
+PEERDIR(
+ build/plugins/lib/nots/semver
+)
+
+TEST_SRCS(
+ test_version_range.py
+ test_version.py
+)
+
+END()
diff --git a/build/plugins/lib/nots/semver/ya.make b/build/plugins/lib/nots/semver/ya.make
new file mode 100644
index 0000000000..7d2be228f2
--- /dev/null
+++ b/build/plugins/lib/nots/semver/ya.make
@@ -0,0 +1,14 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+ semver.py
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/typescript/__init__.py b/build/plugins/lib/nots/typescript/__init__.py
new file mode 100644
index 0000000000..e0b3ee901c
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/__init__.py
@@ -0,0 +1,10 @@
+from .ts_config import DEFAULT_TS_CONFIG_FILE, TsConfig
+from .ts_errors import TsError, TsValidationError
+
+
+__all__ = [
+ "DEFAULT_TS_CONFIG_FILE",
+ "TsConfig",
+ "TsError",
+ "TsValidationError",
+]
diff --git a/build/plugins/lib/nots/typescript/tests/test_ts_config.py b/build/plugins/lib/nots/typescript/tests/test_ts_config.py
new file mode 100644
index 0000000000..cf67ca5ff9
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/tests/test_ts_config.py
@@ -0,0 +1,321 @@
+import pytest
+
+from build.plugins.lib.nots.typescript import TsConfig, TsValidationError
+
+
def test_ts_config_validate_valid():
    """A config with rootDir + outDir validates only when outDir is actually in use."""
    config = TsConfig(path="/tsconfig.json")
    config.data = {"compilerOptions": {"rootDir": "./src", "outDir": "./build"}}

    # An outDir that is configured but not in use must be reported
    with pytest.raises(TsValidationError) as excinfo:
        config.validate(use_outdir=False)
    assert excinfo.value.errors == [
        "'outDir' should be removed - it is not in use",
    ]

    # With outDir in use, validation passes without raising
    config.validate(use_outdir=True)
+
+
def test_ts_config_validate_empty():
    """An empty config reports every missing mandatory option."""
    config = TsConfig(path="/tsconfig.json")

    # Without outDir usage: only rootDir is required
    with pytest.raises(TsValidationError) as excinfo:
        config.validate(use_outdir=False)
    assert excinfo.value.errors == [
        "'rootDir' option is required",
    ]

    # With outDir usage: both rootDir and outDir are required
    with pytest.raises(TsValidationError) as excinfo:
        config.validate(use_outdir=True)
    assert excinfo.value.errors == [
        "'rootDir' option is required",
        "'outDir' option is required",
    ]
+
+
def test_ts_config_declaration_with_dir():
    """declaration=True passes validation when declarationDir is set."""
    config = TsConfig(path="/tsconfig.json")
    config.data = {
        "compilerOptions": {
            "rootDir": "./src",
            "declaration": True,
            "declarationDir": "some/dir",
        },
    }

    config.validate(use_outdir=False)
+
+
def test_ts_config_declaration_without_dir():
    """declaration=True without declarationDir is rejected when outDir is unused."""
    config = TsConfig(path="/tsconfig.json")
    config.data = {"compilerOptions": {"rootDir": "./src", "declaration": True}}

    with pytest.raises(TsValidationError) as excinfo:
        config.validate(use_outdir=False)

    assert excinfo.value.errors == [
        "'declarationDir' option is required when 'declaration' is set",
    ]
+
+
def test_ts_config_declaration_with_outdir():
    """When outDir is allowed it also routes declarations, so setting it is enough."""
    config = TsConfig(path="/tsconfig.json")
    config.data = {
        "compilerOptions": {
            "rootDir": "./src",
            "outDir": "some/dir",
            "declaration": True,
        },
    }

    config.validate(use_outdir=True)
+
+
def test_ts_config_validate_invalid_common():
    """Every unsupported option is reported at once, in a stable order."""
    config = TsConfig(path="/tsconfig.json")
    config.data = {
        "compilerOptions": {
            "preserveSymlinks": True,
            "rootDirs": [],
            "outFile": "./foo.js",
        },
        "references": [],
        "files": [],
        "include": [],
        "exclude": [],
    }

    with pytest.raises(TsValidationError) as excinfo:
        config.validate(use_outdir=True)

    assert excinfo.value.errors == [
        "'rootDir' option is required",
        "'outDir' option is required",
        "'outFile' option is not supported",
        "'preserveSymlinks' option is not supported due to pnpm limitations",
        "composite builds are not supported, use peerdirs in ya.make instead of 'references' option",
    ]
+
+
def test_ts_config_validate_invalid_local_outdir():
    """outDir pointing at the module root itself is rejected in all spellings."""
    config = TsConfig(path="/tsconfig.json")
    for bad_out_dir in [".", "", "./"]:
        config.data = {
            "compilerOptions": {
                "rootDir": "./",
                "outDir": bad_out_dir,
            },
        }

        with pytest.raises(TsValidationError) as excinfo:
            config.validate(use_outdir=True)

        assert excinfo.value.errors == [
            "'outDir' value '{}' is not supported, use directory name like 'build'".format(bad_out_dir),
        ]
+
+
def test_ts_config_validate_invalid_subdirs():
    """An outDir that escapes the module directory is rejected."""
    config = TsConfig(path="/foo/tsconfig.json")
    config.data = {
        "compilerOptions": {
            "rootDir": "/bar/src",
            "outDir": "../bar/build",
        },
    }

    with pytest.raises(TsValidationError) as excinfo:
        config.validate(use_outdir=True)

    assert excinfo.value.errors == [
        "'outDir' should be a subdirectory of the module",
    ]
+
+
def test_ts_config_compiler_options():
    """compiler_option() returns None for unknown keys and the raw value otherwise."""
    config = TsConfig(path="/tsconfig.json")

    assert config.compiler_option("invalid") is None

    config.data = {"compilerOptions": {"rootDir": "src"}}

    assert config.compiler_option("rootDir") == "src"
+
+
class TestTsConfigMerge:
    """Tests for TsConfig.merge(): own values win, inherited paths are rebased."""

    def test_merge_paths(self):
        """Own 'paths' must completely replace the inherited ones (no dict union)."""
        # arrange
        cfg_main = TsConfig(path="/foo/tsconfig.json")
        cfg_main.data = {"compilerOptions": {"paths": {"path1": ["src/path1"], "path2": ["src/path2"]}}}

        cfg_common = TsConfig(path="/foo/tsconfig.common.json")
        cfg_common.data = {
            "compilerOptions": {"paths": {"path0": ["src/path0"]}},
        }

        # act
        cfg_main.merge(".", cfg_common)

        # assert: path0 from the base config is NOT merged in
        assert cfg_main.data == {
            "compilerOptions": {"paths": {"path1": ["src/path1"], "path2": ["src/path2"]}},
        }

    def test_create_compiler_options(self):
        """Merging into an empty config creates 'compilerOptions' from the base."""
        # arrange
        cfg_main = TsConfig(path="/foo/tsconfig.json")
        cfg_main.data = {}

        cfg_common = TsConfig(path="/foo/config/tsconfig.common.json")
        cfg_common.data = {
            "compilerOptions": {
                "moduleResolution": "node",
            },
        }

        # act
        cfg_main.merge("config", cfg_common)

        # assert
        assert cfg_main.data == {
            "compilerOptions": {
                "moduleResolution": "node",
            },
        }

    def test_merge_compiler_options(self):
        """Option-level merge: own values win, inherited path options are rebased by rel_path."""
        # arrange
        cfg_main = TsConfig(path="/foo/tsconfig.json")
        cfg_main.data = {
            "compilerOptions": {
                "esModuleInterop": True,
                "moduleResolution": "nodenext",
                "rootDir": "./src",
            },
            "extraField1": False,
            "sameField": False,
        }

        cfg_common = TsConfig(path="/foo/config/tsconfig.common.json")
        cfg_common.data = {
            "compilerOptions": {
                "moduleResolution": "node",
                "outDir": "./out",
                "strict": True,
            },
            "extraField2": True,
            "sameField": True,
        }

        # act
        cfg_main.merge("config", cfg_common)

        # assert
        assert cfg_main.data == {
            "compilerOptions": {
                "esModuleInterop": True,  # own value
                "moduleResolution": "nodenext",  # replaced value
                "outDir": "config/out",  # resolved path
                "rootDir": "./src",  # own path value (untouched)
                "strict": True,  # inherited value
            },
            "extraField1": False,  # own root field
            "extraField2": True,  # inherited root field
            "sameField": False,  # prefer own value
        }
+
+
class TestTsConfigExtends:
    """Tests for TsConfig.inline_extend(): 'extends' resolution, arrays, recursion."""

    def create_empty_ts_config(self, path):
        # Stub for TsConfig.load: returns a config with empty data (no file I/O).
        cfg = TsConfig(path=path)
        cfg.data = {}
        return cfg

    def create_ts_config_with_data_once(self, path):
        # Stub for TsConfig.load: the first-level base config itself extends
        # another one, so inline_extend must recurse exactly one extra level.
        # NOTE(review): the compared path "/foo/./base-tsconfig.json" is not
        # normalized — presumably inline_extend joins paths verbatim; confirm
        # against TsConfig.extend_one if this ever breaks.
        cfg = TsConfig(path=path)

        if path == "/foo/./base-tsconfig.json":
            cfg.data = {"extends": "./extends/recursive/tsconfig.json"}
        else:
            cfg.data = {}

        return cfg

    def test_extends_empty(self):
        """No 'extends' key: nothing is inlined, no paths returned."""
        cfg_main = TsConfig(path="/foo/tsconfig.json")
        cfg_main.data = {}

        paths = cfg_main.inline_extend({})

        assert paths == []

    def test_extends_single_with_dot(self, monkeypatch):
        """A './'-relative extends is used as-is (no dep_paths lookup)."""
        monkeypatch.setattr(TsConfig, "load", self.create_empty_ts_config)

        cfg_main = TsConfig(path="/foo/tsconfig.json")
        cfg_main.data = dict({"extends": "./extends/tsconfig.json"})

        paths = cfg_main.inline_extend({})

        assert paths == ["./extends/tsconfig.json"]

    def test_extends_single_without_dot(self, monkeypatch):
        """A bare package reference is resolved through the dep_paths mapping."""
        monkeypatch.setattr(TsConfig, "load", self.create_empty_ts_config)

        cfg_main = TsConfig(path="/foo/tsconfig.json")
        cfg_main.data = dict({"extends": "extends/tsconfig.json"})

        paths = cfg_main.inline_extend({"extends": "dir/extends"})

        assert paths == ["dir/extends/tsconfig.json"]

    def test_extends_array(self, monkeypatch):
        """An 'extends' array resolves each entry and preserves order."""
        monkeypatch.setattr(TsConfig, "load", self.create_empty_ts_config)

        cfg_main = TsConfig(path="/foo/tsconfig.json")
        cfg_main.data = {"extends": ["extends/tsconfig1.json", "extends/tsconfig2.json"]}

        paths = cfg_main.inline_extend({"extends": "dir/extends"})

        assert paths == ["dir/extends/tsconfig1.json", "dir/extends/tsconfig2.json"]

    def test_extends_empty_array(self):
        """An empty 'extends' array yields no paths."""
        cfg_main = TsConfig(path="/foo/tsconfig.json")
        cfg_main.data = {"extends": []}

        paths = cfg_main.inline_extend({})

        assert paths == []

    def test_recursive_extend(self, monkeypatch):
        """Multi-level 'extends' chains are flattened into one ordered path list."""
        monkeypatch.setattr(TsConfig, "load", self.create_ts_config_with_data_once)

        cfg_main = TsConfig(path="/foo/tsconfig.json")
        cfg_main.data = {"extends": "./base-tsconfig.json"}

        paths = cfg_main.inline_extend({})

        assert paths == ["./base-tsconfig.json", "./extends/recursive/tsconfig.json"]
diff --git a/build/plugins/lib/nots/typescript/tests/test_ts_glob.py b/build/plugins/lib/nots/typescript/tests/test_ts_glob.py
new file mode 100644
index 0000000000..9301603e31
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/tests/test_ts_glob.py
@@ -0,0 +1,118 @@
+from build.plugins.lib.nots.typescript.ts_glob import ts_glob, TsGlobConfig
+
+
class TestTsGlobIncluding:
    """`include` patterns select both deep globs and bare directory entries."""

    ts_glob_config = TsGlobConfig(
        root_dir="src", out_dir="build", include=["src/module_a/**/*", "src/module_b/**/*", "src/module_x"]
    )

    def test_dir_include(self):
        """A bare directory entry in `include` matches files under it."""
        files = ["src/module_x/index.ts"]

        assert ts_glob(self.ts_glob_config, files) == ["src/module_x/index.ts"]

    def test_deep_include(self):
        """`**/*` patterns keep only the listed modules' files."""
        files = [
            "src/module_a/index.ts",
            "src/module_b/index.ts",
            "src/module_c/index.ts",
        ]

        assert ts_glob(self.ts_glob_config, files) == [
            "src/module_a/index.ts",
            "src/module_b/index.ts",
        ]
+
+
class TestTsGlobExcluding:
    """Files outside root_dir and inside out_dir are excluded from the result."""

    ts_glob_config = TsGlobConfig(root_dir="src", out_dir="build", include=["src/**/*"])

    def test_only_in_root_dir(self):
        """Anything not under `src` is filtered out."""
        files = [
            "CHANGELOG.md",
            "fake-src/one-more-src.ts",
            "src/index.ts",
        ]

        assert ts_glob(self.ts_glob_config, files) == ["src/index.ts"]

    def test_exclude_out_dir(self):
        """Build output under out_dir never survives the glob."""
        assert ts_glob(self.ts_glob_config, ["build/index.js"]) == []

    def test_exclude_out_dir_none(self):
        """With out_dir=None no output directory is excluded."""
        config = TsGlobConfig(root_dir=".", out_dir=None)

        assert ts_glob(config, ["build/index.js"]) == ["build/index.js"]

    def test_complex(self):
        """Mixed listing: only the `src` tree remains, sorted."""
        files = [
            "CHANGELOG.md",
            "fake-src/one-more-src.ts",
            "src/baz.ts",
            "src/index.ts",
            "src/required_file.ts",
        ]

        assert ts_glob(self.ts_glob_config, files) == ["src/baz.ts", "src/index.ts", "src/required_file.ts"]
+
+
class TestTsGlobNeedNormalization:
    """Same expectations as TestTsGlobExcluding, but with './'-prefixed config paths."""

    ts_glob_config = TsGlobConfig(root_dir="./src", out_dir="./build", include=["./src/**/*"])

    def test_only_in_root_dir(self):
        """'./'-prefixed root_dir still matches plain relative paths."""
        files = [
            "CHANGELOG.md",
            "fake-src/one-more-src.ts",
            "src/index.ts",
        ]

        assert ts_glob(self.ts_glob_config, files) == ["src/index.ts"]

    def test_exclude_out_dir(self):
        """'./'-prefixed out_dir is normalized before exclusion."""
        assert ts_glob(self.ts_glob_config, ["build/index.js"]) == []

    def test_exclude_out_dir_none(self):
        """'./.' root with out_dir=None excludes nothing."""
        config = TsGlobConfig(root_dir="./.", out_dir=None)

        assert ts_glob(config, ["build/index.js"]) == ["build/index.js"]

    def test_complex(self):
        """Mixed listing with normalized config: only the `src` tree remains."""
        files = [
            "CHANGELOG.md",
            "fake-src/one-more-src.ts",
            "src/baz.ts",
            "src/index.ts",
            "src/required_file.ts",
        ]

        assert ts_glob(self.ts_glob_config, files) == ["src/baz.ts", "src/index.ts", "src/required_file.ts"]
diff --git a/build/plugins/lib/nots/typescript/tests/ya.make b/build/plugins/lib/nots/typescript/tests/ya.make
new file mode 100644
index 0000000000..5af512f420
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/tests/ya.make
@@ -0,0 +1,14 @@
+PY3TEST()
+
+OWNER(g:frontend-build-platform)
+
+TEST_SRCS(
+ test_ts_config.py
+ test_ts_glob.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/typescript
+)
+
+END()
diff --git a/build/plugins/lib/nots/typescript/ts_config.py b/build/plugins/lib/nots/typescript/ts_config.py
new file mode 100644
index 0000000000..1b37feb400
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/ts_config.py
@@ -0,0 +1,282 @@
+import copy
+import os
+
+from .ts_errors import TsError, TsValidationError
+from .ts_glob import ts_glob, TsGlobConfig
+from ..package_manager.base import utils
+
+DEFAULT_TS_CONFIG_FILE = "tsconfig.json"
+
+
class RootFields:
    """Names of the tsconfig.json root-level fields this plugin understands."""

    extends = 'extends'

    exclude = 'exclude'
    files = 'files'
    include = 'include'

    compilerOptions = 'compilerOptions'

    # Root fields whose values are lists of paths and therefore need
    # rebasing relative to the extending config during merge().
    PATH_LIST_FIELDS = {
        exclude,
        files,
        include,
    }
+
+
class CompilerOptionsFields:
    """Names of the 'compilerOptions' keys this plugin inspects or rewrites."""

    baseUrl = 'baseUrl'
    declaration = 'declaration'
    declarationDir = 'declarationDir'
    outDir = 'outDir'
    rootDir = 'rootDir'

    # Options holding a single path that must be rebased during merge().
    # NOTE: declarationDir is deliberately absent — it is validated but
    # not rebased by merge() as written.
    PATH_FIELDS = {
        baseUrl,
        outDir,
        rootDir,
    }
+
+
class TsConfig(object):
    """In-memory representation of a tsconfig.json: reading, 'extends' inlining,
    merging, validation and writing back."""

    @classmethod
    def load(cls, path):
        """
        Construct a TsConfig and immediately read its file from disk.

        :param path: tsconfig.json path
        :type path: str
        :rtype: TsConfig
        """
        tsconfig = cls(path)
        tsconfig.read()

        return tsconfig

    def __init__(self, path):
        # rapidjson is imported lazily so merely importing this module does
        # not require the dependency; parsing happens only via instances.
        import rapidjson

        self.rj = rapidjson

        if not os.path.isabs(path):
            raise TypeError("Absolute path required, given: {}".format(path))

        self.path = path
        self.data = {}

    def read(self):
        """Parse the file at self.path into self.data; wraps any failure in TsError."""
        try:
            with open(self.path) as f:
                # tsconfig.json allows comments and trailing commas (JSON5-ish)
                self.data = self.rj.load(f, parse_mode=(self.rj.PM_COMMENTS | self.rj.PM_TRAILING_COMMAS))

        except Exception as e:
            raise TsError("Failed to read tsconfig {}: {}".format(self.path, e))

    def merge(self, rel_path, base_tsconfig):
        # type: (TsConfig, str, TsConfig) -> None
        """
        Merge a base (extended) config into this one. Own values always win;
        inherited path-like values are rebased by rel_path.

        :param rel_path: relative path to the configuration file we are merging in.
        It is required to set the relative paths correctly.

        :param base_tsconfig: base TsConfig we are merging with our TsConfig instance
        """
        if not base_tsconfig.data:
            return

        # 'data' from the file in 'extends'; deep-copied so rebasing does not
        # mutate the base config object
        base_data = copy.deepcopy(base_tsconfig.data)

        def relative_path(p):
            return os.path.normpath(os.path.join(rel_path, p))

        for root_field, root_value in base_data.items():
            # extends
            if root_field == RootFields.extends:
                # replace itself to its own `extends` (for multi level extends)
                self.data[RootFields.extends] = relative_path(root_value)

            # exclude, files, include — inherited only when absent here
            elif root_field in RootFields.PATH_LIST_FIELDS:
                if root_field not in self.data:
                    self.data[root_field] = [relative_path(p) for p in root_value]

            # compilerOptions — per-option merge, not wholesale replacement
            elif root_field == RootFields.compilerOptions:
                for option, option_value in root_value.items():
                    is_path_field = option in CompilerOptionsFields.PATH_FIELDS

                    if not self.has_compiler_option(option):
                        new_value = relative_path(option_value) if is_path_field else option_value
                        self.set_compiler_option(option, new_value)

            # other fields (just copy if it has not existed)
            elif root_field not in self.data:
                self.data[root_field] = root_value
                # NOTE(review): this `pass` is a redundant no-op — candidate
                # for removal in a behavior-neutral cleanup.
                pass

    def extend_one(self, dep_paths, ext_value):
        """
        Inline a single 'extends' reference (relative path or package reference),
        merging it into self recursively.

        :param dep_paths: dict of dependency names to their paths
        :param ext_value: a single value of the 'extends' field (may be falsy)
        :rtype: list of str — config paths merged in, in merge order
        """
        if not ext_value:
            return []

        if ext_value.startswith("."):
            base_config_path = ext_value

        else:
            # package reference: "<pkg>/<path-to-config>"
            dep_name = utils.extract_package_name_from_path(ext_value)
            # the rest part is the ext config path
            file_path_start = len(dep_name) + 1
            file_path = ext_value[file_path_start:]
            dep_path = dep_paths.get(dep_name)
            if dep_path is None:
                raise Exception(
                    "referenceing from {}, data: {}\n: Dependency '{}' not found in dep_paths: {}".format(
                        self.path, str(self.data), dep_name, dep_paths
                    )
                )
            base_config_path = os.path.join(dep_path, file_path)

        rel_path = os.path.dirname(base_config_path)
        # a directory reference implies its default tsconfig.json
        tsconfig_curdir_path = os.path.join(os.path.dirname(self.path), base_config_path)
        if os.path.isdir(tsconfig_curdir_path):
            base_config_path = os.path.join(base_config_path, DEFAULT_TS_CONFIG_FILE)

        # processing the base file recursively
        base_config = TsConfig.load(os.path.join(os.path.dirname(self.path), base_config_path))
        paths = [base_config_path] + base_config.inline_extend(dep_paths)

        self.merge(rel_path, base_config)
        return paths

    def inline_extend(self, dep_paths):
        """
        Merges the tsconfig parameters from configuration file referred by "extends" if any.
        Relative paths are adjusted, current parameter values are prioritized higher than
        those coming from extension file (according to TSC merging rules).
        Returns list of file paths for config files merged into the current configuration
        :param dep_paths: dict of dependency names to their paths
        :type dep_paths: dict
        :rtype: list of str
        """
        extends = self.data.get(RootFields.extends)

        # 'extends' may be a single string or (TS 5.0+) an array of strings
        if type(extends) == list:
            paths = [self.extend_one(dep_paths, ext_value) for ext_value in extends]
            flatten_paths = [item for row in paths for item in row]
        else:
            flatten_paths = self.extend_one(dep_paths, extends)

        # drop the key once inlined so it is not processed twice
        if extends:
            del self.data[RootFields.extends]

        return flatten_paths

    def get_or_create_compiler_options(self):
        """
        Returns ref to the "compilerOptions" dict.
        :rtype: dict
        """
        if RootFields.compilerOptions not in self.data:
            self.data[RootFields.compilerOptions] = {}

        return self.data[RootFields.compilerOptions]

    def compiler_option(self, name, default=None):
        """
        :param name: option key
        :type name: str
        :param default: default value
        :type default: mixed
        :rtype: mixed
        """
        return self.get_or_create_compiler_options().get(name, default)

    def has_compiler_option(self, name):
        # type: (str) -> bool
        # Non-mutating membership test (unlike compiler_option, which creates
        # the 'compilerOptions' dict as a side effect).
        compiler_options = self.data.get(RootFields.compilerOptions, {})

        return name in compiler_options

    def set_compiler_option(self, name, value):
        # type: (str, Any) -> None
        compiler_options = self.get_or_create_compiler_options()
        compiler_options[name] = value

    def validate(self, use_outdir=False):
        # type: (bool) -> None
        """
        Checks whether the config is compatible with current toolchain.
        Collects all problems and raises a single TsValidationError listing them.

        :param use_outdir: whether the build mode routes output through outDir
        :raises TsValidationError: if any check fails
        """
        opts = self.get_or_create_compiler_options()
        errors = []
        declaration = opts.get(CompilerOptionsFields.declaration)
        declaration_dir = opts.get(CompilerOptionsFields.declarationDir)
        out_dir = opts.get(CompilerOptionsFields.outDir)
        root_dir = opts.get(CompilerOptionsFields.rootDir)
        config_dir = os.path.dirname(self.path)

        def is_mod_subdir(p):
            # True when p stays inside the module directory after normalization
            return not os.path.isabs(p) and os.path.normpath(os.path.join(config_dir, p)).startswith(config_dir)

        if root_dir is None:
            errors.append("'rootDir' option is required")

        if use_outdir:
            if out_dir is None:
                errors.append("'outDir' option is required")
            elif out_dir in [".", "", "./"]:
                errors.append("'outDir' value '{}' is not supported, use directory name like 'build'".format(out_dir))
            elif not is_mod_subdir(out_dir):
                errors.append("'outDir' should be a subdirectory of the module")
        else:
            if out_dir:
                errors.append("'outDir' should be removed - it is not in use")
            # Checking only when outDir shouldn't be used, as when we allow outDir,
            # it routes all the results including declarations.
            if declaration is True and declaration_dir is None:
                errors.append("'declarationDir' option is required when 'declaration' is set")

        if opts.get("outFile") is not None:
            errors.append("'outFile' option is not supported")

        if opts.get("preserveSymlinks"):
            errors.append("'preserveSymlinks' option is not supported due to pnpm limitations")

        if self.data.get("references") is not None:
            errors.append("composite builds are not supported, use peerdirs in ya.make instead of 'references' option")

        if len(errors):
            raise TsValidationError(self.path, errors)

    def write(self, path=None, indent=None):
        """
        Serialize self.data back to disk.

        :param path: tsconfig path, defaults to original path
        :type path: str
        :param indent: indentation passed to the JSON writer (None = compact)
        """
        if path is None:
            path = self.path

        with open(path, "w") as f:
            self.rj.dump(self.data, f, indent=indent)

    def filter_files(self, all_files):
        # type: (list[str]) -> list[str]
        """
        Filters all the files by the rules from this tsconig.json. The result will be used as input entries in `ya make`.

        Known limits:

        - `exclude` not implemented, because `tsc` still uses "excluded" files as declaration files (for typing and referencing)
        """

        ts_glob_config = TsGlobConfig(
            root_dir=self.compiler_option(CompilerOptionsFields.rootDir),
            out_dir=self.compiler_option(CompilerOptionsFields.outDir),
            include=self.data.get(RootFields.include),
            files=self.data.get(RootFields.files)
        )

        return ts_glob(ts_glob_config, all_files)

    def get_out_dirs(self):
        # type: () -> list[str]
        # Both outDir and declarationDir can produce output; skip unset ones.
        output_dirs = [self.compiler_option("outDir"), self.compiler_option("declarationDir")]

        return [d for d in output_dirs if d is not None]
diff --git a/build/plugins/lib/nots/typescript/ts_errors.py b/build/plugins/lib/nots/typescript/ts_errors.py
new file mode 100644
index 0000000000..105851d9ec
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/ts_errors.py
@@ -0,0 +1,10 @@
class TsError(RuntimeError):
    """Base error for TypeScript-related build plugin failures."""

    pass
+
+
class TsValidationError(TsError):
    """Raised when a tsconfig fails validation; keeps the per-option messages."""

    def __init__(self, path, errors):
        # type: (str, list[str]) -> None
        self.path = path
        self.errors = errors

        message = "Invalid tsconfig {}:\n{}".format(path, "\n".join(errors))
        super(TsValidationError, self).__init__(message)
diff --git a/build/plugins/lib/nots/typescript/ts_glob.py b/build/plugins/lib/nots/typescript/ts_glob.py
new file mode 100644
index 0000000000..be7810292d
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/ts_glob.py
@@ -0,0 +1,79 @@
+import fnmatch
+import os.path
+
+
class TsGlobConfig:
    """Normalized glob settings extracted from a tsconfig (rootDir/outDir/include/files)."""

    def __init__(self, root_dir, out_dir=None, include=None, files=None):
        # type: (TsGlobConfig, str, str, list[str], list[str]) -> None
        norm = os.path.normpath

        self.root_dir = norm(root_dir)  # Required
        self.out_dir = norm(out_dir) if out_dir else out_dir
        # Default `include` is "everything" — unless an explicit `files`
        # list is given, in which case `include` defaults to empty.
        if include:
            self.include = [norm(p) for p in include]
        else:
            self.include = [] if files else ["**/*"]
        self.files = [norm(p) for p in files] if files else []
+
+
def __path_to_match_rule(path):
    # type: (str) -> str
    """Convert a tsconfig include/files entry into an fnmatch-compatible rule.

    Fix: the original guard `path.endswith("*") or ('*' in path or '?' in path)`
    was redundant — a path ending in '*' necessarily contains '*', so the
    first clause is subsumed by the second.
    """
    # "<dir>/**/*" -> "<dir>/*": fnmatch's "*" already matches across "/"
    if path.endswith('**/*'):
        return path[:-3]  # /**/* -> /*

    # already a glob rule: pass through untouched
    if '*' in path or '?' in path:
        return path

    # the module root itself means "everything"
    if path == ".":
        return "*"

    # a concrete file name (has an extension): exact rule
    _, ext = os.path.splitext(path)
    if ext:
        return path

    # otherwise assume a directory: match everything under it
    return os.path.join(path, '*')
+
+
def __filter_files(files, path_or_rule):
    # type: (set[str], str) -> set[str]
    """Return the subset of `files` matching `path_or_rule` (exact or fnmatch).

    Fix: the original converted the already-converted rule a second time on
    EVERY loop iteration (`__path_to_match_rule(rule)` inside the loop). The
    conversion is idempotent, so behavior is unchanged, but the loop-invariant
    call is now hoisted and performed exactly once.
    """
    rule = __path_to_match_rule(path_or_rule)

    return {path for path in files if path == rule or fnmatch.fnmatch(path, rule)}
+
+
def ts_glob(glob_config, all_files):
    # type: (TsGlobConfig, list[str]) -> list[str]
    """Filter `all_files` by the glob config; returns a sorted list of survivors."""
    selected = set(all_files)

    # keep only files located under `root_dir`
    selected &= __filter_files(selected, glob_config.root_dir)

    # keep only files matched by `include` or listed in `files`
    wanted = set()
    for include_pattern in glob_config.include:
        wanted |= __filter_files(selected, include_pattern)
    for listed_file in glob_config.files:
        wanted |= __filter_files(selected, listed_file)
    selected &= wanted

    # drop previous build output, if an out_dir is configured
    if glob_config.out_dir:
        selected -= __filter_files(selected, glob_config.out_dir)

    return sorted(selected)
diff --git a/build/plugins/lib/nots/typescript/ya.make b/build/plugins/lib/nots/typescript/ya.make
new file mode 100644
index 0000000000..0ab937377d
--- /dev/null
+++ b/build/plugins/lib/nots/typescript/ya.make
@@ -0,0 +1,26 @@
+PY23_LIBRARY()
+
+OWNER(g:frontend-build-platform)
+
+PY_SRCS(
+ __init__.py
+ ts_errors.py
+ ts_glob.py
+ ts_config.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager
+)
+
+IF (PYTHON3)
+ PEERDIR(
+ contrib/python/python-rapidjson
+ )
+ENDIF()
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/build/plugins/lib/nots/ya.make b/build/plugins/lib/nots/ya.make
new file mode 100644
index 0000000000..06a95ecb56
--- /dev/null
+++ b/build/plugins/lib/nots/ya.make
@@ -0,0 +1,22 @@
+OWNER(g:frontend-build-platform)
+
+PY23_LIBRARY()
+
+PY_SRCS(
+ __init__.py
+ erm_json_lite.py
+)
+
+PEERDIR(
+ build/plugins/lib/nots/package_manager
+ build/plugins/lib/nots/semver
+ build/plugins/lib/nots/typescript
+)
+
+END()
+
+RECURSE(
+ package_manager
+ semver
+ typescript
+)
diff --git a/build/plugins/nots.py b/build/plugins/nots.py
new file mode 100644
index 0000000000..d9c0249b13
--- /dev/null
+++ b/build/plugins/nots.py
@@ -0,0 +1,611 @@
+import os
+
+import ymake
+import ytest
+from _common import get_norm_unit_path, rootrel_arc_src, to_yesno
+
+
+# 1 is 60 files per chunk for TIMEOUT(60) - default timeout for SIZE(SMALL)
+# 0.5 is 120 files per chunk for TIMEOUT(60) - default timeout for SIZE(SMALL)
+# 0.2 is 300 files per chunk for TIMEOUT(60) - default timeout for SIZE(SMALL)
+# Passed as LINT-FILE-PROCESSING-TIME in the eslint test record (see _setup_eslint).
+ESLINT_FILE_PROCESSING_TIME_DEFAULT = 0.2  # seconds per file
+
+
+class PluginLogger(object):
+    """
+    Logger bound to the currently-processed unit: messages are emitted via
+    unit.message() and silently dropped while no unit is attached.
+    """
+
+    def __init__(self):
+        # No unit attached initially — info/warn/error are no-ops until reset().
+        self.unit = None
+        self.prefix = ""
+
+    def reset(self, unit, prefix=""):
+        """Attach a unit (or detach with unit=None); prefix is typically the handler name."""
+        self.unit = unit
+        self.prefix = prefix
+
+    def get_state(self):
+        """Return (unit, prefix) so a caller can save and later restore the state."""
+        return (self.unit, self.prefix)
+
+    def _stringify_messages(self, messages):
+        # Render each message: plain strings as-is, everything else via repr().
+        parts = []
+        for m in messages:
+            if m is None:
+                parts.append("None")
+            else:
+                parts.append(m if isinstance(m, str) else repr(m))
+
+        # green (code 32) for the prefix, cyan (code 36) for messages
+        return "\033[0;32m{}\033[0;49m \033[0;36m{}\033[0;49m".format(self.prefix, " ".join(parts))
+
+    def info(self, *messages):
+        if self.unit:
+            self.unit.message(["INFO", self._stringify_messages(messages)])
+
+    def warn(self, *messages):
+        if self.unit:
+            self.unit.message(["WARN", self._stringify_messages(messages)])
+
+    def error(self, *messages):
+        if self.unit:
+            self.unit.message(["ERROR", self._stringify_messages(messages)])
+
+    def print_vars(self, *variables):
+        """Log `NAME=value` lines for the given unit variables (one per line)."""
+        if self.unit:
+            values = ["{}={}".format(v, self.unit.get(v)) for v in variables]
+            self.info("\n".join(values))
+
+
+# Module-level singleton; re-pointed at the current unit by _with_report_configure_error.
+logger = PluginLogger()
+
+
+def _with_report_configure_error(fn):
+    """
+    Decorator for unit handlers (fn's first argument is the unit).
+
+    Converts any exception into ymake.report_configure_error(); re-raises it
+    only when -DTS_RAISE is set. Also scopes the module `logger` to this unit
+    for the duration of the call (logging is enabled only with -DTS_LOG),
+    restoring the previous logger state afterwards.
+    """
+
+    def _wrapper(*args, **kwargs):
+        last_state = logger.get_state()
+        unit = args[0]
+        logger.reset(unit if unit.get("TS_LOG") == "yes" else None, fn.__name__)
+        try:
+            fn(*args, **kwargs)
+        except Exception as exc:
+            ymake.report_configure_error(str(exc))
+            if unit.get("TS_RAISE") == "yes":
+                raise
+            else:
+                unit.message(["WARN", "Configure error is reported. Add -DTS_RAISE to see actual exception"])
+        finally:
+            # Restore whatever logger state the outer handler (if any) had.
+            logger.reset(*last_state)
+
+    return _wrapper
+
+
+def _build_directives(name, flags, paths):
+    # type: (str, list[str]|tuple[str], list[str]) -> str
+    """
+    Render ymake macro directives like ${input;hide:"path"} for each path.
+    Empty/falsy flags are dropped; directive parts are joined with ';'.
+    """
+
+    parts = [p for p in [name] + (flags or []) if p]
+
+    expressions = ["${{{parts}:\"{path}\"}}".format(parts=";".join(parts), path=path) for path in paths]
+
+    return " ".join(expressions)
+
+
+def _build_cmd_input_paths(paths, hide=False, disable_include_processor=False):
+    # type: (list[str]|tuple[str], bool, bool) -> str
+    """Build `input` directives for the paths; `context=TEXT` disables the include processor."""
+    hide_part = "hide" if hide else ""
+    disable_ip_part = "context=TEXT" if disable_include_processor else ""
+
+    return _build_directives("input", [hide_part, disable_ip_part], paths)
+
+
+def _create_pm(unit):
+    """
+    Create a package-manager instance for the unit.
+
+    For TS_TEST_FOR modules, sources_path/module_path are redirected to the
+    tested module (TS_TEST_FOR_DIR / TS_TEST_FOR_PATH) instead of the unit itself.
+    """
+    from lib.nots.package_manager import manager
+
+    sources_path = unit.path()
+    module_path = unit.get("MODDIR")
+    if unit.get("TS_TEST_FOR"):
+        sources_path = unit.get("TS_TEST_FOR_DIR")
+        module_path = unit.get("TS_TEST_FOR_PATH")
+
+    return manager(
+        sources_path=unit.resolve(sources_path),
+        build_root="$B",
+        # build path mirrors the source path under the build root
+        build_path=unit.path().replace("$S", "$B", 1),
+        contribs_path=unit.get("NPM_CONTRIBS_PATH"),
+        nodejs_bin_path=None,
+        script_path=None,
+        module_path=module_path,
+    )
+
+
+def _create_erm_json(unit):
+    """Load the external-resource metadata (ErmJsonLite) from ERM_PACKAGES_PATH."""
+    from lib.nots.erm_json_lite import ErmJsonLite
+
+    erm_packages_path = unit.get("ERM_PACKAGES_PATH")
+    path = unit.resolve(unit.resolve_arc_path(erm_packages_path))
+
+    return ErmJsonLite.load(path)
+
+
+@_with_report_configure_error
+def on_from_npm_lockfiles(unit, *args):
+    """
+    Extract package metadata from the given lockfiles and register each
+    package via unit.on_from_npm. Missing lockfiles are a configure error
+    only with -DTS_STRICT_FROM_NPM_LOCKFILES; parse errors are warned about
+    and otherwise ignored (best-effort).
+    """
+    from lib.nots.package_manager.base import PackageManagerError
+
+    pm = _create_pm(unit)
+    lf_paths = []
+
+    for lf_path in args:
+        abs_lf_path = unit.resolve(unit.resolve_arc_path(lf_path))
+        if abs_lf_path:
+            lf_paths.append(abs_lf_path)
+        elif unit.get("TS_STRICT_FROM_NPM_LOCKFILES") == "yes":
+            ymake.report_configure_error("lockfile not found: {}".format(lf_path))
+
+    try:
+        for pkg in pm.extract_packages_meta_from_lockfiles(lf_paths):
+            unit.on_from_npm([pkg.tarball_url, pkg.sky_id, pkg.integrity, pkg.integrity_algorithm, pkg.tarball_path])
+    except PackageManagerError as e:
+        # NOTE(review): deliberately swallowed after warning (the `pass` below is redundant)
+        logger.warn(str(e))
+        pass
+
+
+def _check_nodejs_version(unit, major):
+    """Raise for unsupported Node.js (< 14); emit a WARN for deprecated versions (< 18)."""
+    if major < 14:
+        raise Exception(
+            "Node.js {} is unsupported. Update Node.js please. See https://nda.ya.ru/t/joB9Mivm6h4znu".format(major)
+        )
+
+    if major < 18:
+        unit.message(
+            [
+                "WARN",
+                "Node.js {} is deprecated. Update Node.js please. See https://nda.ya.ru/t/joB9Mivm6h4znu".format(major),
+            ]
+        )
+
+
+
+@_with_report_configure_error
+def on_peerdir_ts_resource(unit, *resources):
+    """
+    Resolve each requested tool against the external-resource metadata
+    (matching the version range from package.json) and PEERDIR the
+    corresponding build/platform or build/external_resources directory.
+    Multiplatform tools are additionally keyed by the Node.js major version.
+    """
+    pm = _create_pm(unit)
+    pj = pm.load_package_json_from_dir(pm.sources_path)
+    erm_json = _create_erm_json(unit)
+    dirs = []
+
+    nodejs_version = _select_matching_version(erm_json, "nodejs", pj.get_nodejs_version())
+
+    _check_nodejs_version(unit, nodejs_version.major)
+    for tool in resources:
+        dir_name = erm_json.canonize_name(tool)
+        if erm_json.use_resource_directly(tool):
+            # raises the configuration error when the version is unsupported
+            _select_matching_version(erm_json, tool, pj.get_dep_specifier(tool), dep_is_required=True)
+        elif tool == "nodejs":
+            dirs.append(os.path.join("build", "platform", dir_name, str(nodejs_version)))
+            _set_resource_vars(unit, erm_json, tool, nodejs_version)
+        elif erm_json.is_resource_multiplatform(tool):
+            v = _select_matching_version(erm_json, tool, pj.get_dep_specifier(tool))
+            # keep only sandbox resources built for the selected Node.js major
+            sb_resources = [
+                sbr for sbr in erm_json.get_sb_resources(tool, v) if sbr.get("nodejs") == nodejs_version.major
+            ]
+            nodejs_dir = "NODEJS_{}".format(nodejs_version.major)
+            if len(sb_resources) > 0:
+                dirs.append(os.path.join("build", "external_resources", dir_name, str(v), nodejs_dir))
+                _set_resource_vars(unit, erm_json, tool, v, nodejs_version.major)
+            else:
+                unit.message(["WARN", "Missing {}@{} for {}".format(tool, str(v), nodejs_dir)])
+        else:
+            v = _select_matching_version(erm_json, tool, pj.get_dep_specifier(tool))
+            dirs.append(os.path.join("build", "external_resources", dir_name, str(v)))
+            _set_resource_vars(unit, erm_json, tool, v, nodejs_version.major)
+
+    if dirs:
+        unit.onpeerdir(dirs)
+
+
+@_with_report_configure_error
+def on_ts_configure(unit, *tsconfig_paths):
+    # type: (Unit, *str) -> None
+    """
+    Load and validate the tsconfig files, record them (and the configs they
+    extend) as hidden command inputs, export compiler options into
+    TS_CONFIG_* unit variables, filter TS_GLOB_FILES by tsconfig rules,
+    and set up the eslint test for the module.
+    """
+    from lib.nots.package_manager.base import PackageJson
+    from lib.nots.package_manager.base.utils import build_pj_path
+    from lib.nots.typescript import TsConfig
+
+    # for use in CMD as inputs
+    __set_append(
+        unit, "TS_CONFIG_FILES", _build_cmd_input_paths(tsconfig_paths, hide=True, disable_include_processor=True)
+    )
+
+    mod_dir = unit.get("MODDIR")
+    cur_dir = unit.get("TS_TEST_FOR_PATH") if unit.get("TS_TEST_FOR") else mod_dir
+    pj_path = build_pj_path(unit.resolve(unit.resolve_arc_path(cur_dir)))
+    dep_paths = PackageJson.load(pj_path).get_dep_paths_by_names()
+
+    # reversed for using the first tsconfig as the config for include processor (legacy)
+    for tsconfig_path in reversed(tsconfig_paths):
+        abs_tsconfig_path = unit.resolve(unit.resolve_arc_path(tsconfig_path))
+        if not abs_tsconfig_path:
+            raise Exception("tsconfig not found: {}".format(tsconfig_path))
+
+        tsconfig = TsConfig.load(abs_tsconfig_path)
+        config_files = tsconfig.inline_extend(dep_paths)
+        config_files = _resolve_module_files(unit, mod_dir, config_files)
+
+        use_tsconfig_outdir = unit.get("TS_CONFIG_USE_OUTDIR") == "yes"
+        tsconfig.validate(use_tsconfig_outdir)
+
+        # add tsconfig files from which root tsconfig files were extended
+        __set_append(
+            unit, "TS_CONFIG_FILES", _build_cmd_input_paths(config_files, hide=True, disable_include_processor=True)
+        )
+
+        # region include processor
+        unit.set(["TS_CONFIG_ROOT_DIR", tsconfig.compiler_option("rootDir")])  # also for hermione
+        if use_tsconfig_outdir:
+            unit.set(["TS_CONFIG_OUT_DIR", tsconfig.compiler_option("outDir")])  # also for hermione
+
+        unit.set(["TS_CONFIG_SOURCE_MAP", to_yesno(tsconfig.compiler_option("sourceMap"))])
+        unit.set(["TS_CONFIG_DECLARATION", to_yesno(tsconfig.compiler_option("declaration"))])
+        unit.set(["TS_CONFIG_DECLARATION_MAP", to_yesno(tsconfig.compiler_option("declarationMap"))])
+        unit.set(["TS_CONFIG_PRESERVE_JSX", to_yesno(tsconfig.compiler_option("jsx") == "preserve")])
+        # endregion
+
+        _filter_inputs_by_rules_from_tsconfig(unit, tsconfig)
+
+    _setup_eslint(unit)
+
+
+def __set_append(unit, var_name, value):
+    # type: (Unit, str, str|list[str]|tuple[str]) -> None
+    """
+    SET_APPEND() python naive implementation - append value/values to the list of values.
+    Lists/tuples are joined with spaces before appending.
+    """
+    previous_value = unit.get(var_name) or ""
+    value_in_str = " ".join(value) if isinstance(value, list) or isinstance(value, tuple) else value
+    new_value = previous_value + " " + value_in_str
+
+    unit.set([var_name, new_value])
+
+
+def __strip_prefix(prefix, line):
+    # type: (str, str) -> str
+    """Return `line` without the leading `prefix`; unchanged if the prefix is absent."""
+
+    if line.startswith(prefix):
+        prefix_len = len(prefix)
+        return line[prefix_len:]
+
+    return line
+
+
+def _filter_inputs_by_rules_from_tsconfig(unit, tsconfig):
+    """
+    Reduce file list from the TS_GLOB_FILES variable following tsconfig.json rules,
+    appending the survivors (re-prefixed with ${ARCADIA_ROOT}/MODDIR) to TS_INPUT_FILES.
+    """
+    mod_dir = unit.get("MODDIR")
+    target_path = os.path.join("${ARCADIA_ROOT}", mod_dir, "")  # To have "/" in the end
+
+    # tsconfig rules operate on module-relative paths, so strip the prefix first
+    all_files = [__strip_prefix(target_path, f) for f in unit.get("TS_GLOB_FILES").split(" ")]
+    filtered_files = tsconfig.filter_files(all_files)
+
+    __set_append(unit, "TS_INPUT_FILES", [os.path.join(target_path, f) for f in filtered_files])
+
+
+def _get_ts_test_data_dirs(unit):
+    """Sorted, de-duplicated directory names of the _TS_TEST_DATA_VALUE paths (arcadia-relative)."""
+    return sorted(
+        set(
+            [
+                os.path.dirname(rootrel_arc_src(p, unit))
+                for p in (ytest.get_values_list(unit, "_TS_TEST_DATA_VALUE") or [])
+            ]
+        )
+    )
+
+
+def _resolve_config_path(unit, test_runner, rel_to):
+    """
+    Resolve the runner's config file (ESLINT_CONFIG_PATH for eslint,
+    TS_TEST_CONFIG_PATH otherwise), register it as a source, and return
+    its path relative to the directory held in unit variable `rel_to`.
+    Raises when the config cannot be found.
+    """
+    config_path = unit.get("ESLINT_CONFIG_PATH") if test_runner == "eslint" else unit.get("TS_TEST_CONFIG_PATH")
+    arc_config_path = unit.resolve_arc_path(config_path)
+    abs_config_path = unit.resolve(arc_config_path)
+    if not abs_config_path:
+        raise Exception("{} config not found: {}".format(test_runner, config_path))
+
+    unit.onsrcs([arc_config_path])
+    abs_rel_to = unit.resolve(unit.resolve_arc_path(unit.get(rel_to)))
+    return os.path.relpath(abs_config_path, start=abs_rel_to)
+
+
+def _is_tests_enabled(unit):
+    """Tests are disabled under -DTIDY builds."""
+    if unit.get("TIDY") == "yes":
+        return False
+
+    return True
+
+
+def _get_test_runner_handlers():
+    """Map test-runner name -> handler that registers the test for a unit."""
+    return {
+        "jest": _add_jest_ts_test,
+        "hermione": _add_hermione_ts_test,
+    }
+
+
+def _add_jest_ts_test(unit, test_runner, test_files, deps, test_record):
+    """Register a jest test: adds CONFIG-PATH (relative to TS_TEST_FOR_PATH) to the record."""
+    test_record.update(
+        {
+            "CONFIG-PATH": _resolve_config_path(unit, test_runner, rel_to="TS_TEST_FOR_PATH"),
+        }
+    )
+    _add_test(unit, test_runner, test_files, deps, test_record)
+
+
+def _add_hermione_ts_test(unit, test_runner, test_files, deps, test_record):
+    """
+    Register a hermione test: forces SIZE=LARGE, adds ya:fat/ya:external/ya:noretries
+    tags and the network:full requirement on top of the unit's own values.
+    """
+    test_tags = sorted(set(["ya:fat", "ya:external", "ya:noretries"] + ytest.get_values_list(unit, "TEST_TAGS_VALUE")))
+    test_requirements = sorted(set(["network:full"] + ytest.get_values_list(unit, "TEST_REQUIREMENTS_VALUE")))
+
+    test_record.update(
+        {
+            "SIZE": "LARGE",
+            "TAG": ytest.serialize_list(test_tags),
+            "REQUIREMENTS": ytest.serialize_list(test_requirements),
+            "CONFIG-PATH": _resolve_config_path(unit, test_runner, rel_to="TS_TEST_FOR_PATH"),
+        }
+    )
+
+    _add_test(unit, test_runner, test_files, deps, test_record)
+
+
+def _setup_eslint(unit):
+    """
+    Register an eslint test for the module's lint sources.
+    No-op when tests are disabled, linting is suppressed (NO_LINT),
+    or there are no files to lint.
+    """
+    if not _is_tests_enabled(unit):
+        return
+
+    if unit.get("_NO_LINT_VALUE") == "none":
+        return
+
+    lint_files = ytest.get_values_list(unit, "_TS_LINT_SRCS_VALUE")
+    if not lint_files:
+        return
+
+    unit.on_peerdir_ts_resource("eslint")
+
+    mod_dir = unit.get("MODDIR")
+    lint_files = _resolve_module_files(unit, mod_dir, lint_files)
+    deps = _create_pm(unit).get_peers_from_package_json()
+    test_record = {
+        "ESLINT_CONFIG_PATH": _resolve_config_path(unit, "eslint", rel_to="MODDIR"),
+        "LINT-FILE-PROCESSING-TIME": str(ESLINT_FILE_PROCESSING_TIME_DEFAULT),
+    }
+
+    _add_test(unit, "eslint", lint_files, deps, test_record, mod_dir)
+
+
+def _resolve_module_files(unit, mod_dir, file_paths):
+    """
+    Resolve paths to arcadia-root-relative form, then strip the `mod_dir`
+    prefix (plus separator) for paths inside the module.
+    """
+    resolved_files = []
+
+    for path in file_paths:
+        resolved = rootrel_arc_src(path, unit)
+        if resolved.startswith(mod_dir):
+            # +1 drops the path separator following mod_dir
+            mod_dir_with_sep_len = len(mod_dir) + 1
+            resolved = resolved[mod_dir_with_sep_len:]
+        resolved_files.append(resolved)
+
+    return resolved_files
+
+
+def _add_test(unit, test_type, test_files, deps=None, test_record=None, test_cwd=None):
+    """
+    Build the full test record for the unit (merging `test_record` on top of
+    the defaults), register DEPENDS for deps, warn about non-string record
+    values, and store the serialized record in DART_DATA.
+    """
+    from lib.nots.package_manager import constants
+
+    def sort_uniq(text):
+        # de-duplicate and order deterministically
+        return sorted(set(text))
+
+    if deps:
+        unit.ondepends(sort_uniq(deps))
+
+    test_dir = get_norm_unit_path(unit)
+    full_test_record = {
+        "TEST-NAME": test_type.lower(),
+        "TEST-TIMEOUT": unit.get("TEST_TIMEOUT") or "",
+        "TEST-ENV": ytest.prepare_env(unit.get("TEST_ENV_VALUE")),
+        "TESTED-PROJECT-NAME": os.path.splitext(unit.filename())[0],
+        "TEST-RECIPES": ytest.prepare_recipes(unit.get("TEST_RECIPES_VALUE")),
+        "SCRIPT-REL-PATH": test_type,
+        "SOURCE-FOLDER-PATH": test_dir,
+        "BUILD-FOLDER-PATH": test_dir,
+        "BINARY-PATH": os.path.join(test_dir, unit.filename()),
+        "SPLIT-FACTOR": unit.get("TEST_SPLIT_FACTOR") or "",
+        "FORK-MODE": unit.get("TEST_FORK_MODE") or "",
+        "SIZE": unit.get("TEST_SIZE_NAME") or "",
+        "TEST-FILES": ytest.serialize_list(test_files),
+        "TEST-CWD": test_cwd or "",
+        "TAG": ytest.serialize_list(ytest.get_values_list(unit, "TEST_TAGS_VALUE")),
+        "REQUIREMENTS": ytest.serialize_list(ytest.get_values_list(unit, "TEST_REQUIREMENTS_VALUE")),
+        "NODEJS-ROOT-VAR-NAME": unit.get("NODEJS-ROOT-VAR-NAME"),
+        "NODE-MODULES-BUNDLE-FILENAME": constants.NODE_MODULES_WORKSPACE_BUNDLE_FILENAME,
+        "CUSTOM-DEPENDENCIES": " ".join(sort_uniq((deps or []) + ytest.get_values_list(unit, "TEST_DEPENDS_VALUE"))),
+    }
+
+    if test_record:
+        full_test_record.update(test_record)
+
+    # the dart format expects string values only — warn early about anything else
+    for k, v in full_test_record.items():
+        if not isinstance(v, str):
+            logger.warn(k, "expected 'str', got:", type(v))
+
+    data = ytest.dump_test(unit, full_test_record)
+    if data:
+        unit.set_property(["DART_DATA", data])
+
+
+def _set_resource_vars(unit, erm_json, tool, version, nodejs_major=None):
+    # type: (any, ErmJsonLite, str, Version, int|None) -> None
+    """
+    Derive the resource variable names for the tool at the given version and
+    export <RESOURCE>_ROOT and <RESOURCE>-ROOT-VAR-NAME unit variables.
+    Multiplatform tools additionally carry the Node.js major in the name.
+    """
+
+    resource_name = erm_json.canonize_name(tool).upper()
+
+    # example: NODEJS_12_18_4 | HERMIONE_7_0_4_NODEJS_18
+    version_str = str(version).replace(".", "_")
+    yamake_resource_name = "{}_{}".format(resource_name, version_str)
+
+    if erm_json.is_resource_multiplatform(tool):
+        yamake_resource_name += "_NODEJS_{}".format(nodejs_major)
+
+    yamake_resource_var = "{}_RESOURCE_GLOBAL".format(yamake_resource_name)
+
+    unit.set(["{}_ROOT".format(resource_name), "${}".format(yamake_resource_var)])
+    unit.set(["{}-ROOT-VAR-NAME".format(resource_name), yamake_resource_var])
+
+
+def _select_matching_version(erm_json, resource_name, range_str, dep_is_required=False):
+    # type: (ErmJsonLite, str, str, bool) -> Version
+    """
+    Pick a toolchain version satisfying `range_str`.
+
+    Raises when `dep_is_required` and no range is given, or when no allowed
+    version matches — in the latter case the error lists the available
+    versions and wraps the original failure.
+    """
+    if dep_is_required and range_str is None:
+        raise Exception(
+            "Please install the '{tool}' package to the project. Run the command:\n"
+            "   ya tool nots add -D {tool}".format(tool=resource_name)
+        )
+
+    try:
+        version = erm_json.select_version_of(resource_name, range_str)
+        if version:
+            return version
+
+        raise ValueError("There is no allowed version to satisfy this range: '{}'".format(range_str))
+    except Exception as error:
+        toolchain_versions = erm_json.get_versions_of(erm_json.get_resource(resource_name))
+
+        raise Exception(
+            "Requested {} version range '{}' could not be satisfied. \n"
+            "Please use a range that would include one of the following: {}. \n"
+            "For further details please visit the link: {} \nOriginal error: {} \n".format(
+                resource_name,
+                range_str,
+                ", ".join(map(str, toolchain_versions)),
+                "https://docs.yandex-team.ru/ya-make/manual/typescript/toolchain",
+                str(error),
+            )
+        )
+
+
+@_with_report_configure_error
+def on_node_modules_configure(unit):
+    """
+    Configure node_modules handling for the module: PEERDIR local deps,
+    compute node_modules inputs/outputs from the lockfiles (reporting
+    lockfile issues as configure errors), and wire up @yatool/prebuilder
+    when package.json opts in. Modules without dependencies get a noop
+    node_modules command.
+    """
+    pm = _create_pm(unit)
+    pj = pm.load_package_json_from_dir(pm.sources_path)
+
+    if pj.has_dependencies():
+        unit.onpeerdir(pm.get_local_peers_from_package_json())
+        local_cli = unit.get("TS_LOCAL_CLI") == "yes"
+        errors, ins, outs = pm.calc_node_modules_inouts(local_cli)
+
+        if errors:
+            ymake.report_configure_error(
+                "There are some issues with lockfiles.\n"
+                + "Please contact support (https://nda.ya.ru/t/sNoSFsO76ygSXL),\n"
+                + "providing following details:\n"
+                + "\n---\n".join([str(err) for err in errors])
+            )
+        else:
+            unit.on_set_node_modules_ins_outs(["IN"] + sorted(ins) + ["OUT"] + sorted(outs))
+
+            __set_append(unit, "_NODE_MODULES_INOUTS", _build_directives("input", ["hide"], sorted(ins)))
+            # test-for modules do not own the outputs of the tested module
+            if not unit.get("TS_TEST_FOR"):
+                __set_append(unit, "_NODE_MODULES_INOUTS", _build_directives("output", ["hide"], sorted(outs)))
+
+        if pj.get_use_prebuilder():
+            lf = pm.load_lockfile_from_dir(pm.sources_path)
+            is_valid, invalid_keys = lf.validate_has_addons_flags()
+
+            if not is_valid:
+                ymake.report_configure_error(
+                    "Project is configured to use @yatool/prebuilder. \n"
+                    + "Some packages in the pnpm-lock.yaml are misconfigured.\n"
+                    + "Run `ya tool nots update-lockfile` to fix lockfile.\n"
+                    + "All packages with `requiresBuild:true` have to be marked with `hasAddons:true/false`.\n"
+                    + "Misconfigured keys: \n"
+                    + "  - "
+                    + "\n  - ".join(invalid_keys)
+                )
+
+            unit.on_peerdir_ts_resource("@yatool/prebuilder")
+            unit.set(
+                [
+                    "_YATOOL_PREBUILDER_ARG",
+                    "--yatool-prebuilder-path $YATOOL_PREBUILDER_ROOT/node_modules/@yatool/prebuilder",
+                ]
+            )
+
+    else:
+        # default "noop" command
+        unit.set(["_NODE_MODULES_CMD", "$TOUCH_UNIT"])
+
+
+@_with_report_configure_error
+def on_ts_test_for_configure(unit, test_runner, default_config, node_modules_filename):
+    """
+    Configure a TS_TEST_FOR module: PEERDIR the tested module, set up the
+    node_modules/peer-tars extraction recipes, resolve the runner config
+    (falling back to `default_config` inside the tested module), build the
+    test record, and register the test via the runner-specific handler.
+    """
+    if not _is_tests_enabled(unit):
+        return
+
+    if unit.enabled('TS_COVERAGE'):
+        unit.on_peerdir_ts_resource("nyc")
+
+    for_mod_path = unit.get("TS_TEST_FOR_PATH")
+    unit.onpeerdir([for_mod_path])
+    unit.on_setup_extract_node_modules_recipe([for_mod_path])
+    unit.on_setup_extract_peer_tars_recipe([for_mod_path])
+
+    # hermione resolves the bundle under $B; other runners use $(BUILD_ROOT)
+    root = "$B" if test_runner == "hermione" else "$(BUILD_ROOT)"
+    unit.set(["TS_TEST_NM", os.path.join(root, for_mod_path, node_modules_filename)])
+
+    config_path = unit.get("TS_TEST_CONFIG_PATH")
+    if not config_path:
+        config_path = os.path.join(for_mod_path, default_config)
+        unit.set(["TS_TEST_CONFIG_PATH", config_path])
+
+    test_record = _add_ts_resources_to_test_record(
+        unit,
+        {
+            "TS-TEST-FOR-PATH": for_mod_path,
+            "TS-TEST-DATA-DIRS": ytest.serialize_list(_get_ts_test_data_dirs(unit)),
+            "TS-TEST-DATA-DIRS-RENAME": unit.get("_TS_TEST_DATA_DIRS_RENAME_VALUE"),
+        },
+    )
+
+    test_files = ytest.get_values_list(unit, "_TS_TEST_SRCS_VALUE")
+    test_files = _resolve_module_files(unit, unit.get("MODDIR"), test_files)
+    if not test_files:
+        ymake.report_configure_error("No tests found")
+        return
+
+    deps = _create_pm(unit).get_peers_from_package_json()
+    add_ts_test = _get_test_runner_handlers()[test_runner]
+    add_ts_test(unit, test_runner, test_files, deps, test_record)
+
+
+@_with_report_configure_error
+def on_validate_ts_test_for_args(unit, for_mod, root):
+    """Reject relative TS_TEST_FOR paths used without the RELATIVE key (FBP-1085)."""
+    # FBP-1085
+    is_arc_root = root == "${ARCADIA_ROOT}"
+    is_rel_for_mod = for_mod.startswith(".")
+
+    if is_arc_root and is_rel_for_mod:
+        ymake.report_configure_error(
+            "You are using a relative path for a module. "
+            + "You have to add RELATIVE key, like (RELATIVE {})".format(for_mod)
+        )
+
+
+@_with_report_configure_error
+def on_set_ts_test_for_vars(unit, for_mod):
+    """Mark the unit as a TS_TEST_FOR module and record the tested module's dir/path."""
+    unit.set(["TS_TEST_FOR", "yes"])
+    unit.set(["TS_TEST_FOR_DIR", unit.resolve_arc_path(for_mod)])
+    unit.set(["TS_TEST_FOR_PATH", rootrel_arc_src(for_mod, unit)])
+
+
+def _add_ts_resources_to_test_record(unit, test_record):
+    """Copy every resolved <TOOL>-ROOT-VAR-NAME unit variable into the test record; returns it."""
+    erm_json = _create_erm_json(unit)
+    for tool in erm_json.list_npm_packages():
+        tool_resource_label = "{}-ROOT-VAR-NAME".format(tool.upper())
+        tool_resource_value = unit.get(tool_resource_label)
+        if tool_resource_value:
+            test_record[tool_resource_label] = tool_resource_value
+    return test_record
+
+
+@_with_report_configure_error
+def on_ts_files(unit, *files):
+    """Append a copy command per file to _TS_FILES_COPY_CMD (existing commands are kept first)."""
+    new_cmds = ['$COPY_CMD ${{input;context=TEXT:"{0}"}} ${{output;noauto:"{0}"}}'.format(f) for f in files]
+    all_cmds = unit.get("_TS_FILES_COPY_CMD")
+    if all_cmds:
+        new_cmds.insert(0, all_cmds)
+    unit.set(["_TS_FILES_COPY_CMD", " && ".join(new_cmds)])
+
+
+@_with_report_configure_error
+def on_depends_on_mod(unit):
+    """Add a DEPENDS on the tested module (TS_TEST_FOR_PATH)."""
+    for_mod_path = unit.get("TS_TEST_FOR_PATH")
+    unit.ondepends([for_mod_path])
diff --git a/build/plugins/ya.make b/build/plugins/ya.make
index 117d1918c6..f886e8114e 100644
--- a/build/plugins/ya.make
+++ b/build/plugins/ya.make
@@ -26,6 +26,7 @@ PY_SRCS(
lj_archive.py
llvm_bc.py
macros_with_error.py
+ nots.py
pybuild.py
res.py
suppressions.py
@@ -43,6 +44,7 @@ END()
RECURSE(
tests
lib
+ lib/nots
lib/proxy
lib/test_const
lib/test_const/proxy
diff --git a/build/ymake.core.conf b/build/ymake.core.conf
index 1446ee131e..99e78a8116 100644
--- a/build/ymake.core.conf
+++ b/build/ymake.core.conf
@@ -64,6 +64,7 @@ when ($LOCAL && $XCODE) {
@import "${CONF_ROOT}/conf/swig.conf"
@import "${CONF_ROOT}/conf/proto.conf"
@import "${CONF_ROOT}/conf/fbs.conf"
+@import "${CONF_ROOT}/conf/ts/ts.conf"
@import "${CONF_ROOT}/conf/project_specific/other.conf"
@import "${CONF_ROOT}/conf/project_specific/yt.conf"