author     spreis <spreis@yandex-team.com>   2023-10-25 10:17:00 +0300
committer  spreis <spreis@yandex-team.com>   2023-10-25 10:36:47 +0300
commit     b82ee322906e7eec3dc912025458347b871d4ab0 (patch)
tree       b35e2c4dc97848fcdeb30581aa08e33d0a36c8af
parent     a13962045e5307dc6ab38d061e75b1f518a8ae29 (diff)
download   ydb-b82ee322906e7eec3dc912025458347b871d4ab0.tar.gz
Dismantle FROM_EXTERNAL, process .external file in plugin
-rw-r--r--  build/plugins/large_files.py          | 30
-rw-r--r--  build/plugins/ytest.py                |  3
-rw-r--r--  build/scripts/fetch_from_external.py  | 60
-rw-r--r--  build/ymake.core.conf                 | 18
4 files changed, 32 insertions, 79 deletions
diff --git a/build/plugins/large_files.py b/build/plugins/large_files.py
index 568e294253..308dc82075 100644
--- a/build/plugins/large_files.py
+++ b/build/plugins/large_files.py
@@ -1,6 +1,7 @@
+import json
 import os
 import ymake
-from _common import strip_roots
+from _common import strip_roots, resolve_common_const
 
 
 PLACEHOLDER_EXT = "external"
@@ -31,10 +32,27 @@ def onlarge_files(unit, *args):
             unit.message(["warn", msg])
             unit.oncopy_file([arg, arg])
         else:
-            out_file = strip_roots(os.path.join(unit.path(), arg))
             external = "{}.{}".format(arg, PLACEHOLDER_EXT)
-            from_external_cmd = [external, out_file, 'OUT_NOAUTO', arg]
+            rel_placeholder = resolve_common_const(unit.resolve_arc_path(external))
+            if not rel_placeholder.startswith("$S"):
+                ymake.report_configure_error('LARGE_FILES: neither actual data nor placeholder is found for "{}"'.format(arg))
+                return
+            try:
+                abs_placeholder = unit.resolve(rel_placeholder)
+                with open(abs_placeholder, "r") as f:
+                    res_desc = json.load(f)
+                    storage = res_desc["storage"]
+                    res_id = res_desc["resource_id"]
+            except Exception as e:
+                ymake.report_configure_error('LARGE_FILES: error processing placeholder file "{}.": {}'.format(external, e))
+                return
+
+            from_cmd = ['FILE', '{}'.format(res_id), 'OUT_NOAUTO', arg, 'EXTERNAL_FILE', external]
             if os.path.dirname(arg):
-                from_external_cmd.extend(("RENAME", os.path.basename(arg)))
-            unit.on_from_external(from_external_cmd)
-            unit.onadd_check(['check.external', external])
+                from_cmd.extend(("RENAME", os.path.basename(arg)))
+
+            method = getattr(unit, 'onfrom_{}'.format(storage.lower()), None)
+            if method:
+                method(from_cmd)
+            else:
+                ymake.report_configure_error('LARGE_FILES: error processing placeholder file "{}.": unknown storage kind "{}"'.format(external, storage))
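
For reference, the .external placeholder parsed above is a small JSON description of the remote resource. A minimal sketch of the shape the plugin expects, with made-up values (only the two keys it actually reads are shown):

# Hypothetical contents of data/model.bin.external as consumed by onlarge_files().
# "storage" selects the dispatch target (unit.onfrom_sandbox / unit.onfrom_mds);
# any other value is reported as an unknown storage kind.
example_placeholder = {
    "storage": "SANDBOX",      # or "MDS"
    "resource_id": 123456789,  # forwarded to the FROM_* macro as the resource id/key
}

The deleted fetch_from_external.py below read the same two fields at fetch time; after this change they are resolved at configure time by the plugin.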
diff --git a/build/plugins/ytest.py b/build/plugins/ytest.py
index 506466f355..9785684a49 100644
--- a/build/plugins/ytest.py
+++ b/build/plugins/ytest.py
@@ -624,6 +624,9 @@ def onadd_check(unit, *args):
     if check_type in ("check.data", "check.resource") and unit.get('VALIDATE_DATA') == "no":
         return
 
+    if check_type == "check.external" and (len(flat_args) == 1 or not flat_args[1]):
+        return
+
     test_dir = _common.get_norm_unit_path(unit)
     test_timeout = ''
diff --git a/build/scripts/fetch_from_external.py b/build/scripts/fetch_from_external.py
deleted file mode 100644
index d4ed6f4221..0000000000
--- a/build/scripts/fetch_from_external.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import sys
-import json
-import os.path
-import fetch_from
-import argparse
-import logging
-
-
-def parse_args():
-    parser = argparse.ArgumentParser()
-    fetch_from.add_common_arguments(parser)
-    parser.add_argument('--external-file', required=True)
-    parser.add_argument('--custom-fetcher')
-    parser.add_argument('--resource-file')
-    return parser.parse_args()
-
-
-def main(args):
-    external_file = args.external_file.rstrip('.external')
-    if os.path.isfile(args.resource_file):
-        fetch_from.process(args.resource_file, os.path.basename(args.resource_file), args, False)
-        return
-
-    error = None
-    try:
-        with open(args.external_file) as f:
-            js = json.load(f)
-
-            if js['storage'] == 'SANDBOX':
-                import fetch_from_sandbox as ffsb
-                del args.external_file
-                args.resource_id = js['resource_id']
-                ffsb.main(args)
-            elif js['storage'] == 'MDS':
-                import fetch_from_mds as fmds
-                del args.external_file
-                args.key = js['resource_id']
-                fmds.main(args)
-            else:
-                error = 'Unsupported storage in {}'.format(external_file)
-    except:
-        logging.error('Invalid external file: {}'.format(external_file))
-        raise
-    if error:
-        raise Exception(error)
-
-
-if __name__ == '__main__':
-    args = parse_args()
-    fetch_from.setup_logging(args, os.path.basename(__file__))
-
-    try:
-        main(args)
-    except Exception as e:
-        logging.exception(e)
-        print >>sys.stderr, open(args.abs_log_path).read()
-        sys.stderr.flush()
-
-        import error
-        sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/ymake.core.conf b/build/ymake.core.conf
index 51bcaac370..c031c56db5 100644
--- a/build/ymake.core.conf
+++ b/build/ymake.core.conf
@@ -4513,28 +4513,19 @@ macro _RUN_JAVA(IN{input}[], IN_NOPARSE{input}[], OUT{output}[], OUT_NOAUTO{outp
 ### If AUTOUPDATED is specified than macro will be regularly updated according to autoupdate script. The dedicated Sandbox task scans the arcadia and
 ### changes resource_ids in such macros if newer resource of specified type is available. Note that the task seeks AUTOUPDATED in specific position,
 ### so you shall place it immediately after resource_id.
-macro FROM_SANDBOX(Id, OUT{output}[], OUT_NOAUTO{output}[], OUTPUT_INCLUDES[], INDUCED_DEPS[], FILE?"--copy-to-dir":"--untar-to", AUTOUPDATED="", PREFIX=".", RENAME[], EXECUTABLE?"--executable":"", SBR="sbr:") {
+macro FROM_SANDBOX(Id, OUT{output}[], OUT_NOAUTO{output}[], OUTPUT_INCLUDES[], INDUCED_DEPS[], FILE?"--copy-to-dir":"--untar-to", AUTOUPDATED="", PREFIX=".", RENAME[], EXECUTABLE?"--executable":"", SBR="sbr:", EXTERNAL_FILE="") {
     .CMD=${hide:SANDBOX_FAKEID} ${cwd:BINDIR} ${resource;pre=$SBR:Id} $YMAKE_PYTHON ${input:"build/scripts/fetch_from_sandbox.py"} --resource-file $(RESOURCE_ROOT)/sbr/$Id/resource --resource-id $Id $FILE $PREFIX ${pre=--rename :RENAME} $EXECUTABLE -- $OUT $OUT_NOAUTO ${input;hide:"build/scripts/fetch_from.py"} ${output_include;hide:OUTPUT_INCLUDES} $INDUCED_DEPS ${output;hide:OUT} ${output;noauto;hide:OUT_NOAUTO} ${requirements;hide:"network:full"} ${kv;hide:"p SB"} ${kv;hide:"pc yellow"} ${kv;hide:"show_out"}
     ADD_CHECK(check.resource $Id)
+    ADD_CHECK(check.external $EXTERNAL_FILE)
 }
 
 ### @usage: FROM_MDS([FILE] key [RENAME <resource files>] OUT_[NOAUTO] <output files> [EXECUTABLE] [OUTPUT_INCLUDES <include files>] [INDUCED_DEPS $VARs...])
 ###
 ### Download resource from MDS with the specified key and process like [FROM_SANDBOX()](#macro_FROM_SANDBOX).
-macro FROM_MDS(Key, OUT{output}[], OUT_NOAUTO{output}[], OUTPUT_INCLUDES[], INDUCED_DEPS[], FILE?"--copy-to-dir":"--untar-to", PREFIX=".", RENAME[], EXECUTABLE?"--executable":"") {
+macro FROM_MDS(Key, OUT{output}[], OUT_NOAUTO{output}[], OUTPUT_INCLUDES[], INDUCED_DEPS[], FILE?"--copy-to-dir":"--untar-to", PREFIX=".", RENAME[], EXECUTABLE?"--executable":"", EXTERNAL_FILE="") {
     .CMD=${cwd:BINDIR} $YMAKE_PYTHON ${input:"build/scripts/fetch_from_mds.py"} --key $Key $FILE $PREFIX ${pre=--rename :RENAME} $EXECUTABLE -- $OUT $OUT_NOAUTO ${input;hide:"build/scripts/fetch_from.py"} ${output_include;hide:OUTPUT_INCLUDES} $INDUCED_DEPS ${output;hide:OUT} ${output;noauto;hide:OUT_NOAUTO} ${requirements;hide:"network:full"} ${kv;hide:"p MD"} ${kv;hide:"pc yellow"} ${kv;hide:"show_out"}
     ADD_CHECK(check.mds $Key)
-}
-
-# tag:internal
-### @usage: _FROM_EXTERNAL(ExtFile [AUTOUPDATED script] [RENAME <resource files>] OUT_[NOAUTO] <output files> [EXECUTABLE]) #internal
-###
-### Use resource described as .external file as [FROM_SANDBOX()](#macro_FROM_SANDBOX)/[FROM_MDS()](#macro_FROM_MDS).
-macro _FROM_EXTERNAL(File, OutFile, OUT{output}[], OUT_NOAUTO{output}[], OUTPUT_INCLUDES[], INDUCED_DEPS[], AUTOUPDATED="", PREFIX=".", RENAME[], EXECUTABLE?"--executable":"", EXT="ext:") {
-    .CMD=${hide:SANDBOX_FAKEID} ${cwd:BINDIR} ${resource;pre=$EXT;suf=.external:OutFile} $YMAKE_PYTHON ${input:"build/scripts/fetch_from_external.py"} --external-file ${input:File} --resource-file $(RESOURCE_ROOT)/ext/$OutFile --copy-to-dir $PREFIX ${pre=--rename :RENAME} $EXECUTABLE -- $OUT $OUT_NOAUTO ${input;hide:"build/scripts/fetch_from.py"} ${input;hide:"build/scripts/fetch_from_sandbox.py"} ${input;hide:"build/scripts/fetch_from_mds.py"} ${output_include;hide:OUTPUT_INCLUDES} $INDUCED_DEPS ${output;hide:OUT} ${output;noauto;hide:OUT_NOAUTO} ${requirements;hide:"network:full"} ${kv;hide:"p XT"} ${kv;hide:"pc yellow"} ${kv;hide:"show_out"}
-
-#FIXME: add '${resource;pre=$EXT:OutFile}' when support of the scheme is added to executors
-#FIXME: add 'ADD_CHECK(check.external $File)' when proper testing is implemented
+    ADD_CHECK(check.external $EXTERNAL_FILE)
 }
 
 ### @usage LARGE_FILES([AUTOUPDATED] Files...)
@@ -4545,6 +4536,7 @@ macro _FROM_EXTERNAL(File, OutFile, OUT{output}[], OUT_NOAUTO{output}[], OUTPUT_
 macro LARGE_FILES(AUTOUPDATED?, Files...) {
     # This is needed to correctly switch between remote and local modes
     _GLOB($LF $Files)
+    SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${suf=.external:Files})
 }
 
 ### @usage: FROM_ARCHIVE(Src [RENAME <resource files>] OUT_[NOAUTO] <output files> [EXECUTABLE] [OUTPUT_INCLUDES <include files>] [INDUCED_DEPS $VARs...])
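
Taken together, here is a hedged sketch (values made up) of the argument list that large_files.py now hands to the extended FROM_SANDBOX/FROM_MDS macros above, and how it interacts with the new check.external guard in ytest.py:

# Illustrative only: the command list onlarge_files() would build for a hypothetical
# file "data/model.bin" backed by a Sandbox resource; each keyword lines up with a
# parameter of the extended FROM_SANDBOX signature above.
from_cmd = [
    'FILE', '123456789',                         # fetch as a plain file; the id binds to Id
    'OUT_NOAUTO', 'data/model.bin',              # output is not auto-added to the build
    'EXTERNAL_FILE', 'data/model.bin.external',  # feeds ADD_CHECK(check.external ...)
    'RENAME', 'model.bin',                       # present because the path has a directory part
]
# unit.onfrom_sandbox(from_cmd) invokes the FROM_SANDBOX macro. When FROM_SANDBOX is
# called directly (without a placeholder), EXTERNAL_FILE keeps its empty default, and
# the new branch in ytest.py's onadd_check() drops the resulting empty check.external.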