author     dskor <dskor@yandex-team.com>  2023-03-24 11:39:32 +0300
committer  dskor <dskor@yandex-team.com>  2023-03-24 11:39:32 +0300
commit     dcf085f3d299207e41f1b9d99d8841d70e511129 (patch)
tree       d5068d24333e2d8aef0124c80707b16d43396f5c /build
parent     247cb5398c9261bc86473933a9b399624b7f9fbc (diff)
download   ydb-dcf085f3d299207e41f1b9d99d8841d70e511129.tar.gz
Make local directory resources the same as on distbuild
better
Diffstat (limited to 'build')
-rwxr-xr-x  build/scripts/fetch_from.py          3
-rwxr-xr-x  build/scripts/fetch_from_sandbox.py  9
2 files changed, 9 insertions, 3 deletions
diff --git a/build/scripts/fetch_from.py b/build/scripts/fetch_from.py
index 78306d1794..a73462e2d4 100755
--- a/build/scripts/fetch_from.py
+++ b/build/scripts/fetch_from.py
@@ -238,7 +238,8 @@ def fetch_url(url, unpack, resource_file_name, expected_md5=None, expected_sha1=
         with tarfile.open(tmp_file_name, mode="r|gz") as tar:
             tar.extractall(tmp_dir)
         tmp_file_name = os.path.join(tmp_dir, resource_file_name)
-        real_md5 = md5file(tmp_file_name)
+        if expected_md5:
+            real_md5 = md5file(tmp_file_name)
 
     logging.info('File size %s (expected %s)', real_file_size, expected_file_size or "UNKNOWN")
     logging.info('File md5 %s (expected %s)', real_md5, expected_md5)
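
Note on the hunk above: the md5 of the extracted entry is now recomputed only when Sandbox actually supplies an expected md5; streamed multifile tarballs have no single digest to compare against. A minimal sketch of that guarded check, with a standalone stand-in for the script's md5file helper (illustrative only, not the script's code):

    import hashlib
    import os

    def md5file(path, chunk_size=1 << 20):
        # Stream the file through hashlib so large resources do not have to fit in memory.
        digest = hashlib.md5()
        with open(path, 'rb') as stream:
            for chunk in iter(lambda: stream.read(chunk_size), b''):
                digest.update(chunk)
        return digest.hexdigest()

    def md5_of_extracted(tmp_dir, resource_file_name, expected_md5):
        # Mirrors the patched branch: hash the extracted entry only when there is
        # an expected md5 to compare it with; otherwise skip the extra disk pass.
        tmp_file_name = os.path.join(tmp_dir, resource_file_name)
        if expected_md5:
            return md5file(tmp_file_name)
        return None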
diff --git a/build/scripts/fetch_from_sandbox.py b/build/scripts/fetch_from_sandbox.py
index a99542e174..7a556be7da 100755
--- a/build/scripts/fetch_from_sandbox.py
+++ b/build/scripts/fetch_from_sandbox.py
@@ -146,10 +146,13 @@ def fetch(resource_id, custom_fetcher):
     logging.info('Resource %s info %s', str(resource_id), json.dumps(resource_info))
 
+    is_multifile = resource_info.get('multifile', False)
     resource_file_name = os.path.basename(resource_info["file_name"])
     expected_md5 = resource_info.get('md5')
 
     proxy_link = resource_info['http']['proxy'] + ORIGIN_SUFFIX
+    if is_multifile:
+        proxy_link += '&stream=tgz'
 
     mds_id = resource_info.get('attributes', {}).get('mds')
     mds_link = MDS_PREFIX + mds_id if mds_id else None
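
The proxy-link construction above, sketched on its own for clarity (the ORIGIN_SUFFIX value below is an assumed placeholder, not taken from the script):

    ORIGIN_SUFFIX = '?origin=fetch-from-sandbox'  # assumed example value

    def make_proxy_link(resource_info):
        # Multifile resources are fetched through the proxy as a gzipped tar stream,
        # so a whole directory comes back in a single request.
        link = resource_info['http']['proxy'] + ORIGIN_SUFFIX
        if resource_info.get('multifile', False):
            link += '&stream=tgz'
        return link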
@@ -172,14 +175,14 @@ def fetch(resource_id, custom_fetcher):
         yield lambda: fetch_via_script(custom_fetcher, resource_id)
 
         # Don't try too hard here: we will get back to proxy later on
-        yield lambda: fetch_from.fetch_url(proxy_link, False, resource_file_name, expected_md5, tries=2)
+        yield lambda: fetch_from.fetch_url(proxy_link, is_multifile, resource_file_name, expected_md5, tries=2)
         for x in get_storage_links():
             # Don't spend too much time connecting single host
             yield lambda: fetch_from.fetch_url(x, False, resource_file_name, expected_md5, tries=1)
             if mds_link is not None:
                 # Don't try too hard here: we will get back to MDS later on
                 yield lambda: fetch_from.fetch_url(mds_link, True, resource_file_name, expected_md5, tries=2)
-        yield lambda: fetch_from.fetch_url(proxy_link, False, resource_file_name, expected_md5)
+        yield lambda: fetch_from.fetch_url(proxy_link, is_multifile, resource_file_name, expected_md5)
         if mds_link is not None:
             yield lambda: fetch_from.fetch_url(mds_link, True, resource_file_name, expected_md5)
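
The hunk above changes two of the fetch attempts that the enclosing generator yields as zero-argument callables; for the proxy attempts, is_multifile is now passed as the unpack flag so a streamed tarball gets extracted. A rough sketch of how such a sequence of attempts can be drained (hypothetical driver, not the actual ya code):

    import itertools

    def run_fetch_attempts(attempts, max_tries=10):
        # 'attempts' is an iterable of zero-argument callables, like the lambdas
        # yielded in the diff above; try each in order until a download succeeds.
        last_error = None
        for attempt in itertools.islice(attempts, max_tries):
            try:
                return attempt()
            except Exception as error:
                last_error = error
        raise RuntimeError('all fetch attempts failed, last error: %r' % (last_error,))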
@@ -247,6 +250,8 @@ def main(args):
     if resource_info:
         fetched_file = args.resource_file
         file_name = resource_info['file_name']
+        if resource_info['multifile'] and os.path.isfile(fetched_file):
+            args.rename = [os.path.join(file_name, path) for path in args.rename]
     else:
         # This code should be merged to ya and removed.
         fetched_file, file_name = fetch(args.resource_id, custom_fetcher)
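
The main() change only applies to locally prefetched multifile resources: when the prefetched path is still a plain file rather than an unpacked directory, the rename targets are re-rooted under the resource's file_name. A small illustration of that re-rooting (hypothetical helper, names chosen for the example):

    import os

    def reroot_renames(rename, file_name):
        # Prefix every rename entry with the resource's top-level name so the
        # paths resolve inside the unpacked directory, e.g.
        # reroot_renames(['bin/tool'], 'my_resource') -> ['my_resource/bin/tool'].
        return [os.path.join(file_name, path) for path in rename]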