author	ilnurkh <ilnurkh@yandex-team.com>	2023-03-24 21:20:04 +0300
committer	ilnurkh <ilnurkh@yandex-team.com>	2023-03-24 21:20:04 +0300
commit	4ec807d707bcf5633133b072cb006494dc0b3211 (patch)
tree	6128eb759eed9b8b025ca717f6712abe32136475 /build
parent	75afa64ae6543777e6f3b3fe4122490811f027e6 (diff)
download	ydb-4ec807d707bcf5633133b072cb006494dc0b3211.tar.gz
Revert commit rXXXXXX
Diffstat (limited to 'build')
-rwxr-xr-x	build/scripts/fetch_from.py	3
-rwxr-xr-x	build/scripts/fetch_from_sandbox.py	9
2 files changed, 3 insertions, 9 deletions
diff --git a/build/scripts/fetch_from.py b/build/scripts/fetch_from.py
index a73462e2d4..78306d1794 100755
--- a/build/scripts/fetch_from.py
+++ b/build/scripts/fetch_from.py
@@ -238,8 +238,7 @@ def fetch_url(url, unpack, resource_file_name, expected_md5=None, expected_sha1=
         with tarfile.open(tmp_file_name, mode="r|gz") as tar:
             tar.extractall(tmp_dir)
         tmp_file_name = os.path.join(tmp_dir, resource_file_name)
-        if expected_md5:
-            real_md5 = md5file(tmp_file_name)
+        real_md5 = md5file(tmp_file_name)
 
     logging.info('File size %s (expected %s)', real_file_size, expected_file_size or "UNKNOWN")
     logging.info('File md5 %s (expected %s)', real_md5, expected_md5)
diff --git a/build/scripts/fetch_from_sandbox.py b/build/scripts/fetch_from_sandbox.py
index 7a556be7da..a99542e174 100755
--- a/build/scripts/fetch_from_sandbox.py
+++ b/build/scripts/fetch_from_sandbox.py
@@ -146,13 +146,10 @@ def fetch(resource_id, custom_fetcher):
 
     logging.info('Resource %s info %s', str(resource_id), json.dumps(resource_info))
 
-    is_multifile = resource_info.get('multifile', False)
     resource_file_name = os.path.basename(resource_info["file_name"])
     expected_md5 = resource_info.get('md5')
 
     proxy_link = resource_info['http']['proxy'] + ORIGIN_SUFFIX
-    if is_multifile:
-        proxy_link += '&stream=tgz'
 
     mds_id = resource_info.get('attributes', {}).get('mds')
     mds_link = MDS_PREFIX + mds_id if mds_id else None
@@ -175,14 +172,14 @@ def fetch(resource_id, custom_fetcher):
             yield lambda: fetch_via_script(custom_fetcher, resource_id)
 
         # Don't try too hard here: we will get back to proxy later on
-        yield lambda: fetch_from.fetch_url(proxy_link, is_multifile, resource_file_name, expected_md5, tries=2)
+        yield lambda: fetch_from.fetch_url(proxy_link, False, resource_file_name, expected_md5, tries=2)
         for x in get_storage_links():
             # Don't spend too much time connecting single host
             yield lambda: fetch_from.fetch_url(x, False, resource_file_name, expected_md5, tries=1)
             if mds_link is not None:
                 # Don't try too hard here: we will get back to MDS later on
                 yield lambda: fetch_from.fetch_url(mds_link, True, resource_file_name, expected_md5, tries=2)
 
-        yield lambda: fetch_from.fetch_url(proxy_link, is_multifile, resource_file_name, expected_md5)
+        yield lambda: fetch_from.fetch_url(proxy_link, False, resource_file_name, expected_md5)
         if mds_link is not None:
             yield lambda: fetch_from.fetch_url(mds_link, True, resource_file_name, expected_md5)
@@ -250,8 +247,6 @@ def main(args):
     if resource_info:
         fetched_file = args.resource_file
         file_name = resource_info['file_name']
-        if resource_info['multifile'] and os.path.isfile(fetched_file):
-            args.rename = [os.path.join(file_name, path) for path in args.rename]
     else:
         # This code should be merged to ya and removed.
         fetched_file, file_name = fetch(args.resource_id, custom_fetcher)