author     v-korovin <v-korovin@yandex-team.com>  2024-09-26 10:35:27 +0300
committer  v-korovin <v-korovin@yandex-team.com>  2024-09-26 10:44:46 +0300
commit     c9843510b39357d5510030691beecfab56e3bd17 (patch)
tree       990009b83044e936303dec6b770033b46f59c6fc /build/scripts/fetch_from.py
parent     1339bbde1687911362cf2a244869e30003508986 (diff)
download   ydb-c9843510b39357d5510030691beecfab56e3bd17.tar.gz
futurize build/scripts
commit_hash:41fb885eb1e03094e65521671349e66f4225321b
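
The commit makes fetch_from.py importable under both Python 2 and Python 3. The key device is the import shim that binds urllib2 (Python 2) or urllib.request (Python 3) to the single name urllib_request and takes HTTPError/URLError from whichever module provides them. A minimal, self-contained sketch of that pattern follows; the fetch() helper and its User-Agent value are illustrative only and are not part of the script:

# Sketch of the Python 2/3 import shim applied by this commit.
from __future__ import print_function

import sys

try:
    import urllib2 as urllib_request              # Python 2
    from urllib2 import HTTPError, URLError
except ImportError:                               # Python 3 raises ModuleNotFoundError,
    import urllib.request as urllib_request      # a subclass of ImportError
    from urllib.error import HTTPError, URLError


def fetch(url):
    # Illustrative helper: Request/urlopen exist under both aliases,
    # so the calling code is identical on either interpreter.
    request = urllib_request.Request(url, headers={'User-Agent': 'example-agent'})
    try:
        return urllib_request.urlopen(request, timeout=30).read()
    except (HTTPError, URLError) as e:
        print('fetch failed: {}'.format(e), file=sys.stderr)
        raise

The diff below catches (ImportError, ModuleNotFoundError); the extra name is harmless on Python 2 because an except clause is only evaluated when the import actually fails, which happens only on Python 3 where ModuleNotFoundError exists.
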
Diffstat (limited to 'build/scripts/fetch_from.py')
-rwxr-xr-x  build/scripts/fetch_from.py  36
1 file changed, 23 insertions(+), 13 deletions(-)
diff --git a/build/scripts/fetch_from.py b/build/scripts/fetch_from.py
index ca3e7b3684..bd27d75d8e 100755
--- a/build/scripts/fetch_from.py
+++ b/build/scripts/fetch_from.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
import datetime as dt
import errno
import hashlib
@@ -11,7 +13,15 @@ import socket
import string
import sys
import tarfile
-import urllib2
+
+try:
+    # Python 2
+    import urllib2 as urllib_request
+    from urllib2 import HTTPError, URLError
+except (ImportError, ModuleNotFoundError):
+    # Python 3
+    import urllib.request as urllib_request
+    from urllib.error import HTTPError, URLError

import retry
@@ -122,12 +132,12 @@ def setup_logging(args, base_name):
def is_temporary(e):
    def is_broken(e):
-        return isinstance(e, urllib2.HTTPError) and e.code in (410, 404)
+        return isinstance(e, HTTPError) and e.code in (410, 404)

    if is_broken(e):
        return False

-    if isinstance(e, (BadChecksumFetchError, IncompleteFetchError, urllib2.URLError, socket.error)):
+    if isinstance(e, (BadChecksumFetchError, IncompleteFetchError, URLError, socket.error)):
        return True

    import error
@@ -147,7 +157,7 @@ def report_to_snowden(value):
        'value': json.dumps(value),
    }

-    urllib2.urlopen(
+    urllib_request.urlopen(
        'https://back-snowden.qloud.yandex-team.ru/report/add',
        json.dumps(
            [
@@ -198,8 +208,8 @@ def git_like_hash_with_size(filepath):
            file_size += len(block)
            sha.update(block)

-    sha.update('\0')
-    sha.update(str(file_size))
+    sha.update(b'\0')
+    sha.update(str(file_size).encode('utf-8'))

    return sha.hexdigest(), file_size
@@ -213,9 +223,9 @@ def size_printer(display_name, size):
        now = dt.datetime.now()
        if last_stamp[0] + dt.timedelta(seconds=10) < now:
            if size:
-                print >> sys.stderr, "##status##{} - [[imp]]{:.1f}%[[rst]]".format(
+                print("##status##{} - [[imp]]{:.1f}%[[rst]]".format(
                    display_name, 100.0 * sz[0] / size if size else 0
-                )
+                ), file=sys.stderr)
            last_stamp[0] = now

    return printer
@@ -225,9 +235,9 @@ def fetch_url(url, unpack, resource_file_name, expected_md5=None, expected_sha1=
    logging.info('Downloading from url %s name %s and expected md5 %s', url, resource_file_name, expected_md5)
    tmp_file_name = uniq_string_generator()

-    request = urllib2.Request(url, headers={'User-Agent': make_user_agent()})
-    req = retry.retry_func(lambda: urllib2.urlopen(request, timeout=30), tries=tries, delay=5, backoff=1.57079)
-    logging.debug('Headers: %s', req.headers.headers)
+    request = urllib_request.Request(url, headers={'User-Agent': make_user_agent()})
+    req = retry.retry_func(lambda: urllib_request.urlopen(request, timeout=30), tries=tries, delay=5, backoff=1.57079)
+    logging.debug('Headers: %s', req.headers)
    expected_file_size = int(req.headers.get('Content-Length', 0))
    real_md5 = hashlib.md5()
    real_sha1 = hashlib.sha1()
@@ -244,8 +254,8 @@ def fetch_url(url, unpack, resource_file_name, expected_md5=None, expected_sha1=
    real_md5 = real_md5.hexdigest()
    real_file_size = os.path.getsize(tmp_file_name)

-    real_sha1.update('\0')
-    real_sha1.update(str(real_file_size))
+    real_sha1.update(b'\0')
+    real_sha1.update(str(real_file_size).encode('utf-8'))
    real_sha1 = real_sha1.hexdigest()

    if unpack:
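
The remaining hunks handle two other Python 3 boundaries visible above: hashlib objects accept only bytes (hence b'\0' and .encode('utf-8')), and print is a function (hence file=sys.stderr). A minimal sketch of the same hashing convention, contents followed by a NUL byte and the decimal file size, assuming a standalone helper rather than the script's own git_like_hash_with_size():

# Sketch of the bytes-safe "git-like" hashing used by the script;
# git_like_hash() and the command-line usage below are illustrative.
from __future__ import print_function

import hashlib
import sys


def git_like_hash(file_path):
    sha = hashlib.sha1()
    size = 0
    with open(file_path, 'rb') as f:                      # binary mode yields bytes on both versions
        for block in iter(lambda: f.read(2 ** 16), b''):
            size += len(block)
            sha.update(block)
    sha.update(b'\0')                                     # a str argument raises TypeError on Python 3
    sha.update(str(size).encode('utf-8'))                 # the size is hashed as ASCII digits
    return sha.hexdigest(), size


if __name__ == '__main__':
    digest, size = git_like_hash(sys.argv[1])
    print('{} {}'.format(digest, size), file=sys.stderr)  # print_function makes this valid on Python 2

The logging change from req.headers.headers to req.headers is needed because only Python 2's response headers object exposes a raw .headers list; logging the headers object itself works on either interpreter.
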