author | Aleksandr <ivansduck@gmail.com> | 2022-02-10 16:47:52 +0300
committer | Daniil Cherednik <dcherednik@yandex-team.ru> | 2022-02-10 16:47:52 +0300
commit | b05913d1c3c02a773578bceb7285084d2933ae86 (patch)
tree | c0748b5dcbade83af788c0abfa89c0383d6b779c /build/scripts
parent | ea6c5b7f172becca389cacaff7d5f45f6adccbe6 (diff)
download | ydb-b05913d1c3c02a773578bceb7285084d2933ae86.tar.gz
Restoring authorship annotation for Aleksandr <ivansduck@gmail.com>. Commit 2 of 2.
Diffstat (limited to 'build/scripts')
24 files changed, 504 insertions, 504 deletions
diff --git a/build/scripts/append_file.py b/build/scripts/append_file.py index d7fd7b30fd..6b5d53bc71 100644 --- a/build/scripts/append_file.py +++ b/build/scripts/append_file.py @@ -5,5 +5,5 @@ if __name__ == "__main__": file_path = sys.argv[1] with open(file_path, "a") as f: - for text in sys.argv[2:]: - print >>f, text + for text in sys.argv[2:]: + print >>f, text diff --git a/build/scripts/clang_tidy.py b/build/scripts/clang_tidy.py index cc8f88e70c..eb1b690ee9 100644 --- a/build/scripts/clang_tidy.py +++ b/build/scripts/clang_tidy.py @@ -1,11 +1,11 @@ import argparse -import contextlib +import contextlib import json -import os -import re -import shutil +import os +import re +import shutil import sys -import tempfile +import tempfile import subprocess @@ -20,12 +20,12 @@ def setup_script(args): def parse_args(): parser = argparse.ArgumentParser() - parser.add_argument("--testing-src", required=True) - parser.add_argument("--clang-tidy-bin", required=True) + parser.add_argument("--testing-src", required=True) + parser.add_argument("--clang-tidy-bin", required=True) parser.add_argument("--config-validation-script", required=True) parser.add_argument("--ymake-python", required=True) - parser.add_argument("--tidy-json", required=True) - parser.add_argument("--source-root", required=True) + parser.add_argument("--tidy-json", required=True) + parser.add_argument("--source-root", required=True) parser.add_argument("--build-root", required=True) parser.add_argument("--default-config-file", required=True) parser.add_argument("--project-config-file", required=True) @@ -35,27 +35,27 @@ def parse_args(): return parser.parse_known_args() -def generate_compilation_database(clang_cmd, source_root, filename, path): - compile_database = [ - { - "file": filename, +def generate_compilation_database(clang_cmd, source_root, filename, path): + compile_database = [ + { + "file": filename, "command": subprocess.list2cmdline(clang_cmd), - "directory": source_root, - } - ] - compilation_database_json = os.path.join(path, "compile_commands.json") - with open(compilation_database_json, "w") as afile: + "directory": source_root, + } + ] + compilation_database_json = os.path.join(path, "compile_commands.json") + with open(compilation_database_json, "w") as afile: json.dump(compile_database, afile) return compilation_database_json -@contextlib.contextmanager -def gen_tmpdir(): - path = tempfile.mkdtemp() - yield path - shutil.rmtree(path) - - +@contextlib.contextmanager +def gen_tmpdir(): + path = tempfile.mkdtemp() + yield path + shutil.rmtree(path) + + @contextlib.contextmanager def gen_tmpfile(): _, path = tempfile.mkstemp() @@ -63,21 +63,21 @@ def gen_tmpfile(): os.remove(path) -def load_profile(path): - if os.path.exists(path): - files = os.listdir(path) - if len(files) == 1: - with open(os.path.join(path, files[0])) as afile: - return json.load(afile)["profile"] - elif len(files) > 1: - return { - "error": "found several profile files: {}".format(files), - } - return { - "error": "profile file is missing", - } - - +def load_profile(path): + if os.path.exists(path): + files = os.listdir(path) + if len(files) == 1: + with open(os.path.join(path, files[0])) as afile: + return json.load(afile)["profile"] + elif len(files) > 1: + return { + "error": "found several profile files: {}".format(files), + } + return { + "error": "profile file is missing", + } + + def load_fixes(path): if os.path.exists(path): with open(path, 'r') as afile: @@ -125,46 +125,46 @@ def main(): filter_configs(args.project_config_file, 
filtered_config) result_config_file = tidy_config_validation.merge_tidy_configs(base_config_path=args.default_config_file, additional_config_path=filtered_config, result_config_path=result_config) compile_command_path = generate_compilation_database(clang_cmd, args.source_root, args.testing_src, db_tmpdir) - cmd = [ - clang_tidy_bin, - args.testing_src, - "-p", - compile_command_path, - "--warnings-as-errors", - "*", - "--config-file", + cmd = [ + clang_tidy_bin, + args.testing_src, + "-p", + compile_command_path, + "--warnings-as-errors", + "*", + "--config-file", result_config_file, - "--header-filter", - header_filter, - "--use-color", - "--enable-check-profile", + "--header-filter", + header_filter, + "--use-color", + "--enable-check-profile", "--store-check-profile={}".format(profile_tmpdir), - ] + ] if args.export_fixes == "yes": cmd += ["--export-fixes", fixes_file] if args.checks: cmd += ["--checks", args.checks] - res = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out, err = res.communicate() - exit_code = res.returncode + res = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = res.communicate() + exit_code = res.returncode profile = load_profile(profile_tmpdir) testing_src = os.path.relpath(args.testing_src, args.source_root) tidy_fixes = load_fixes(fixes_file) - - with open(output_json, "wb") as afile: - json.dump( - { + + with open(output_json, "wb") as afile: + json.dump( + { "file": testing_src, - "exit_code": exit_code, - "profile": profile, - "stderr": err, - "stdout": out, + "exit_code": exit_code, + "profile": profile, + "stderr": err, + "stdout": out, "fixes": tidy_fixes, - }, - afile, - ) - + }, + afile, + ) + if __name__ == "__main__": main() diff --git a/build/scripts/compile_cuda.py b/build/scripts/compile_cuda.py index bf85ae053c..c0bec50b2a 100644 --- a/build/scripts/compile_cuda.py +++ b/build/scripts/compile_cuda.py @@ -45,27 +45,27 @@ def main(): cflags.append('-fopenmp') cflags.remove('-fopenmp=libomp') - skip_list = [ - '-gline-tables-only', - # clang coverage - '-fprofile-instr-generate', - '-fcoverage-mapping', + skip_list = [ + '-gline-tables-only', + # clang coverage + '-fprofile-instr-generate', + '-fcoverage-mapping', '/Zc:inline', # disable unreferenced functions (kernel registrators) remove '-Wno-c++17-extensions', '-flto', '-faligned-allocation', - ] - + ] + if skip_nocxxinc: skip_list.append('-nostdinc++') - for flag in skip_list: + for flag in skip_list: if flag in cflags: cflags.remove(flag) skip_prefix_list = [ '-fsanitize=', - '-fsanitize-coverage=', + '-fsanitize-coverage=', '-fsanitize-blacklist=', '--system-header-prefix', ] diff --git a/build/scripts/configure_file.py b/build/scripts/configure_file.py index 6d434c3e8c..1873ed70eb 100755 --- a/build/scripts/configure_file.py +++ b/build/scripts/configure_file.py @@ -53,7 +53,7 @@ if __name__ == "__main__": usage() varDict = {} for x in sys.argv[3:]: - key, value = str(x).split('=', 1) + key, value = str(x).split('=', 1) varDict[key] = value main(sys.argv[1], sys.argv[2], varDict) diff --git a/build/scripts/copy_to_dir.py b/build/scripts/copy_to_dir.py index 53f3207bb7..9baeb5ffac 100644 --- a/build/scripts/copy_to_dir.py +++ b/build/scripts/copy_to_dir.py @@ -34,7 +34,7 @@ def hardlink_or_copy(src, dst): if e.errno == errno.EEXIST: return elif e.errno == errno.EXDEV: - sys.stderr.write("Can't make cross-device hardlink - fallback to copy: {} -> {}\n".format(src, dst)) + sys.stderr.write("Can't make cross-device hardlink - fallback to 
copy: {} -> {}\n".format(src, dst)) shutil.copy(src, dst) else: raise diff --git a/build/scripts/coverage-info.py b/build/scripts/coverage-info.py index d3bf13c4e7..94491d9256 100644 --- a/build/scripts/coverage-info.py +++ b/build/scripts/coverage-info.py @@ -149,7 +149,7 @@ def gen_info_global(cmd, cov_info, probe_path, update_stat, lcov_args): lcov_args.append(cov_info) -def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files): +def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files): with tarfile.open(gcno_archive) as gcno_tf: for gcno_item in gcno_tf: if gcno_item.isfile() and gcno_item.name.endswith(GCNO_EXT): @@ -157,13 +157,13 @@ def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_execut gcno_name = gcno_item.name source_fname = gcno_name[:-len(GCNO_EXT)] - if prefix_filter and not source_fname.startswith(prefix_filter): - sys.stderr.write("Skipping {} (doesn't match prefix '{}')\n".format(source_fname, prefix_filter)) - continue - if exclude_files and exclude_files.search(source_fname): - sys.stderr.write("Skipping {} (matched exclude pattern '{}')\n".format(source_fname, exclude_files.pattern)) - continue - + if prefix_filter and not source_fname.startswith(prefix_filter): + sys.stderr.write("Skipping {} (doesn't match prefix '{}')\n".format(source_fname, prefix_filter)) + continue + if exclude_files and exclude_files.search(source_fname): + sys.stderr.write("Skipping {} (matched exclude pattern '{}')\n".format(source_fname, exclude_files.pattern)) + continue + fname2gcno[source_fname] = gcno_name if os.path.getsize(gcno_name) > 0: @@ -234,7 +234,7 @@ def main(source_root, output, gcno_archive, gcda_archive, gcov_tool, prefix_filt def gen_info(cmd, cov_info): gen_info_global(cmd, cov_info, probe_path, update_stat, lcov_args) - init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files) + init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files) process_all_coverage_files(gcda_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info) if coverage_report_path: diff --git a/build/scripts/create_jcoverage_report.py b/build/scripts/create_jcoverage_report.py index cb7918ff04..45083ff4f7 100644 --- a/build/scripts/create_jcoverage_report.py +++ b/build/scripts/create_jcoverage_report.py @@ -3,7 +3,7 @@ import tarfile import zipfile import os import sys -import time +import time import subprocess @@ -14,23 +14,23 @@ def mkdir_p(path): pass -class Timer(object): - - def __init__(self): - self.start = time.time() - - def step(self, msg): - sys.stderr.write("{} ({}s)\n".format(msg, int(time.time() - self.start))) - self.start = time.time() - - +class Timer(object): + + def __init__(self): + self.start = time.time() + + def step(self, msg): + sys.stderr.write("{} ({}s)\n".format(msg, int(time.time() - self.start))) + self.start = time.time() + + def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_format, tar_output, agent_disposition, runners_paths): - timer = Timer() + timer = Timer() reports_dir = 'jacoco_reports_dir' mkdir_p(reports_dir) with tarfile.open(source) as tf: tf.extractall(reports_dir) - timer.step("Coverage data extracted") + timer.step("Coverage data extracted") reports = [os.path.join(reports_dir, fname) 
for fname in os.listdir(reports_dir)] with open(jars_list) as f: @@ -52,10 +52,10 @@ def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_ if jar.endswith('devtools-jacoco-agent.jar'): agent_disposition = jar - # Skip java contrib - it's irrelevant coverage - if jar.startswith('contrib/java'): - continue - + # Skip java contrib - it's irrelevant coverage + if jar.startswith('contrib/java'): + continue + with zipfile.ZipFile(jar) as jf: for entry in jf.infolist(): if entry.filename.endswith('.java'): @@ -67,35 +67,35 @@ def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_ else: continue - entry.filename = entry.filename.encode('utf-8') + entry.filename = entry.filename.encode('utf-8') jf.extract(entry, dest) - timer.step("Jar files extracted") + timer.step("Jar files extracted") if not agent_disposition: print>>sys.stderr, 'Can\'t find jacoco agent. Will not generate html report for java coverage.' - if tar_output: - report_dir = 'java.report.temp' - else: - report_dir = output + if tar_output: + report_dir = 'java.report.temp' + else: + report_dir = output mkdir_p(report_dir) if agent_disposition: - agent_cmd = [java, '-jar', agent_disposition, src_dir, cls_dir, prefix_filter or '.', exclude_filter or '__no_exclude__', report_dir, output_format] + agent_cmd = [java, '-jar', agent_disposition, src_dir, cls_dir, prefix_filter or '.', exclude_filter or '__no_exclude__', report_dir, output_format] agent_cmd += reports subprocess.check_call(agent_cmd) - timer.step("Jacoco finished") + timer.step("Jacoco finished") - if tar_output: - with tarfile.open(output, 'w') as outf: - outf.add(report_dir, arcname='.') + if tar_output: + with tarfile.open(output, 'w') as outf: + outf.add(report_dir, arcname='.') if __name__ == '__main__': - if 'LC_ALL' in os.environ: - if os.environ['LC_ALL'] == 'C': - os.environ['LC_ALL'] = 'en_GB.UTF-8' - + if 'LC_ALL' in os.environ: + if os.environ['LC_ALL'] == 'C': + os.environ['LC_ALL'] = 'en_GB.UTF-8' + parser = argparse.ArgumentParser() parser.add_argument('--source', action='store') @@ -104,9 +104,9 @@ if __name__ == '__main__': parser.add_argument('--prefix-filter', action='store') parser.add_argument('--exclude-filter', action='store') parser.add_argument('--jars-list', action='store') - parser.add_argument('--output-format', action='store', default="html") - parser.add_argument('--raw-output', dest='tar_output', action='store_false', default=True) - parser.add_argument('--agent-disposition', action='store') + parser.add_argument('--output-format', action='store', default="html") + parser.add_argument('--raw-output', dest='tar_output', action='store_false', default=True) + parser.add_argument('--agent-disposition', action='store') parser.add_argument('--runner-path', dest='runners_paths', action='append', default=[]) args = parser.parse_args() main(**vars(args)) diff --git a/build/scripts/error.py b/build/scripts/error.py index 6e4256e5c2..f7d8ecb2cc 100644 --- a/build/scripts/error.py +++ b/build/scripts/error.py @@ -1,19 +1,19 @@ -# Sync content of this file with devtools/ya/core/error/__init__.py - +# Sync content of this file with devtools/ya/core/error/__init__.py + TEMPORARY_ERROR_MESSAGES = [ - 'Connection reset by peer', - 'Connection timed out', - 'Function not implemented', - 'I/O operation on closed file', - 'Internal Server Error', - 'Network connection closed unexpectedly', + 'Connection reset by peer', + 'Connection timed out', + 'Function not implemented', + 'I/O operation on closed file', + 
'Internal Server Error', + 'Network connection closed unexpectedly', 'Network is unreachable', 'No route to host', - 'No space left on device', - 'Not enough space', - 'Temporary failure in name resolution', + 'No space left on device', + 'Not enough space', + 'Temporary failure in name resolution', 'The read operation timed out', - 'timeout: timed out', + 'timeout: timed out', ] @@ -23,55 +23,55 @@ class ExitCodes(object): COMPILATION_FAILED = 11 INFRASTRUCTURE_ERROR = 12 NOT_RETRIABLE_ERROR = 13 - YT_STORE_FETCH_ERROR = 14 + YT_STORE_FETCH_ERROR = 14 def merge_exit_codes(exit_codes): - return max(e if e >= 0 else 1 for e in exit_codes) if exit_codes else 0 + return max(e if e >= 0 else 1 for e in exit_codes) if exit_codes else 0 def is_temporary_error(exc): - import logging - logger = logging.getLogger(__name__) - + import logging + logger = logging.getLogger(__name__) + if getattr(exc, 'temporary', False): - logger.debug("Exception has temporary attribute: %s", exc) + logger.debug("Exception has temporary attribute: %s", exc) return True import errno err = getattr(exc, 'errno', None) if err == errno.ECONNREFUSED or err == errno.ENETUNREACH: - logger.debug("Exception has errno attribute: %s (errno=%s)", exc, err) + logger.debug("Exception has errno attribute: %s (errno=%s)", exc, err) return True import socket if isinstance(exc, socket.timeout) or isinstance(getattr(exc, 'reason', None), socket.timeout): - logger.debug("Socket timeout exception: %s", exc) + logger.debug("Socket timeout exception: %s", exc) return True if isinstance(exc, socket.gaierror): - logger.debug("Getaddrinfo exception: %s", exc) + logger.debug("Getaddrinfo exception: %s", exc) + return True + + import urllib2 + + if isinstance(exc, urllib2.HTTPError) and exc.code in (429, ): + logger.debug("urllib2.HTTPError: %s", exc) return True - import urllib2 - - if isinstance(exc, urllib2.HTTPError) and exc.code in (429, ): - logger.debug("urllib2.HTTPError: %s", exc) - return True - import httplib if isinstance(exc, httplib.IncompleteRead): - logger.debug("IncompleteRead exception: %s", exc) + logger.debug("IncompleteRead exception: %s", exc) return True exc_str = str(exc) for message in TEMPORARY_ERROR_MESSAGES: if message in exc_str: - logger.debug("Found temporary error pattern (%s): %s", message, exc_str) + logger.debug("Found temporary error pattern (%s): %s", message, exc_str) return True return False diff --git a/build/scripts/fetch_from.py b/build/scripts/fetch_from.py index bbca65219f..db4fea50bf 100755 --- a/build/scripts/fetch_from.py +++ b/build/scripts/fetch_from.py @@ -1,19 +1,19 @@ -import datetime as dt -import errno +import datetime as dt +import errno import hashlib -import json -import logging -import os +import json +import logging +import os import platform import random -import shutil -import socket +import shutil +import socket import string import sys -import tarfile -import urllib2 +import tarfile +import urllib2 -import retry +import retry def make_user_agent(): @@ -29,7 +29,7 @@ def add_common_arguments(parser): parser.add_argument('--executable', action='store_true', help='make outputs executable') parser.add_argument('--log-path') parser.add_argument('-v', '--verbose', action='store_true', default=os.environ.get('YA_VERBOSE_FETCHER'), help='increase stderr verbosity') - parser.add_argument('outputs', nargs='*', default=[]) + parser.add_argument('outputs', nargs='*', default=[]) def ensure_dir(path): @@ -37,7 +37,7 @@ def ensure_dir(path): os.makedirs(path) -# Reference code: 
library/python/fs/__init__.py +# Reference code: library/python/fs/__init__.py def hardlink_or_copy(src, dst): ensure_dir(os.path.dirname(dst)) @@ -49,23 +49,23 @@ def hardlink_or_copy(src, dst): except OSError as e: if e.errno == errno.EEXIST: return - elif e.errno in (errno.EXDEV, errno.EMLINK, errno.EINVAL, errno.EACCES): - sys.stderr.write("Can't make hardlink (errno={}) - fallback to copy: {} -> {}\n".format(e.errno, src, dst)) + elif e.errno in (errno.EXDEV, errno.EMLINK, errno.EINVAL, errno.EACCES): + sys.stderr.write("Can't make hardlink (errno={}) - fallback to copy: {} -> {}\n".format(e.errno, src, dst)) shutil.copy(src, dst) else: raise -def rename_or_copy_and_remove(src, dst): +def rename_or_copy_and_remove(src, dst): ensure_dir(os.path.dirname(dst)) - - try: - os.rename(src, dst) - except OSError: + + try: + os.rename(src, dst) + except OSError: shutil.copy(src, dst) - os.remove(src) - - + os.remove(src) + + class BadChecksumFetchError(Exception): pass @@ -114,17 +114,17 @@ def is_temporary(e): def is_broken(e): return isinstance(e, urllib2.HTTPError) and e.code in (410, 404) - if is_broken(e): - return False + if is_broken(e): + return False + + if isinstance(e, (BadChecksumFetchError, IncompleteFetchError, urllib2.URLError, socket.error)): + return True + + import error + + return error.is_temporary_error(e) - if isinstance(e, (BadChecksumFetchError, IncompleteFetchError, urllib2.URLError, socket.error)): - return True - import error - - return error.is_temporary_error(e) - - def uniq_string_generator(size=6, chars=string.ascii_lowercase + string.digits): return ''.join(random.choice(chars) for _ in range(size)) @@ -292,28 +292,28 @@ def fetch_url(url, unpack, resource_file_name, expected_md5=None, expected_sha1= return tmp_file_name -def chmod(filename, mode): +def chmod(filename, mode): if platform.system().lower() == 'windows': # https://docs.microsoft.com/en-us/windows/win32/fileio/hard-links-and-junctions: # hard to reset read-only attribute for removal if there are multiple hardlinks return - stat = os.stat(filename) - if stat.st_mode & 0o777 != mode: - try: - os.chmod(filename, mode) - except OSError: + stat = os.stat(filename) + if stat.st_mode & 0o777 != mode: + try: + os.chmod(filename, mode) + except OSError: import pwd - sys.stderr.write("{} st_mode: {} pwuid: {}\n".format(filename, stat.st_mode, pwd.getpwuid(os.stat(filename).st_uid))) - raise - - + sys.stderr.write("{} st_mode: {} pwuid: {}\n".format(filename, stat.st_mode, pwd.getpwuid(os.stat(filename).st_uid))) + raise + + def process(fetched_file, file_name, args, remove=True): assert len(args.rename) <= len(args.outputs), ( 'too few outputs to rename', args.rename, 'into', args.outputs) - # Forbid changes to the loaded resource - chmod(fetched_file, 0o444) - + # Forbid changes to the loaded resource + chmod(fetched_file, 0o444) + if not os.path.isfile(fetched_file): raise ResourceIsDirectoryError('Resource must be a file, not a directory: %s' % fetched_file) @@ -332,16 +332,16 @@ def process(fetched_file, file_name, args, remove=True): if args.untar_to: ensure_dir(args.untar_to) - # Extract only requested files + # Extract only requested files try: with tarfile.open(fetched_file, mode='r:*') as tar: - inputs = set(map(os.path.normpath, args.rename + args.outputs[len(args.rename):])) - members = [entry for entry in tar if os.path.normpath(os.path.join(args.untar_to, entry.name)) in inputs] - tar.extractall(args.untar_to, members=members) - # Forbid changes to the loaded resource data - for root, _, files in 
os.walk(args.untar_to): - for filename in files: - chmod(os.path.join(root, filename), 0o444) + inputs = set(map(os.path.normpath, args.rename + args.outputs[len(args.rename):])) + members = [entry for entry in tar if os.path.normpath(os.path.join(args.untar_to, entry.name)) in inputs] + tar.extractall(args.untar_to, members=members) + # Forbid changes to the loaded resource data + for root, _, files in os.walk(args.untar_to): + for filename in files: + chmod(os.path.join(root, filename), 0o444) except tarfile.ReadError as e: logging.exception(e) raise ResourceUnpackingError('File {} cannot be untared'.format(fetched_file)) @@ -354,12 +354,12 @@ def process(fetched_file, file_name, args, remove=True): hardlink_or_copy(src, dst) else: logging.info('Renaming %s to %s', src, dst) - if os.path.exists(dst): - raise ResourceUnpackingError("Target file already exists ({} -> {})".format(src, dst)) + if os.path.exists(dst): + raise ResourceUnpackingError("Target file already exists ({} -> {})".format(src, dst)) if remove: rename_or_copy_and_remove(src, dst) else: - hardlink_or_copy(src, dst) + hardlink_or_copy(src, dst) for path in args.outputs: if not os.path.exists(path): @@ -367,9 +367,9 @@ def process(fetched_file, file_name, args, remove=True): if not os.path.isfile(path): raise OutputIsDirectoryError('Output must be a file, not a directory: %s' % os.path.abspath(path)) if args.executable: - chmod(path, os.stat(path).st_mode | 0o111) + chmod(path, os.stat(path).st_mode | 0o111) if os.path.abspath(path) == os.path.abspath(fetched_file): remove = False - + if remove: os.remove(fetched_file) diff --git a/build/scripts/fetch_from_archive.py b/build/scripts/fetch_from_archive.py index 765a3004f6..57aff91b5e 100644 --- a/build/scripts/fetch_from_archive.py +++ b/build/scripts/fetch_from_archive.py @@ -31,6 +31,6 @@ if __name__ == '__main__': logging.exception(e) print >>sys.stderr, open(args.abs_log_path).read() sys.stderr.flush() - - import error - sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1) + + import error + sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1) diff --git a/build/scripts/fetch_from_external.py b/build/scripts/fetch_from_external.py index cf3c967a49..d4ed6f4221 100644 --- a/build/scripts/fetch_from_external.py +++ b/build/scripts/fetch_from_external.py @@ -55,6 +55,6 @@ if __name__ == '__main__': logging.exception(e) print >>sys.stderr, open(args.abs_log_path).read() sys.stderr.flush() - - import error - sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1) + + import error + sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1) diff --git a/build/scripts/fetch_from_mds.py b/build/scripts/fetch_from_mds.py index 7ee05b7c2e..5e4e656394 100644 --- a/build/scripts/fetch_from_mds.py +++ b/build/scripts/fetch_from_mds.py @@ -45,6 +45,6 @@ if __name__ == '__main__': logging.exception(e) print >>sys.stderr, open(args.abs_log_path).read() sys.stderr.flush() - - import error - sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1) + + import error + sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1) diff --git a/build/scripts/fetch_from_sandbox.py b/build/scripts/fetch_from_sandbox.py index 511d7bf9dd..a99542e174 100755 --- a/build/scripts/fetch_from_sandbox.py +++ b/build/scripts/fetch_from_sandbox.py @@ -106,13 +106,13 @@ def _urlopen(url, data=None, headers=None): time.sleep(retry_after) -def 
_query(url): - return json.loads(_urlopen(url)) - - +def _query(url): + return json.loads(_urlopen(url)) + + _SANDBOX_BASE_URL = 'https://sandbox.yandex-team.ru/api/v1.0' - - + + def get_resource_info(resource_id, touch=False, no_links=False): url = ''.join((_SANDBOX_BASE_URL, '/resource/', str(resource_id))) headers = {} @@ -136,10 +136,10 @@ def fetch(resource_id, custom_fetcher): try: resource_info = get_resource_info(resource_id, touch=True, no_links=True) except Exception as e: - sys.stderr.write( - "Failed to fetch resource {}: {}\n".format(resource_id, str(e)) + sys.stderr.write( + "Failed to fetch resource {}: {}\n".format(resource_id, str(e)) ) - raise + raise if resource_info.get('state', 'DELETED') != 'READY': raise ResourceInfoError("Resource {} is not READY".format(resource_id)) @@ -264,6 +264,6 @@ if __name__ == '__main__': logging.exception(e) print >>sys.stderr, open(args.abs_log_path).read() sys.stderr.flush() - - import error - sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1) + + import error + sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1) diff --git a/build/scripts/go_tool.py b/build/scripts/go_tool.py index 5e5ba8c024..c1e98b20c0 100644 --- a/build/scripts/go_tool.py +++ b/build/scripts/go_tool.py @@ -781,7 +781,7 @@ def do_link_test(args): if __name__ == '__main__': args = pcf.get_args(sys.argv[1:]) - + parser = argparse.ArgumentParser(prefix_chars='+') parser.add_argument('++mode', choices=['dll', 'exe', 'lib', 'test'], required=True) parser.add_argument('++srcs', nargs='*', required=True) diff --git a/build/scripts/java_pack_to_file.py b/build/scripts/java_pack_to_file.py index f6911c7796..8d2aeb93fd 100644 --- a/build/scripts/java_pack_to_file.py +++ b/build/scripts/java_pack_to_file.py @@ -1,25 +1,25 @@ -import os -import re -import optparse - -PACKAGE_REGEX = re.compile(r'^\s*package\s+(.*?);', flags=re.MULTILINE | re.DOTALL) - - -def parse_args(): - parser = optparse.OptionParser() - parser.add_option('-o', '--output') - parser.add_option('-a', '--source-root', dest='source_root') - return parser.parse_args() - - -def get_package_name(filename): - with open(filename) as afile: - match = PACKAGE_REGEX.search(afile.read()) - if match: - return match.group(1).replace('\n\t ', '').replace('.', '/') - return '' - - +import os +import re +import optparse + +PACKAGE_REGEX = re.compile(r'^\s*package\s+(.*?);', flags=re.MULTILINE | re.DOTALL) + + +def parse_args(): + parser = optparse.OptionParser() + parser.add_option('-o', '--output') + parser.add_option('-a', '--source-root', dest='source_root') + return parser.parse_args() + + +def get_package_name(filename): + with open(filename) as afile: + match = PACKAGE_REGEX.search(afile.read()) + if match: + return match.group(1).replace('\n\t ', '').replace('.', '/') + return '' + + def write_coverage_sources(output, srcroot, files): with open(output, 'w') as afile: for filename in files: @@ -27,10 +27,10 @@ def write_coverage_sources(output, srcroot, files): afile.write(os.path.join(pname, os.path.basename(filename)) + ':' + filename + '\n') -def main(): - opts, files = parse_args() +def main(): + opts, files = parse_args() write_coverage_sources(opts.output, opts.source_root, files) - - -if __name__ == '__main__': - exit(main()) + + +if __name__ == '__main__': + exit(main()) diff --git a/build/scripts/link_dyn_lib.py b/build/scripts/link_dyn_lib.py index 58faf37bbe..23487f5c1e 100644 --- a/build/scripts/link_dyn_lib.py +++ 
b/build/scripts/link_dyn_lib.py @@ -148,8 +148,8 @@ def fix_cmd(arch, musl, c): return list(f(list(parse_export_file(fname)))) if p.endswith('.supp'): - return [] - + return [] + if p.endswith('.pkg.fake'): return [] diff --git a/build/scripts/link_exe.py b/build/scripts/link_exe.py index 1c3cc4e516..f469e3b442 100644 --- a/build/scripts/link_exe.py +++ b/build/scripts/link_exe.py @@ -5,16 +5,16 @@ import optparse from process_whole_archive_option import ProcessWholeArchiveOption -def get_leaks_suppressions(cmd): - supp, newcmd = [], [] - for arg in cmd: +def get_leaks_suppressions(cmd): + supp, newcmd = [], [] + for arg in cmd: if arg.endswith(".supp"): - supp.append(arg) - else: - newcmd.append(arg) - return supp, newcmd - - + supp.append(arg) + else: + newcmd.append(arg) + return supp, newcmd + + musl_libs = '-lc', '-lcrypt', '-ldl', '-lm', '-lpthread', '-lrt', '-lutil' @@ -23,26 +23,26 @@ def fix_cmd(musl, c): def gen_default_suppressions(inputs, output, source_root): - import collections - import os - - supp_map = collections.defaultdict(set) + import collections + import os + + supp_map = collections.defaultdict(set) for filename in inputs: - sanitizer = os.path.basename(filename).split('.', 1)[0] + sanitizer = os.path.basename(filename).split('.', 1)[0] with open(os.path.join(source_root, filename)) as src: - for line in src: - line = line.strip() - if not line or line.startswith('#'): - continue - supp_map[sanitizer].add(line) - + for line in src: + line = line.strip() + if not line or line.startswith('#'): + continue + supp_map[sanitizer].add(line) + with open(output, "wb") as dst: - for supp_type, supps in supp_map.items(): - dst.write('extern "C" const char *__%s_default_suppressions() {\n' % supp_type) - dst.write(' return "{}";\n'.format('\\n'.join(sorted(supps)))) - dst.write('}\n') - - + for supp_type, supps in supp_map.items(): + dst.write('extern "C" const char *__%s_default_suppressions() {\n' % supp_type) + dst.write(' return "{}";\n'.format('\\n'.join(sorted(supps)))) + dst.write('}\n') + + def parse_args(): parser = optparse.OptionParser() parser.disable_interspersed_args() @@ -69,7 +69,7 @@ if __name__ == '__main__': supp, cmd = get_leaks_suppressions(cmd) if supp: - src_file = "default_suppressions.cpp" + src_file = "default_suppressions.cpp" gen_default_suppressions(supp, src_file, opts.source_root) cmd += [src_file] diff --git a/build/scripts/link_fat_obj.py b/build/scripts/link_fat_obj.py index 9458c0ebfb..c189668b9e 100644 --- a/build/scripts/link_fat_obj.py +++ b/build/scripts/link_fat_obj.py @@ -35,10 +35,10 @@ def get_args(): return parser.parse_args(groups['default']), groups -def strip_suppression_files(srcs): +def strip_suppression_files(srcs): return [s for s in srcs if not s.endswith('.supp')] - - + + def main(): args, groups = get_args() @@ -51,7 +51,7 @@ def main(): # Dependencies global_srcs = groups['global_srcs'] - global_srcs = strip_suppression_files(global_srcs) + global_srcs = strip_suppression_files(global_srcs) global_srcs = ProcessWholeArchiveOption(args.arch).construct_cmd(global_srcs) peers = groups['peers'] diff --git a/build/scripts/retry.py b/build/scripts/retry.py index ac417f7c5f..d14170bfec 100644 --- a/build/scripts/retry.py +++ b/build/scripts/retry.py @@ -1,29 +1,29 @@ -import time -import functools - - -# Partly copy-pasted from contrib/python/retry -def retry_func(f, exceptions=Exception, tries=-1, delay=1, max_delay=None, backoff=1): - _tries, _delay = tries, delay - while _tries: - try: - return f() - except exceptions as e: - 
_tries -= 1 - if not _tries: - raise - - time.sleep(_delay) - _delay *= backoff - - if max_delay is not None: - _delay = min(_delay, max_delay) - - -def retry(**retry_kwargs): - def decorator(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - return retry_func(lambda: func(*args, **kwargs), **retry_kwargs) - return wrapper - return decorator +import time +import functools + + +# Partly copy-pasted from contrib/python/retry +def retry_func(f, exceptions=Exception, tries=-1, delay=1, max_delay=None, backoff=1): + _tries, _delay = tries, delay + while _tries: + try: + return f() + except exceptions as e: + _tries -= 1 + if not _tries: + raise + + time.sleep(_delay) + _delay *= backoff + + if max_delay is not None: + _delay = min(_delay, max_delay) + + +def retry(**retry_kwargs): + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + return retry_func(lambda: func(*args, **kwargs), **retry_kwargs) + return wrapper + return decorator diff --git a/build/scripts/run_junit.py b/build/scripts/run_junit.py index 6944144fa7..5f56403bed 100644 --- a/build/scripts/run_junit.py +++ b/build/scripts/run_junit.py @@ -1,125 +1,125 @@ -import collections -import json -import time -import os -import sys - -SHUTDOWN_SIGNAL = 'SIGUSR1' - -PROVIDES = { - "devtools/junit-runner/devtools-junit-runner.jar": "junit-runner", - "devtools/junit5-runner/devtools-junit5-runner.jar": "junit-runner", -} - - -class SignalInterruptionError(Exception): - pass - - -def on_shutdown(s, f): - raise SignalInterruptionError() - - -def get_tracefile_path(args): - return args[args.index('--output') + 1] - - -def dump_chunk_error(tracefile, name, imps): - with open(tracefile, 'a') as afile: - msg = { - "timestamp": time.time(), - "name": "chunk-event", - "value": { - "errors": [ - [ - "fail", - "[[bad]]Test contains conflicting dependencies for [[imp]]{}[[bad]]: {}[[rst]]".format( - name, ', '.join(imps) - ), - ], - ], - }, - } - json.dump(msg, afile) - afile.write("\n") - - -def verify_classpath(args): - cpfile = args[args.index('-classpath') + 1] - assert cpfile.startswith('@'), cpfile - - cpfile = cpfile[1:] - assert os.path.exists(cpfile) - - with open(cpfile) as afile: - data = afile.read().splitlines() - - collisions = collections.defaultdict(set) - for cp in data: - if cp in PROVIDES: - collisions[PROVIDES[cp]].add(cp) - - for name, imps in collisions.items(): - if len(imps) > 1: - tracefile = get_tracefile_path(args) - dump_chunk_error(tracefile, name, imps) - return False - return True - - -def main(): - args = sys.argv[1:] - - # Emulates PROVIDES(X) for junit-runner and junit5-runner. 
- # For more info see DEVTOOLSSUPPORT-7454 - if not verify_classpath(args): - return 1 - - def execve(): - os.execve(args[0], args, os.environ) - - jar_binary = args[args.index('--jar-binary') + 1] - java_bin_dir = os.path.dirname(jar_binary) - jstack_binary = os.path.join(java_bin_dir, 'jstack') - - if not os.path.exists(jstack_binary): - sys.stderr.write("jstack is missing: {}\n".format(jstack_binary)) - execve() - - import signal - - signum = getattr(signal, SHUTDOWN_SIGNAL, None) - - if signum is None: - execve() - - import subprocess - - proc = subprocess.Popen(args) - signal.signal(signum, on_shutdown) - timeout = False - - try: - proc.wait() - except SignalInterruptionError: - sys.stderr.write("\nGot {} signal: going to shutdown junit\n".format(signum)) - # Dump stack traces - subprocess.call([jstack_binary, str(proc.pid)], stdout=sys.stderr) - # Kill junit - for more info see DEVTOOLS-7636 - os.kill(proc.pid, signal.SIGKILL) - proc.wait() - timeout = True - - if proc.returncode: - sys.stderr.write('java exit code: {}\n'.format(proc.returncode)) - if timeout: - # In case of timeout return specific exit code - # https://a.yandex-team.ru/arc/trunk/arcadia/devtools/ya/test/const/__init__.py?rev=r8578188#L301 - proc.returncode = 10 - sys.stderr.write('java exit code changed to {}\n'.format(proc.returncode)) - - return proc.returncode - - -if __name__ == '__main__': - exit(main()) +import collections +import json +import time +import os +import sys + +SHUTDOWN_SIGNAL = 'SIGUSR1' + +PROVIDES = { + "devtools/junit-runner/devtools-junit-runner.jar": "junit-runner", + "devtools/junit5-runner/devtools-junit5-runner.jar": "junit-runner", +} + + +class SignalInterruptionError(Exception): + pass + + +def on_shutdown(s, f): + raise SignalInterruptionError() + + +def get_tracefile_path(args): + return args[args.index('--output') + 1] + + +def dump_chunk_error(tracefile, name, imps): + with open(tracefile, 'a') as afile: + msg = { + "timestamp": time.time(), + "name": "chunk-event", + "value": { + "errors": [ + [ + "fail", + "[[bad]]Test contains conflicting dependencies for [[imp]]{}[[bad]]: {}[[rst]]".format( + name, ', '.join(imps) + ), + ], + ], + }, + } + json.dump(msg, afile) + afile.write("\n") + + +def verify_classpath(args): + cpfile = args[args.index('-classpath') + 1] + assert cpfile.startswith('@'), cpfile + + cpfile = cpfile[1:] + assert os.path.exists(cpfile) + + with open(cpfile) as afile: + data = afile.read().splitlines() + + collisions = collections.defaultdict(set) + for cp in data: + if cp in PROVIDES: + collisions[PROVIDES[cp]].add(cp) + + for name, imps in collisions.items(): + if len(imps) > 1: + tracefile = get_tracefile_path(args) + dump_chunk_error(tracefile, name, imps) + return False + return True + + +def main(): + args = sys.argv[1:] + + # Emulates PROVIDES(X) for junit-runner and junit5-runner. 
+ # For more info see DEVTOOLSSUPPORT-7454 + if not verify_classpath(args): + return 1 + + def execve(): + os.execve(args[0], args, os.environ) + + jar_binary = args[args.index('--jar-binary') + 1] + java_bin_dir = os.path.dirname(jar_binary) + jstack_binary = os.path.join(java_bin_dir, 'jstack') + + if not os.path.exists(jstack_binary): + sys.stderr.write("jstack is missing: {}\n".format(jstack_binary)) + execve() + + import signal + + signum = getattr(signal, SHUTDOWN_SIGNAL, None) + + if signum is None: + execve() + + import subprocess + + proc = subprocess.Popen(args) + signal.signal(signum, on_shutdown) + timeout = False + + try: + proc.wait() + except SignalInterruptionError: + sys.stderr.write("\nGot {} signal: going to shutdown junit\n".format(signum)) + # Dump stack traces + subprocess.call([jstack_binary, str(proc.pid)], stdout=sys.stderr) + # Kill junit - for more info see DEVTOOLS-7636 + os.kill(proc.pid, signal.SIGKILL) + proc.wait() + timeout = True + + if proc.returncode: + sys.stderr.write('java exit code: {}\n'.format(proc.returncode)) + if timeout: + # In case of timeout return specific exit code + # https://a.yandex-team.ru/arc/trunk/arcadia/devtools/ya/test/const/__init__.py?rev=r8578188#L301 + proc.returncode = 10 + sys.stderr.write('java exit code changed to {}\n'.format(proc.returncode)) + + return proc.returncode + + +if __name__ == '__main__': + exit(main()) diff --git a/build/scripts/unpacking_jtest_runner.py b/build/scripts/unpacking_jtest_runner.py index 9bb314a98a..9730dcd711 100644 --- a/build/scripts/unpacking_jtest_runner.py +++ b/build/scripts/unpacking_jtest_runner.py @@ -1,10 +1,10 @@ -import io -import json -import optparse -import os +import io +import json +import optparse +import os import sys import subprocess -import time +import time import zipfile import platform @@ -15,7 +15,7 @@ import platform def parse_args(): parser = optparse.OptionParser() parser.disable_interspersed_args() - parser.add_option('--trace-file') + parser.add_option('--trace-file') parser.add_option('--jar-binary') parser.add_option('--tests-jar-path') parser.add_option('--classpath-option-type', choices=('manifest', 'command_file', 'list'), default='manifest') @@ -49,27 +49,27 @@ def fix_cmd(cmd): return cmd -def dump_event(etype, data, filename): - event = { - 'timestamp': time.time(), - 'value': data, - 'name': etype, - } - - with io.open(filename, 'a', encoding='utf8') as afile: - afile.write(unicode(json.dumps(event) + '\n')) - - -def dump_chunk_event(data, filename): - return dump_event('chunk-event', data, filename) - - -def extract_jars(dest, archive): - os.makedirs(dest) - with zipfile.ZipFile(archive) as zf: - zf.extractall(dest) - - +def dump_event(etype, data, filename): + event = { + 'timestamp': time.time(), + 'value': data, + 'name': etype, + } + + with io.open(filename, 'a', encoding='utf8') as afile: + afile.write(unicode(json.dumps(event) + '\n')) + + +def dump_chunk_event(data, filename): + return dump_event('chunk-event', data, filename) + + +def extract_jars(dest, archive): + os.makedirs(dest) + with zipfile.ZipFile(archive) as zf: + zf.extractall(dest) + + def make_bfg_from_cp(class_path, out): class_path = ' '.join( map(lambda path: ('file:/' + path.lstrip('/')) if os.path.isabs(path) else path, class_path) @@ -89,7 +89,7 @@ def make_command_file_from_cp(class_path, out): def main(): - s = time.time() + s = time.time() opts, args = parse_args() # unpack tests jar @@ -100,13 +100,13 @@ def main(): build_root = '' dest = os.path.abspath('test-classes') - 
extract_jars(dest, opts.tests_jar_path) - - metrics = { - 'suite_jtest_extract_jars_(seconds)': time.time() - s, - } - - s = time.time() + extract_jars(dest, opts.tests_jar_path) + + metrics = { + 'suite_jtest_extract_jars_(seconds)': time.time() - s, + } + + s = time.time() # fix java classpath cp_idx = args.index('-classpath') if args[cp_idx + 1].startswith('@'): @@ -131,12 +131,12 @@ def main(): else: args[cp_idx + 1] = args[cp_idx + 1].replace(opts.tests_jar_path, dest) args = fix_cmd(args[:cp_idx]) + args[cp_idx:] - - metrics['suite_jtest_fix_classpath_(seconds)'] = time.time() - s - - if opts.trace_file: - dump_chunk_event({'metrics': metrics}, opts.trace_file) - + + metrics['suite_jtest_fix_classpath_(seconds)'] = time.time() - s + + if opts.trace_file: + dump_chunk_event({'metrics': metrics}, opts.trace_file) + # run java cmd if platform.system() == 'Windows': sys.exit(subprocess.Popen(args).wait()) diff --git a/build/scripts/with_coverage.py b/build/scripts/with_coverage.py index 52937490bc..d62435c3b8 100644 --- a/build/scripts/with_coverage.py +++ b/build/scripts/with_coverage.py @@ -1,5 +1,5 @@ -# TODO prettyboy remove after ya-bin release - +# TODO prettyboy remove after ya-bin release + import os import sys import subprocess diff --git a/build/scripts/with_crash_on_timeout.py b/build/scripts/with_crash_on_timeout.py index 775347f9c1..bde864ed29 100644 --- a/build/scripts/with_crash_on_timeout.py +++ b/build/scripts/with_crash_on_timeout.py @@ -1,5 +1,5 @@ -# TODO prettyboy remove after ya-bin release - +# TODO prettyboy remove after ya-bin release + import os import sys import subprocess diff --git a/build/scripts/ya.make b/build/scripts/ya.make index 105f8dfc7b..710165e40d 100644 --- a/build/scripts/ya.make +++ b/build/scripts/ya.make @@ -8,12 +8,12 @@ TEST_SRCS( build_java_codenav_index.py build_java_with_error_prone.py build_java_with_error_prone2.py - build_mn.py - build_pln_header.py + build_mn.py + build_pln_header.py cat.py - cgo1_wrapper.py + cgo1_wrapper.py check_config_h.py - collect_java_srcs.py + collect_java_srcs.py compile_cuda.py compile_java.py compile_jsrc.py @@ -29,7 +29,7 @@ TEST_SRCS( extract_jacoco_report.py f2c.py fail_module_cmd.py - fetch_from.py + fetch_from.py fetch_from_external.py fetch_from_mds.py fetch_from_npm.py @@ -40,10 +40,10 @@ TEST_SRCS( fix_msvc_output.py fs_tools.py gen_aar_gradle_script.py - gen_java_codenav_entry.py + gen_java_codenav_entry.py gen_java_codenav_protobuf.py gen_mx_table.py - gen_py3_reg.py + gen_py3_reg.py gen_py_reg.py gen_test_apk_gradle_script.py gen_ub.py @@ -51,7 +51,7 @@ TEST_SRCS( go_proto_wrapper.py go_tool.py ios_wrapper.py - java_pack_to_file.py + java_pack_to_file.py link_asrc.py link_dyn_lib.py link_exe.py @@ -71,14 +71,14 @@ TEST_SRCS( py_compile.py run_ios_simulator.py run_javac.py - run_junit.py + run_junit.py run_llvm_dsymutil.py run_msvc_wine.py run_tool.py sky.py stdout2stderr.py symlink.py - tar_directory.py + tar_directory.py tar_sources.py tared_protoc.py touch.py @@ -87,7 +87,7 @@ TEST_SRCS( with_coverage.py with_crash_on_timeout.py with_pathsep_resolve.py - wrap_groovyc.py + wrap_groovyc.py wrapper.py writer.py xargs.py |
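
For context, build/scripts/retry.py (touched above) provides a small exponential-backoff decorator built on retry_func. The following is a minimal, hypothetical usage sketch: fetch_page and the example URL are invented for illustration and are not part of this commit; it assumes retry.py is importable as the retry module under Python 2, as the other fetch_from scripts do.

```python
# Hypothetical usage of build/scripts/retry.py (Python 2).
# fetch_page and the URL below are illustrative names, not code from this commit.
import urllib2

import retry


@retry.retry(tries=5, delay=1, backoff=2, max_delay=30)
def fetch_page(url):
    # Any exception raised here (e.g. urllib2.URLError on a transient network
    # failure) makes the decorator re-invoke the call, sleeping 1s, 2s, 4s, ...
    # between attempts, capped at max_delay, and re-raising after 5 tries.
    return urllib2.urlopen(url).read()


if __name__ == '__main__':
    print fetch_page('https://example.com')
```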