| field | value | date |
|---|---|---|
| author | rnefyodov <rnefyodov@yandex-team.ru> | 2022-02-10 16:47:17 +0300 |
| committer | Daniil Cherednik <dcherednik@yandex-team.ru> | 2022-02-10 16:47:17 +0300 |
| commit | c753751b693cf7c481c0292912e2b7536fa6d36a (patch) | |
| tree | 9814fbd1c3effac9b8377c5d604b367b14e2db55 /build/scripts | |
| parent | c22320e8c4f3d7be38c504706f137034e91d31e6 (diff) | |
| download | ydb-c753751b693cf7c481c0292912e2b7536fa6d36a.tar.gz | |
Restoring authorship annotation for <rnefyodov@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'build/scripts')
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | build/scripts/clang_wrapper.py | 8 |
| -rw-r--r-- | build/scripts/coverage-info.py | 242 |
| -rw-r--r-- | build/scripts/error.py | 56 |
| -rwxr-xr-x | build/scripts/fetch_from_sandbox.py | 80 |
| -rw-r--r-- | build/scripts/find_and_tar.py | 42 |
| -rw-r--r-- | build/scripts/gen_tasklet_reg.py | 4 |
| -rw-r--r-- | build/scripts/generate_mf.py | 10 |
| -rw-r--r-- | build/scripts/llvm_opt_wrapper.py | 30 |
| -rw-r--r-- | build/scripts/merge_coverage_data.py | 22 |
| -rw-r--r-- | build/scripts/perl_wrapper.py | 6 |
| -rw-r--r-- | build/scripts/with_coverage.py | 76 |
| -rw-r--r-- | build/scripts/ya.make | 8 |
| -rw-r--r-- | build/scripts/yndexer.py | 24 |
13 files changed, 304 insertions, 304 deletions
diff --git a/build/scripts/clang_wrapper.py b/build/scripts/clang_wrapper.py
index fb34497567..af3869f789 100644
--- a/build/scripts/clang_wrapper.py
+++ b/build/scripts/clang_wrapper.py
@@ -3,12 +3,12 @@ import sys
 
 
 def fix(s):
-    # disable dbg DEVTOOLS-2744
-    if s == '-g':
-        return None
+    # disable dbg DEVTOOLS-2744
+    if s == '-g':
+        return None
     if s == '/Z7' or s == '/Od' or s == '/Ob0' or s == '/D_DEBUG':
         return None
-
+
     # disable sanitizers for generated code
     if s.startswith('-fsanitize') or s == '-Dmemory_sanitizer_enabled' or s.startswith('-fsanitize-blacklist'):
         return None
diff --git a/build/scripts/coverage-info.py b/build/scripts/coverage-info.py
index cca68ac27f..94491d9256 100644
--- a/build/scripts/coverage-info.py
+++ b/build/scripts/coverage-info.py
@@ -1,17 +1,17 @@
-import argparse
-import os
-import sys
-import tarfile
-import collections
-import subprocess
+import argparse
+import os
+import sys
+import tarfile
+import collections
+import subprocess
 import re
-
-
-GCDA_EXT = '.gcda'
-GCNO_EXT = '.gcno'
-
-
-def suffixes(path):
+
+
+GCDA_EXT = '.gcda'
+GCNO_EXT = '.gcno'
+
+
+def suffixes(path):
     """
     >>> list(suffixes('/a/b/c'))
     ['c', 'b/c', '/a/b/c']
@@ -24,67 +24,67 @@ def suffixes(path):
     >>> list(suffixes('/'))
     []
     """
-    path = os.path.normpath(path)
-
-    def up_dirs(cur_path):
-        while os.path.dirname(cur_path) != cur_path:
-            cur_path = os.path.dirname(cur_path)
-            yield cur_path
-
-    for x in up_dirs(path):
-        yield path.replace(x + os.path.sep, '')
-
-
-def recast(in_file, out_file, probe_path, update_stat):
-    PREFIX = 'SF:'
-
-    probed_path = None
-
-    any_payload = False
-
-    with open(in_file, 'r') as input, open(out_file, 'w') as output:
-        active = True
-        for line in input:
-            line = line.rstrip('\n')
-            if line.startswith('TN:'):
-                output.write(line + '\n')
-            elif line.startswith(PREFIX):
-                path = line[len(PREFIX):]
-                probed_path = probe_path(path)
-                if probed_path:
-                    output.write(PREFIX + probed_path + '\n')
-                active = bool(probed_path)
-            else:
-                if active:
-                    update_stat(probed_path, line)
-                    output.write(line + '\n')
-                    any_payload = True
-
-    return any_payload
-
-
-def print_stat(da, fnda, teamcity_stat_output):
-    lines_hit = sum(map(bool, da.values()))
-    lines_total = len(da.values())
-    lines_coverage = 100.0 * lines_hit / lines_total if lines_total else 0
-
-    func_hit = sum(map(bool, fnda.values()))
-    func_total = len(fnda.values())
-    func_coverage = 100.0 * func_hit / func_total if func_total else 0
-
-    print >>sys.stderr, '[[imp]]Lines[[rst]]     {: >16} {: >16} {: >16.1f}%'.format(lines_hit, lines_total, lines_coverage)
-    print >>sys.stderr, '[[imp]]Functions[[rst]] {: >16} {: >16} {: >16.1f}%'.format(func_hit, func_total, func_coverage)
-
-    if teamcity_stat_output:
-        with open(teamcity_stat_output, 'w') as tc_file:
-            tc_file.write("##teamcity[blockOpened name='Code Coverage Summary']\n")
-            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsLTotal\' value='{}']\n".format(lines_total))
-            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsLCovered\' value='{}']\n".format(lines_hit))
-            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsMTotal\' value='{}']\n".format(func_total))
-            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsMCovered\' value='{}']\n".format(func_hit))
-            tc_file.write("##teamcity[blockClosed name='Code Coverage Summary']\n")
-
-
+    path = os.path.normpath(path)
+
+    def up_dirs(cur_path):
+        while os.path.dirname(cur_path) != cur_path:
+            cur_path = os.path.dirname(cur_path)
+            yield cur_path
+
+    for x in up_dirs(path):
+        yield path.replace(x + os.path.sep, '')
+
+
+def recast(in_file, out_file, probe_path, update_stat):
+    PREFIX = 'SF:'
+
+    probed_path = None
+
+    any_payload = False
+
+    with open(in_file, 'r') as input, open(out_file, 'w') as output:
+        active = True
+        for line in input:
+            line = line.rstrip('\n')
+            if line.startswith('TN:'):
+                output.write(line + '\n')
+            elif line.startswith(PREFIX):
+                path = line[len(PREFIX):]
+                probed_path = probe_path(path)
+                if probed_path:
+                    output.write(PREFIX + probed_path + '\n')
+                active = bool(probed_path)
+            else:
+                if active:
+                    update_stat(probed_path, line)
+                    output.write(line + '\n')
+                    any_payload = True
+
+    return any_payload
+
+
+def print_stat(da, fnda, teamcity_stat_output):
+    lines_hit = sum(map(bool, da.values()))
+    lines_total = len(da.values())
+    lines_coverage = 100.0 * lines_hit / lines_total if lines_total else 0
+
+    func_hit = sum(map(bool, fnda.values()))
+    func_total = len(fnda.values())
+    func_coverage = 100.0 * func_hit / func_total if func_total else 0
+
+    print >>sys.stderr, '[[imp]]Lines[[rst]]     {: >16} {: >16} {: >16.1f}%'.format(lines_hit, lines_total, lines_coverage)
+    print >>sys.stderr, '[[imp]]Functions[[rst]] {: >16} {: >16} {: >16.1f}%'.format(func_hit, func_total, func_coverage)
+
+    if teamcity_stat_output:
+        with open(teamcity_stat_output, 'w') as tc_file:
+            tc_file.write("##teamcity[blockOpened name='Code Coverage Summary']\n")
+            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsLTotal\' value='{}']\n".format(lines_total))
+            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsLCovered\' value='{}']\n".format(lines_hit))
+            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsMTotal\' value='{}']\n".format(func_total))
+            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsMCovered\' value='{}']\n".format(func_hit))
+            tc_file.write("##teamcity[blockClosed name='Code Coverage Summary']\n")
+
+
 def chunks(l, n):
     """
     >>> list(chunks(range(10), 3))
@@ -118,17 +118,17 @@ def combine_info_files(lcov, files, out_file):
 
 def probe_path_global(path, source_root, prefix_filter, exclude_files):
     if path.endswith('_ut.cpp'):
-        return None
-
+        return None
+
     for suff in reversed(list(suffixes(path))):
         if (not prefix_filter or suff.startswith(prefix_filter)) and (not exclude_files or not exclude_files.match(suff)):
             full_path = source_root + os.sep + suff
             if os.path.isfile(full_path):
                 return full_path
-
+
     return None
-
-
+
+
 def update_stat_global(src_file, line, fnda, da):
     if line.startswith("FNDA:"):
         visits, func_name = line[len("FNDA:"):].split(',')
@@ -179,27 +179,27 @@ def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable
 
 
 def process_all_coverage_files(gcda_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info):
-    with tarfile.open(gcda_archive) as gcda_tf:
-        for gcda_item in gcda_tf:
-            if gcda_item.isfile() and gcda_item.name.endswith(GCDA_EXT):
-                gcda_name = gcda_item.name
-                source_fname = gcda_name[:-len(GCDA_EXT)]
-                for suff in suffixes(source_fname):
-                    if suff in fname2gcno:
+    with tarfile.open(gcda_archive) as gcda_tf:
+        for gcda_item in gcda_tf:
+            if gcda_item.isfile() and gcda_item.name.endswith(GCDA_EXT):
+                gcda_name = gcda_item.name
+                source_fname = gcda_name[:-len(GCDA_EXT)]
+                for suff in suffixes(source_fname):
+                    if suff in fname2gcno:
                         gcda_new_name = suff + GCDA_EXT
-                        gcda_item.name = gcda_new_name
-                        gcda_tf.extract(gcda_item)
-                        if os.path.getsize(gcda_new_name) > 0:
+                        gcda_item.name = gcda_new_name
+                        gcda_tf.extract(gcda_item)
+                        if os.path.getsize(gcda_new_name) > 0:
                             coverage_info = suff + '.' + str(len(fname2info[suff])) + '.info'
                             fname2info[suff].append(coverage_info)
-                            geninfo_cmd = [
+                            geninfo_cmd = [
                                 geninfo_executable,
-                                '--gcov-tool', gcov_tool,
-                                gcda_new_name,
+                                '--gcov-tool', gcov_tool,
+                                gcda_new_name,
                                 '-o', coverage_info + '.tmp'
-                            ]
+                            ]
                             gen_info(geninfo_cmd, coverage_info)
-
+
 
 def gen_cobertura(tool, output, combined_info):
     cmd = [
@@ -241,42 +241,42 @@ def main(source_root, output, gcno_archive, gcda_archive, gcov_tool, prefix_filt
         output_dir = coverage_report_path
     else:
         output_dir = output + '.dir'
-
+
     if not os.path.exists(output_dir):
         os.makedirs(output_dir)
 
-    teamcity_stat_file = None
-    if teamcity_stat_output:
-        teamcity_stat_file = os.path.join(output_dir, 'teamcity.out')
-    print_stat(da, fnda, teamcity_stat_file)
-
-    if lcov_args:
+    teamcity_stat_file = None
+    if teamcity_stat_output:
+        teamcity_stat_file = os.path.join(output_dir, 'teamcity.out')
+    print_stat(da, fnda, teamcity_stat_file)
+
+    if lcov_args:
        output_trace = "combined.info"
         combine_info_files(os.path.join(source_root, 'devtools', 'lcov', 'lcov'), lcov_args, output_trace)
         cmd = [os.path.join(source_root, 'devtools', 'lcov', 'genhtml'), '-p', source_root, '--ignore-errors', 'source', '-o', output_dir, output_trace]
-        print >>sys.stderr, '## genhtml', ' '.join(cmd)
-        subprocess.check_call(cmd)
+        print >>sys.stderr, '## genhtml', ' '.join(cmd)
+        subprocess.check_call(cmd)
         if lcov_cobertura:
             gen_cobertura(lcov_cobertura, gcov_report, output_trace)
-
-    with tarfile.open(output, 'w') as tar:
-        tar.add(output_dir, arcname='.')
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser()
-
-    parser.add_argument('--source-root', action='store')
-    parser.add_argument('--output', action='store')
-    parser.add_argument('--gcno-archive', action='store')
-    parser.add_argument('--gcda-archive', action='store')
-    parser.add_argument('--gcov-tool', action='store')
-    parser.add_argument('--prefix-filter', action='store')
+
+    with tarfile.open(output, 'w') as tar:
+        tar.add(output_dir, arcname='.')
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument('--source-root', action='store')
+    parser.add_argument('--output', action='store')
+    parser.add_argument('--gcno-archive', action='store')
+    parser.add_argument('--gcda-archive', action='store')
+    parser.add_argument('--gcov-tool', action='store')
+    parser.add_argument('--prefix-filter', action='store')
     parser.add_argument('--exclude-regexp', action='store')
-    parser.add_argument('--teamcity-stat-output', action='store_const', const=True)
+    parser.add_argument('--teamcity-stat-output', action='store_const', const=True)
     parser.add_argument('--coverage-report-path', action='store')
     parser.add_argument('--gcov-report', action='store')
     parser.add_argument('--lcov-cobertura', action='store')
-
-    args = parser.parse_args()
-    main(**vars(args))
+
+    args = parser.parse_args()
+    main(**vars(args))
diff --git a/build/scripts/error.py b/build/scripts/error.py
index 644a172d89..f7d8ecb2cc 100644
--- a/build/scripts/error.py
+++ b/build/scripts/error.py
@@ -8,11 +8,11 @@ TEMPORARY_ERROR_MESSAGES = [
     'Internal Server Error',
     'Network connection closed unexpectedly',
     'Network is unreachable',
-    'No route to host',
+    'No route to host',
     'No space left on device',
     'Not enough space',
     'Temporary failure in name resolution',
-    'The read operation timed out',
+    'The read operation timed out',
     'timeout: timed out',
 ]
 
@@ -30,48 +30,48 @@ def merge_exit_codes(exit_codes):
     return max(e if e >= 0 else 1 for e in exit_codes) if exit_codes else 0
 
 
-def is_temporary_error(exc):
+def is_temporary_error(exc):
     import logging
     logger = logging.getLogger(__name__)
 
-    if getattr(exc, 'temporary', False):
+    if getattr(exc, 'temporary', False):
         logger.debug("Exception has temporary attribute: %s", exc)
-        return True
-
-    import errno
-    err = getattr(exc, 'errno', None)
-
-    if err == errno.ECONNREFUSED or err == errno.ENETUNREACH:
+        return True
+
+    import errno
+    err = getattr(exc, 'errno', None)
+
+    if err == errno.ECONNREFUSED or err == errno.ENETUNREACH:
         logger.debug("Exception has errno attribute: %s (errno=%s)", exc, err)
-        return True
-
-    import socket
-
+        return True
+
+    import socket
+
     if isinstance(exc, socket.timeout) or isinstance(getattr(exc, 'reason', None), socket.timeout):
         logger.debug("Socket timeout exception: %s", exc)
-        return True
-
-    if isinstance(exc, socket.gaierror):
+        return True
+
+    if isinstance(exc, socket.gaierror):
         logger.debug("Getaddrinfo exception: %s", exc)
-        return True
-
+        return True
+
     import urllib2
 
     if isinstance(exc, urllib2.HTTPError) and exc.code in (429, ):
         logger.debug("urllib2.HTTPError: %s", exc)
         return True
 
-    import httplib
-
-    if isinstance(exc, httplib.IncompleteRead):
+    import httplib
+
+    if isinstance(exc, httplib.IncompleteRead):
         logger.debug("IncompleteRead exception: %s", exc)
-        return True
-
-    exc_str = str(exc)
-
+        return True
+
+    exc_str = str(exc)
+
     for message in TEMPORARY_ERROR_MESSAGES:
         if message in exc_str:
             logger.debug("Found temporary error pattern (%s): %s", message, exc_str)
             return True
-
-    return False
+
+    return False
diff --git a/build/scripts/fetch_from_sandbox.py b/build/scripts/fetch_from_sandbox.py
index 4203aa7a3d..a99542e174 100755
--- a/build/scripts/fetch_from_sandbox.py
+++ b/build/scripts/fetch_from_sandbox.py
@@ -3,9 +3,9 @@ import json
 import logging
 import argparse
 import os
-import random
+import random
 import subprocess
-import sys
+import sys
 import time
 import urllib2
 import uuid
@@ -13,11 +13,11 @@ import uuid
 import fetch_from
 
 
-ORIGIN_SUFFIX = '?origin=fetch-from-sandbox'
-MDS_PREFIX = 'http://storage-int.mds.yandex.net/get-sandbox/'
+ORIGIN_SUFFIX = '?origin=fetch-from-sandbox'
+MDS_PREFIX = 'http://storage-int.mds.yandex.net/get-sandbox/'
 TEMPORARY_ERROR_CODES = (429, 500, 503, 504)
-
-
+
+
 def parse_args():
     parser = argparse.ArgumentParser()
     fetch_from.add_common_arguments(parser)
@@ -128,10 +128,10 @@ def get_resource_http_links(resource_id):
     return [r['url'] + ORIGIN_SUFFIX for r in _query(url)]
 
 
-def fetch_via_script(script, resource_id):
-    return subprocess.check_output([script, str(resource_id)]).rstrip()
-
-
+def fetch_via_script(script, resource_id):
+    return subprocess.check_output([script, str(resource_id)]).rstrip()
+
+
 def fetch(resource_id, custom_fetcher):
     try:
         resource_info = get_resource_info(resource_id, touch=True, no_links=True)
@@ -146,14 +146,14 @@ def fetch(resource_id, custom_fetcher):
 
     logging.info('Resource %s info %s', str(resource_id), json.dumps(resource_info))
 
-    resource_file_name = os.path.basename(resource_info["file_name"])
-    expected_md5 = resource_info.get('md5')
-
-    proxy_link = resource_info['http']['proxy'] + ORIGIN_SUFFIX
-
-    mds_id = resource_info.get('attributes', {}).get('mds')
-    mds_link = MDS_PREFIX + mds_id if mds_id else None
-
+    resource_file_name = os.path.basename(resource_info["file_name"])
+    expected_md5 = resource_info.get('md5')
+
+    proxy_link = resource_info['http']['proxy'] + ORIGIN_SUFFIX
+
+    mds_id = resource_info.get('attributes', {}).get('mds')
+    mds_link = MDS_PREFIX + mds_id if mds_id else None
+
     def get_storage_links():
         storage_links = get_resource_http_links(resource_id)
         random.shuffle(storage_links)
@@ -164,33 +164,33 @@ def fetch(resource_id, custom_fetcher):
     if not skynet:
         logging.info("Skynet is not available, will try other protocols")
 
-    def iter_tries():
+    def iter_tries():
         if skynet:
             yield lambda: download_by_skynet(resource_info, resource_file_name)
-        if custom_fetcher:
-            yield lambda: fetch_via_script(custom_fetcher, resource_id)
+        if custom_fetcher:
+            yield lambda: fetch_via_script(custom_fetcher, resource_id)
         # Don't try too hard here: we will get back to proxy later on
         yield lambda: fetch_from.fetch_url(proxy_link, False, resource_file_name, expected_md5, tries=2)
         for x in get_storage_links():
             # Don't spend too much time connecting single host
             yield lambda: fetch_from.fetch_url(x, False, resource_file_name, expected_md5, tries=1)
-            if mds_link is not None:
+            if mds_link is not None:
                 # Don't try too hard here: we will get back to MDS later on
                 yield lambda: fetch_from.fetch_url(mds_link, True, resource_file_name, expected_md5, tries=2)
         yield lambda: fetch_from.fetch_url(proxy_link, False, resource_file_name, expected_md5)
-        if mds_link is not None:
+        if mds_link is not None:
             yield lambda: fetch_from.fetch_url(mds_link, True, resource_file_name, expected_md5)
-
+
     if resource_info.get('attributes', {}).get('ttl') != 'inf':
         sys.stderr.write('WARNING: resource {} ttl is not "inf".\n'.format(resource_id))
 
-    exc_info = None
-    for i, action in enumerate(itertools.islice(iter_tries(), 0, 10)):
-        try:
-            fetched_file = action()
-            break
+    exc_info = None
+    for i, action in enumerate(itertools.islice(iter_tries(), 0, 10)):
+        try:
+            fetched_file = action()
+            break
         except UnsupportedProtocolException:
             pass
         except subprocess.CalledProcessError as e:
@@ -201,18 +201,18 @@ def fetch(resource_id, custom_fetcher):
             if e.code not in TEMPORARY_ERROR_CODES:
                 exc_info = exc_info or sys.exc_info()
             time.sleep(i)
-        except Exception as e:
-            logging.exception(e)
-            exc_info = exc_info or sys.exc_info()
-            time.sleep(i)
-    else:
+        except Exception as e:
+            logging.exception(e)
+            exc_info = exc_info or sys.exc_info()
+            time.sleep(i)
+    else:
         if exc_info:
             raise exc_info[0], exc_info[1], exc_info[2]
         else:
             raise Exception("No available protocol and/or server to fetch resource")
 
     return fetched_file, resource_info['file_name']
-
+
 
 def _get_resource_info_from_file(resource_file):
     if resource_file is None or not os.path.exists(resource_file):
@@ -254,16 +254,16 @@ def main(args):
         fetch_from.process(fetched_file, file_name, args, remove=not custom_fetcher and not resource_info)
 
 
-if __name__ == '__main__':
+if __name__ == '__main__':
     args = parse_args()
     fetch_from.setup_logging(args, os.path.basename(__file__))
 
-    try:
+    try:
         main(args)
-    except Exception as e:
-        logging.exception(e)
+    except Exception as e:
+        logging.exception(e)
         print >>sys.stderr, open(args.abs_log_path).read()
-        sys.stderr.flush()
+        sys.stderr.flush()
 
         import error
         sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/find_and_tar.py b/build/scripts/find_and_tar.py
index 1fe17fc743..f251623c68 100644
--- a/build/scripts/find_and_tar.py
+++ b/build/scripts/find_and_tar.py
@@ -1,22 +1,22 @@
-import os
-import sys
-import tarfile
-
-
-def find_gcno(dirname, tail):
-    for cur, _dirs, files in os.walk(dirname):
-        for f in files:
-            if f.endswith(tail):
-                yield os.path.relpath(os.path.join(cur, f))
-
-
-def main(args):
-    output = args[0]
+import os
+import sys
+import tarfile
+
+
+def find_gcno(dirname, tail):
+    for cur, _dirs, files in os.walk(dirname):
+        for f in files:
+            if f.endswith(tail):
+                yield os.path.relpath(os.path.join(cur, f))
+
+
+def main(args):
+    output = args[0]
     tail = args[1] if len(args) > 1 else ''
-    with tarfile.open(output, 'w:') as tf:
-        for f in find_gcno(os.getcwd(), tail):
-            tf.add(f)
-
-
-if __name__ == '__main__':
-    main(sys.argv[1:])
+    with tarfile.open(output, 'w:') as tf:
+        for f in find_gcno(os.getcwd(), tail):
+            tf.add(f)
+
+
+if __name__ == '__main__':
+    main(sys.argv[1:])
diff --git a/build/scripts/gen_tasklet_reg.py b/build/scripts/gen_tasklet_reg.py
index ed2f1a89ba..5b747c2eca 100644
--- a/build/scripts/gen_tasklet_reg.py
+++ b/build/scripts/gen_tasklet_reg.py
@@ -3,9 +3,9 @@ import argparse
 TEMPLATE = '''\
 {includes}\
 #include <tasklet/runtime/lib/{language}_wrapper.h>
-#include <tasklet/runtime/lib/registry.h>
+#include <tasklet/runtime/lib/registry.h>
 
-static const NTasklet::TRegHelper REG(
+static const NTasklet::TRegHelper REG(
     "{name}",
     new NTasklet::{wrapper}
 );
diff --git a/build/scripts/generate_mf.py b/build/scripts/generate_mf.py
index e1cdc18784..a44a969980 100644
--- a/build/scripts/generate_mf.py
+++ b/build/scripts/generate_mf.py
@@ -106,8 +106,8 @@ def generate_mf():
 
 
 if __name__ == '__main__':
-    try:
-        generate_mf()
-    except Exception as e:
-        sys.stderr.write(str(e) + '\n')
-        sys.exit(1)
+    try:
+        generate_mf()
+    except Exception as e:
+        sys.stderr.write(str(e) + '\n')
+        sys.exit(1)
diff --git a/build/scripts/llvm_opt_wrapper.py b/build/scripts/llvm_opt_wrapper.py
index d3a14b3738..38ca3004af 100644
--- a/build/scripts/llvm_opt_wrapper.py
+++ b/build/scripts/llvm_opt_wrapper.py
@@ -1,18 +1,18 @@
 import subprocess
-import sys
-
-
-def fix(s):
-    # we use '#' instead of ',' because ymake always splits args by comma
-    if 'internalize' in s:
-        return s.replace('#', ',')
-
-    return s
-
-
-if __name__ == '__main__':
-    path = sys.argv[1]
-    args = [fix(s) for s in [path] + sys.argv[2:]]
-
+import sys
+
+
+def fix(s):
+    # we use '#' instead of ',' because ymake always splits args by comma
+    if 'internalize' in s:
+        return s.replace('#', ',')
+
+    return s
+
+
+if __name__ == '__main__':
+    path = sys.argv[1]
+    args = [fix(s) for s in [path] + sys.argv[2:]]
+
     rc = subprocess.call(args, shell=False, stderr=sys.stderr, stdout=sys.stdout)
     sys.exit(rc)
diff --git a/build/scripts/merge_coverage_data.py b/build/scripts/merge_coverage_data.py
index 75ed820033..b7fa3c6a86 100644
--- a/build/scripts/merge_coverage_data.py
+++ b/build/scripts/merge_coverage_data.py
@@ -1,11 +1,11 @@
-import sys
-import tarfile
+import sys
+import tarfile
 import copy
 import os
 import uuid
-
-
-def main(args):
+
+
+def main(args):
     output_file, args = args[0], args[1:]
     # heretic@: Splits files on which could be merged( files ) and which should not be merged( expendables )
     # expendables will be in output_file in form {name}{ordinal number of archive in args[]}.{extension}
@@ -14,8 +14,8 @@ def main(args):
     except ValueError:
         split_i = len(args)
     files, expendables = args[:split_i], args[split_i + 1:]
-
-    with tarfile.open(output_file, 'w') as outf:
+
+    with tarfile.open(output_file, 'w') as outf:
         for x in files:
             with tarfile.open(x) as tf:
                 for tarinfo in tf:
@@ -26,7 +26,7 @@ def main(args):
                         new_basename = '.'.join([basename_parts[0] + str(uuid.uuid4())] + basename_parts[1:])
                         new_tarinfo.name = os.path.join(dirname, new_basename)
                     outf.addfile(new_tarinfo, tf.extractfile(tarinfo))
-
-
-if __name__ == '__main__':
-    main(sys.argv[1:])
+
+
+if __name__ == '__main__':
+    main(sys.argv[1:])
diff --git a/build/scripts/perl_wrapper.py b/build/scripts/perl_wrapper.py
index 2a0a20b6c7..cb4027f1d3 100644
--- a/build/scripts/perl_wrapper.py
+++ b/build/scripts/perl_wrapper.py
@@ -1,7 +1,7 @@
-import os
-import sys
+import os
+import sys
 import shutil
-
+
 if __name__ == '__main__':
     path = sys.argv[1]
     to = sys.argv[-1]
diff --git a/build/scripts/with_coverage.py b/build/scripts/with_coverage.py
index 29216d96a7..d62435c3b8 100644
--- a/build/scripts/with_coverage.py
+++ b/build/scripts/with_coverage.py
@@ -1,40 +1,40 @@
 # TODO prettyboy remove after ya-bin release
 
-import os
-import sys
-import subprocess
-import tarfile
-import random
-import shutil
-
-
-def mkdir_p(path):
-    try:
-        os.makedirs(path)
-    except OSError:
-        pass
-
-
-def main(args):
-    coverage_path = os.path.abspath(args[0])
-    coverage_dir = coverage_path + '.' + str(random.getrandbits(64))
-
-    mkdir_p(coverage_dir)
-
-    env = os.environ.copy()
-    env['GCOV_PREFIX'] = coverage_dir
-
-    subprocess.check_call(args[1:], env=env)
-
-    arch_path = coverage_dir + '.archive'
-
-    with tarfile.open(arch_path, 'w:') as tar:
-        tar.add(coverage_dir, arcname='.')
-
-    os.rename(arch_path, coverage_path)
-
-    shutil.rmtree(coverage_dir)
-
-
-if __name__ == '__main__':
-    main(sys.argv[1:])
+import os
+import sys
+import subprocess
+import tarfile
+import random
+import shutil
+
+
+def mkdir_p(path):
+    try:
+        os.makedirs(path)
+    except OSError:
+        pass
+
+
+def main(args):
+    coverage_path = os.path.abspath(args[0])
+    coverage_dir = coverage_path + '.' + str(random.getrandbits(64))
+
+    mkdir_p(coverage_dir)
+
+    env = os.environ.copy()
+    env['GCOV_PREFIX'] = coverage_dir
+
+    subprocess.check_call(args[1:], env=env)
+
+    arch_path = coverage_dir + '.archive'
+
+    with tarfile.open(arch_path, 'w:') as tar:
+        tar.add(coverage_dir, arcname='.')
+
+    os.rename(arch_path, coverage_path)
+
+    shutil.rmtree(coverage_dir)
+
+
+if __name__ == '__main__':
+    main(sys.argv[1:])
diff --git a/build/scripts/ya.make b/build/scripts/ya.make
index f74d16b11f..710165e40d 100644
--- a/build/scripts/ya.make
+++ b/build/scripts/ya.make
@@ -1,5 +1,5 @@
 OWNER(g:ymake)
-
+
 PY2TEST()
 
 TEST_SRCS(
@@ -57,7 +57,7 @@ TEST_SRCS(
     link_exe.py
     link_fat_obj.py
     link_lib.py
-    llvm_opt_wrapper.py
+    llvm_opt_wrapper.py
     merge_coverage_data.py
     merge_files.py
     mkdir.py
@@ -92,11 +92,11 @@ TEST_SRCS(
     writer.py
     xargs.py
     yield_line.py
-    yndexer.py
+    yndexer.py
 )
 
 PEERDIR(
     ydb/library/yql/public/udf
 )
 
-END()
+END()
diff --git a/build/scripts/yndexer.py b/build/scripts/yndexer.py
index 71a6167ae4..a38e28ba99 100644
--- a/build/scripts/yndexer.py
+++ b/build/scripts/yndexer.py
@@ -1,9 +1,9 @@
-import sys
-import subprocess
+import sys
+import subprocess
 import threading
-import os
+import os
 import re
-
+
 
 rx_resource_dir = re.compile(r'libraries: =([^:]*)')
 
@@ -41,19 +41,19 @@ class Process(object):
         return self._result
 
 
-if __name__ == '__main__':
-    args = sys.argv
-
-    yndexer = args[1]
+if __name__ == '__main__':
+    args = sys.argv
+
+    yndexer = args[1]
     timeout = int(args[2])
     arc_root = args[3]
     build_root = args[4]
     input_file = args[5]
     output_file = args[-1]
     tail_args = args[6:-1]
-
-    subprocess.check_call(tail_args)
-
+
+    subprocess.check_call(tail_args)
+
     clang = tail_args[0]
     out = subprocess.check_output([clang, '-print-search-dirs'])
     resource_dir = rx_resource_dir.search(out).group(1)
@@ -70,7 +70,7 @@ if __name__ == '__main__':
     ] + tail_args + [
         '-resource-dir', resource_dir,
     ]
-
+
     process = Process(yndexer_args)
     result = process.wait(timeout=timeout)
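
The restored fetch() in build/scripts/fetch_from_sandbox.py above is built around a layered-fallback download: iter_tries() yields zero-argument download actions in priority order (skynet, a custom fetcher script, the Sandbox proxy with capped tries, shuffled storage mirrors, MDS), and a for/else loop runs at most ten of them, sleeping longer after each failure and re-raising the first recorded error only if every attempt fails. A minimal sketch of that pattern, written in Python 3 for brevity (the script itself is Python 2); `fetch_with_fallbacks` and its parameters are illustrative names, not part of the script's API:

```python
import itertools
import time


def fetch_with_fallbacks(actions, max_tries=10):
    """Run zero-argument download callables in order until one succeeds.

    Mirrors the iter_tries()/for-else shape of fetch(): cap the number of
    attempts, back off a little more after each failure, and re-raise the
    first recorded exception only when every attempt has failed.
    """
    first_exc = None
    for i, action in enumerate(itertools.islice(actions, 0, max_tries)):
        try:
            return action()  # first success wins (the `break` in the original)
        except Exception as exc:  # the real script special-cases several error types
            first_exc = first_exc or exc
            time.sleep(i)  # sleep 0s, 1s, 2s, ... between attempts
    # Reached only when no attempt succeeded, like the original's for/else branch.
    if first_exc:
        raise first_exc
    raise RuntimeError("No available protocol and/or server to fetch resource")
```

A caller passes the cheap, tightly capped attempts first and the patient retries last, e.g. `fetch_with_fallbacks(iter([lambda: mirror(), lambda: proxy()]))`; ordering the generator is what keeps the common case fast while still guaranteeing an eventual fallback path.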