author    | akastornov <akastornov@yandex-team.ru> | 2022-02-10 16:46:03 +0300
committer | Daniil Cherednik <dcherednik@yandex-team.ru> | 2022-02-10 16:46:03 +0300
commit    | 3a2de774d91ca8d7325aaf81c200b1d2047725e6 (patch)
tree      | 5674a780ce03a8bbd794733a19c7a70d587e4a14 /build/scripts
parent    | 7bd11ff35e97544d119e43447e3e865f2588ee7f (diff)
download  | ydb-3a2de774d91ca8d7325aaf81c200b1d2047725e6.tar.gz
Restoring authorship annotation for <akastornov@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'build/scripts')
-rw-r--r-- | build/scripts/collect_java_srcs.py        |  82
-rw-r--r-- | build/scripts/compile_java.py             | 132
-rw-r--r-- | build/scripts/copy_to_dir.py              | 100
-rw-r--r-- | build/scripts/create_jcoverage_report.py  |  84
-rwxr-xr-x | build/scripts/fetch_from.py               |   6
-rwxr-xr-x | build/scripts/fetch_from_sandbox.py       |  72
-rw-r--r-- | build/scripts/fetch_resource.py           |  66
-rw-r--r-- | build/scripts/generate_pom.py             |   2
-rw-r--r-- | build/scripts/link_dyn_lib.py             |  60
-rw-r--r-- | build/scripts/merge_coverage_data.py      |  24
-rw-r--r-- | build/scripts/pack_jcoverage_resources.py |  14
-rw-r--r-- | build/scripts/resolve_java_srcs.py        |  22
-rw-r--r-- | build/scripts/run_javac.py                | 142
-rw-r--r-- | build/scripts/run_sonar.py                | 146
-rw-r--r-- | build/scripts/stderr2stdout.py            |  12
-rw-r--r-- | build/scripts/tared_protoc.py             |  62
-rw-r--r-- | build/scripts/unpacking_jtest_runner.py   |  56
-rw-r--r-- | build/scripts/writer.py                   |  14
18 files changed, 548 insertions, 548 deletions
diff --git a/build/scripts/collect_java_srcs.py b/build/scripts/collect_java_srcs.py
index 170002520a..a8056932cc 100644
--- a/build/scripts/collect_java_srcs.py
+++ b/build/scripts/collect_java_srcs.py
@@ -1,51 +1,51 @@
-import os
-import sys
-import contextlib
-import tarfile
-import zipfile
-
-
-if __name__ == '__main__':
-    build_root = sys.argv[1]
+import os
+import sys
+import contextlib
+import tarfile
+import zipfile
+
+
+if __name__ == '__main__':
+    build_root = sys.argv[1]
     root = os.path.normpath(sys.argv[2])
     dest = os.path.normpath(sys.argv[3])
-    srcs = sys.argv[4:]
-
-    for src in srcs:
+    srcs = sys.argv[4:]
+
+    for src in srcs:
         src = os.path.normpath(src)
         if src.endswith('.java') or src.endswith('.kt'):
-            src_rel_path = os.path.relpath(src, root)
-
-            if os.path.join(root, src_rel_path) == src:
-                # Inside root
-                dst = os.path.join(dest, src_rel_path)
-
-            else:
-                # Outside root
+            src_rel_path = os.path.relpath(src, root)
+
+            if os.path.join(root, src_rel_path) == src:
+                # Inside root
+                dst = os.path.join(dest, src_rel_path)
+
+            else:
+                # Outside root
                 print>>sys.stderr, 'External src file "{}" is outside of srcdir {}, ignore'.format(
-                    os.path.relpath(src, build_root),
-                    os.path.relpath(root, build_root),
-                )
-                continue
-
-            if os.path.exists(dst):
-                print>>sys.stderr, 'Duplicate external src file {}, choice is undefined'.format(
-                    os.path.relpath(dst, root)
-                )
-
-            else:
+                    os.path.relpath(src, build_root),
+                    os.path.relpath(root, build_root),
+                )
+                continue
+
+            if os.path.exists(dst):
+                print>>sys.stderr, 'Duplicate external src file {}, choice is undefined'.format(
+                    os.path.relpath(dst, root)
+                )
+
+            else:
                 destdir = os.path.dirname(dst)
                 if destdir and not os.path.exists(destdir):
                     os.makedirs(destdir)
                 os.rename(src, dst)
-
-        elif src.endswith('.jsr'):
-            with contextlib.closing(tarfile.open(src, 'r')) as tf:
-                tf.extractall(dst)
-
-        elif src.endswith('-sources.jar'):
+
+        elif src.endswith('.jsr'):
+            with contextlib.closing(tarfile.open(src, 'r')) as tf:
+                tf.extractall(dst)
+
+        elif src.endswith('-sources.jar'):
             with zipfile.ZipFile(src) as zf:
-                zf.extractall(dst)
-
-        else:
-            print>>sys.stderr, 'Unrecognized file type', os.path.relpath(src, build_root)
+                zf.extractall(dst)
+
+        else:
+            print>>sys.stderr, 'Unrecognized file type', os.path.relpath(src, build_root)
diff --git a/build/scripts/compile_java.py b/build/scripts/compile_java.py
index e95869e853..8f2ad79e4a 100644
--- a/build/scripts/compile_java.py
+++ b/build/scripts/compile_java.py
@@ -1,29 +1,29 @@
-import optparse
-import contextlib
-import os
-import shutil
-import subprocess as sp
-import tarfile
-import zipfile
+import optparse
+import contextlib
+import os
+import shutil
+import subprocess as sp
+import tarfile
+import zipfile
 import sys
-
-
+
+
 def parse_args(args):
-    parser = optparse.OptionParser()
-    parser.add_option('--javac-bin')
-    parser.add_option('--jar-bin')
+    parser = optparse.OptionParser()
+    parser.add_option('--javac-bin')
+    parser.add_option('--jar-bin')
     parser.add_option('--vcs-mf')
-    parser.add_option('--package-prefix')
-    parser.add_option('--jar-output')
-    parser.add_option('--srcs-jar-output')
+    parser.add_option('--package-prefix')
+    parser.add_option('--jar-output')
+    parser.add_option('--srcs-jar-output')
     return parser.parse_args(args)
-
-
-def mkdir_p(directory):
-    if not os.path.exists(directory):
-        os.makedirs(directory)
-
-
+
+
+def mkdir_p(directory):
+    if not os.path.exists(directory):
+        os.makedirs(directory)
+
+
 def split_cmd_by_delim(cmd, delim='DELIM'):
     result = [[]]
     for arg in cmd:
@@ -34,69 +34,69 @@ def split_cmd_by_delim(cmd, delim='DELIM'):
     return result
 
 
-def main():
+def main():
     cmd_parts = split_cmd_by_delim(sys.argv)
     assert len(cmd_parts) == 3
     args, javac_opts, peers = cmd_parts
     opts, jsrcs = parse_args(args)
-
+
     jsrcs += list(filter(lambda x: x.endswith('.jsrc'), peers))
     peers = list(filter(lambda x: not x.endswith('.jsrc'), peers))
 
-    sources_dir = 'src'
-    mkdir_p(sources_dir)
-    for s in jsrcs:
-        if s.endswith('.jsrc'):
-            with contextlib.closing(tarfile.open(s, 'r')) as tf:
-                tf.extractall(sources_dir)
-
-    srcs = []
-    for r, _, files in os.walk(sources_dir):
-        for f in files:
-            srcs.append(os.path.join(r, f))
+    sources_dir = 'src'
+    mkdir_p(sources_dir)
+    for s in jsrcs:
+        if s.endswith('.jsrc'):
+            with contextlib.closing(tarfile.open(s, 'r')) as tf:
+                tf.extractall(sources_dir)
+
+    srcs = []
+    for r, _, files in os.walk(sources_dir):
+        for f in files:
+            srcs.append(os.path.join(r, f))
     srcs += jsrcs
     srcs = list(filter(lambda x: x.endswith('.java'), srcs))
-
-    classes_dir = 'cls'
-    mkdir_p(classes_dir)
+
+    classes_dir = 'cls'
+    mkdir_p(classes_dir)
     classpath = os.pathsep.join(peers)
-
-    if srcs:
+
+    if srcs:
         temp_sources_file = 'temp.sources.list'
         with open(temp_sources_file, 'w') as ts:
            ts.write(' '.join(srcs))
        sp.check_call([opts.javac_bin, '-nowarn', '-g', '-classpath', classpath, '-encoding', 'UTF-8', '-d', classes_dir] + javac_opts + ['@' + temp_sources_file])
-
-    for s in jsrcs:
-        if s.endswith('-sources.jar'):
-            with zipfile.ZipFile(s) as zf:
-                zf.extractall(sources_dir)
-
-        elif s.endswith('.jar'):
-            with zipfile.ZipFile(s) as zf:
-                zf.extractall(classes_dir)
-
+
+    for s in jsrcs:
+        if s.endswith('-sources.jar'):
+            with zipfile.ZipFile(s) as zf:
+                zf.extractall(sources_dir)
+
+        elif s.endswith('.jar'):
+            with zipfile.ZipFile(s) as zf:
+                zf.extractall(classes_dir)
+
     if opts.vcs_mf:
         sp.check_call([opts.jar_bin, 'cfm', opts.jar_output, opts.vcs_mf, os.curdir], cwd=classes_dir)
     else:
         sp.check_call([opts.jar_bin, 'cfM', opts.jar_output, os.curdir], cwd=classes_dir)
-
-    if opts.srcs_jar_output:
-        for s in jsrcs:
-            if s.endswith('.java'):
-                if opts.package_prefix:
-                    d = os.path.join(sources_dir, *(opts.package_prefix.split('.') + [os.path.basename(s)]))
-
-                else:
-                    d = os.path.join(sources_dir, os.path.basename(s))
-
-                shutil.copyfile(s, d)
-
+
+    if opts.srcs_jar_output:
+        for s in jsrcs:
+            if s.endswith('.java'):
+                if opts.package_prefix:
+                    d = os.path.join(sources_dir, *(opts.package_prefix.split('.') + [os.path.basename(s)]))
+
+                else:
+                    d = os.path.join(sources_dir, os.path.basename(s))
+
+                shutil.copyfile(s, d)
+
     if opts.vcs_mf:
         sp.check_call([opts.jar_bin, 'cfm', opts.srcs_jar_output, opts.vcs_mf, os.curdir], cwd=sources_dir)
     else:
         sp.check_call([opts.jar_bin, 'cfM', opts.srcs_jar_output, os.curdir], cwd=sources_dir)
-
-
-if __name__ == '__main__':
-    main()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/build/scripts/copy_to_dir.py b/build/scripts/copy_to_dir.py
index 9baeb5ffac..8044b0b0bf 100644
--- a/build/scripts/copy_to_dir.py
+++ b/build/scripts/copy_to_dir.py
@@ -2,19 +2,19 @@ import errno
 import sys
 import os
 import shutil
-import optparse
-import tarfile
+import optparse
+import tarfile
 
 
-def parse_args():
-    parser = optparse.OptionParser()
-    parser.add_option('--build-root')
-    parser.add_option('--dest-dir')
-    parser.add_option('--dest-arch')
-    return parser.parse_args()
-
-
-def ensure_dir_exists(path):
+def parse_args():
+    parser = optparse.OptionParser()
+    parser.add_option('--build-root')
+    parser.add_option('--dest-dir')
+    parser.add_option('--dest-arch')
+    return parser.parse_args()
+
+
+def ensure_dir_exists(path):
     try:
         os.makedirs(path)
     except OSError as e:
@@ -24,52 +24,52 @@ def ensure_dir_exists(path):
             raise
 
 
-def hardlink_or_copy(src, dst):
-    if os.name == 'nt':
-        shutil.copy(src, dst)
-    else:
-        try:
-            os.link(src, dst)
-        except OSError as e:
-            if e.errno == errno.EEXIST:
-                return
-            elif e.errno == errno.EXDEV:
+def hardlink_or_copy(src, dst):
+    if os.name == 'nt':
+        shutil.copy(src, dst)
+    else:
+        try:
+            os.link(src, dst)
+        except OSError as e:
+            if e.errno == errno.EEXIST:
+                return
+            elif e.errno == errno.EXDEV:
                 sys.stderr.write("Can't make cross-device hardlink - fallback to copy: {} -> {}\n".format(src, dst))
-                shutil.copy(src, dst)
-            else:
-                raise
-
-
+                shutil.copy(src, dst)
+            else:
+                raise
+
+
 def main():
-    opts, args = parse_args()
-    assert opts.build_root
-    assert opts.dest_dir
-
-    dest_arch = None
-    if opts.dest_arch:
-        if opts.dest_arch.endswith('.tar'):
+    opts, args = parse_args()
+    assert opts.build_root
+    assert opts.dest_dir
+
+    dest_arch = None
+    if opts.dest_arch:
+        if opts.dest_arch.endswith('.tar'):
             dest_arch = tarfile.open(opts.dest_arch, 'w', dereference=True)
-        elif opts.dest_arch.endswith('.tar.gz') or opts.dest_arch.endswith('.tgz'):
+        elif opts.dest_arch.endswith('.tar.gz') or opts.dest_arch.endswith('.tgz'):
             dest_arch = tarfile.open(opts.dest_arch, 'w:gz', dereference=True)
-        else:
-            # TODO: move check to graph generation stage
-            raise Exception('Unsopported archive type for {}. Use one of: tar, tar.gz, tgz.'.format(os.path.basename(opts.dest_arch)))
-
-    for arg in args:
+        else:
+            # TODO: move check to graph generation stage
+            raise Exception('Unsopported archive type for {}. Use one of: tar, tar.gz, tgz.'.format(os.path.basename(opts.dest_arch)))
+
+    for arg in args:
         dst = arg
-        if dst.startswith(opts.build_root):
-            dst = dst[len(opts.build_root) + 1:]
+        if dst.startswith(opts.build_root):
+            dst = dst[len(opts.build_root) + 1:]
 
         if dest_arch and not arg.endswith('.pkg.fake'):
-            dest_arch.add(arg, arcname=dst)
-
-        dst = os.path.join(opts.dest_dir, dst)
-        ensure_dir_exists(os.path.dirname(dst))
-        hardlink_or_copy(arg, dst)
-
-    if dest_arch:
-        dest_arch.close()
-
+            dest_arch.add(arg, arcname=dst)
+        dst = os.path.join(opts.dest_dir, dst)
+        ensure_dir_exists(os.path.dirname(dst))
+        hardlink_or_copy(arg, dst)
+
+    if dest_arch:
+        dest_arch.close()
+
+
 if __name__ == '__main__':
     sys.exit(main())
diff --git a/build/scripts/create_jcoverage_report.py b/build/scripts/create_jcoverage_report.py
index 45083ff4f7..578183e9b9 100644
--- a/build/scripts/create_jcoverage_report.py
+++ b/build/scripts/create_jcoverage_report.py
@@ -1,8 +1,8 @@
 import argparse
 import tarfile
-import zipfile
+import zipfile
 import os
-import sys
+import sys
 import time
 import subprocess
 
@@ -26,66 +26,66 @@ class Timer(object):
 
 def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_format, tar_output, agent_disposition, runners_paths):
     timer = Timer()
-    reports_dir = 'jacoco_reports_dir'
-    mkdir_p(reports_dir)
-    with tarfile.open(source) as tf:
-        tf.extractall(reports_dir)
+    reports_dir = 'jacoco_reports_dir'
+    mkdir_p(reports_dir)
+    with tarfile.open(source) as tf:
+        tf.extractall(reports_dir)
     timer.step("Coverage data extracted")
-    reports = [os.path.join(reports_dir, fname) for fname in os.listdir(reports_dir)]
-
-    with open(jars_list) as f:
-        jars = f.read().strip().split()
+    reports = [os.path.join(reports_dir, fname) for fname in os.listdir(reports_dir)]
+
+    with open(jars_list) as f:
+        jars = f.read().strip().split()
     if jars and runners_paths:
         for r in runners_paths:
             try:
                 jars.remove(r)
             except ValueError:
                 pass
-
-    src_dir = 'sources_dir'
-    cls_dir = 'classes_dir'
-
-    mkdir_p(src_dir)
-    mkdir_p(cls_dir)
-
-    for jar in jars:
-        if jar.endswith('devtools-jacoco-agent.jar'):
-            agent_disposition = jar
-
+
+    src_dir = 'sources_dir'
+    cls_dir = 'classes_dir'
+
+    mkdir_p(src_dir)
+    mkdir_p(cls_dir)
+
+    for jar in jars:
+        if jar.endswith('devtools-jacoco-agent.jar'):
+            agent_disposition = jar
+
         # Skip java contrib - it's irrelevant coverage
         if jar.startswith('contrib/java'):
             continue
 
-        with zipfile.ZipFile(jar) as jf:
-            for entry in jf.infolist():
-                if entry.filename.endswith('.java'):
-                    dest = src_dir
-
-                elif entry.filename.endswith('.class'):
-                    dest = cls_dir
-
-                else:
-                    continue
-
+        with zipfile.ZipFile(jar) as jf:
+            for entry in jf.infolist():
+                if entry.filename.endswith('.java'):
+                    dest = src_dir
+
+                elif entry.filename.endswith('.class'):
+                    dest = cls_dir
+
+                else:
+                    continue
+
                 entry.filename = entry.filename.encode('utf-8')
-                jf.extract(entry, dest)
+                jf.extract(entry, dest)
     timer.step("Jar files extracted")
-
-    if not agent_disposition:
-        print>>sys.stderr, 'Can\'t find jacoco agent. Will not generate html report for java coverage.'
-
+
+    if not agent_disposition:
+        print>>sys.stderr, 'Can\'t find jacoco agent. Will not generate html report for java coverage.'
+
     if tar_output:
         report_dir = 'java.report.temp'
     else:
         report_dir = output
     mkdir_p(report_dir)
 
-    if agent_disposition:
+    if agent_disposition:
         agent_cmd = [java, '-jar', agent_disposition, src_dir, cls_dir, prefix_filter or '.', exclude_filter or '__no_exclude__', report_dir, output_format]
-        agent_cmd += reports
-        subprocess.check_call(agent_cmd)
+        agent_cmd += reports
+        subprocess.check_call(agent_cmd)
         timer.step("Jacoco finished")
-
+
     if tar_output:
         with tarfile.open(output, 'w') as outf:
             outf.add(report_dir, arcname='.')
@@ -103,7 +103,7 @@ if __name__ == '__main__':
     parser.add_argument('--java', action='store')
     parser.add_argument('--prefix-filter', action='store')
     parser.add_argument('--exclude-filter', action='store')
-    parser.add_argument('--jars-list', action='store')
+    parser.add_argument('--jars-list', action='store')
     parser.add_argument('--output-format', action='store', default="html")
     parser.add_argument('--raw-output', dest='tar_output', action='store_false', default=True)
     parser.add_argument('--agent-disposition', action='store')
diff --git a/build/scripts/fetch_from.py b/build/scripts/fetch_from.py
index db4fea50bf..3860974ac4 100755
--- a/build/scripts/fetch_from.py
+++ b/build/scripts/fetch_from.py
@@ -321,15 +321,15 @@ def process(fetched_file, file_name, args, remove=True):
         hardlink_or_copy(fetched_file, args.copy_to)
         if not args.outputs:
             args.outputs = [args.copy_to]
-
+
     if args.rename_to:
         args.rename.append(fetched_file)
         if not args.outputs:
             args.outputs = [args.rename_to]
-
+
     if args.copy_to_dir:
         hardlink_or_copy(fetched_file, os.path.join(args.copy_to_dir, file_name))
-
+
     if args.untar_to:
         ensure_dir(args.untar_to)
         # Extract only requested files
diff --git a/build/scripts/fetch_from_sandbox.py b/build/scripts/fetch_from_sandbox.py
index a99542e174..f8533f1223 100755
--- a/build/scripts/fetch_from_sandbox.py
+++ b/build/scripts/fetch_from_sandbox.py
@@ -18,19 +18,19 @@ MDS_PREFIX = 'http://storage-int.mds.yandex.net/get-sandbox/'
 TEMPORARY_ERROR_CODES = (429, 500, 503, 504)
 
 
-def parse_args():
+def parse_args():
     parser = argparse.ArgumentParser()
     fetch_from.add_common_arguments(parser)
     parser.add_argument('--resource-id', type=int, required=True)
     parser.add_argument('--custom-fetcher')
     parser.add_argument('--resource-file')
     return parser.parse_args()
-
-
-class ResourceInfoError(Exception):
-    pass
-
-
+
+
+class ResourceInfoError(Exception):
+    pass
+
+
 class UnsupportedProtocolException(Exception):
     pass
 
@@ -75,37 +75,37 @@ def download_by_skynet(resource_info, file_name):
 
 
 def _urlopen(url, data=None, headers=None):
-    n = 10
+    n = 10
     tout = 30
     started = time.time()
     reqid = uuid.uuid4()
-
+
     request = urllib2.Request(url, data=data, headers=headers or {})
     request.add_header('X-Request-Timeout', str(tout))
     request.add_header('X-Request-Id', str(reqid))
     request.add_header('User-Agent', 'fetch_from_sandbox.py')
-    for i in xrange(n):
+    for i in xrange(n):
         retry_after = i
-        try:
+        try:
             request.add_header('X-Request-Duration', str(int(time.time() - started)))
             return urllib2.urlopen(request, timeout=tout).read()
-
-        except urllib2.HTTPError as e:
+
+        except urllib2.HTTPError as e:
             logging.warning('failed to fetch URL %s with HTTP code %d: %s', url, e.code, e)
             retry_after = int(e.headers.get('Retry-After', str(retry_after)))
-
+
             if e.code not in TEMPORARY_ERROR_CODES:
-                raise
-
-        except Exception as e:
+                raise
+
+        except Exception as e:
             logging.warning('failed to fetch URL %s: %s', url, e)
-
-        if i + 1 == n:
-            raise e
-
+
+        if i + 1 == n:
+            raise e
+
         time.sleep(retry_after)
-
-
+
+
 def _query(url):
     return json.loads(_urlopen(url))
@@ -133,19 +133,19 @@ def fetch_via_script(script, resource_id):
 
 
 def fetch(resource_id, custom_fetcher):
-    try:
+    try:
         resource_info = get_resource_info(resource_id, touch=True, no_links=True)
-    except Exception as e:
+    except Exception as e:
         sys.stderr.write(
             "Failed to fetch resource {}: {}\n".format(resource_id, str(e))
         )
         raise
-
+
     if resource_info.get('state', 'DELETED') != 'READY':
         raise ResourceInfoError("Resource {} is not READY".format(resource_id))
 
-    logging.info('Resource %s info %s', str(resource_id), json.dumps(resource_info))
-
+    logging.info('Resource %s info %s', str(resource_id), json.dumps(resource_info))
+
     resource_file_name = os.path.basename(resource_info["file_name"])
     expected_md5 = resource_info.get('md5')
@@ -183,9 +183,9 @@ def fetch(resource_id, custom_fetcher):
         if mds_link is not None:
             yield lambda: fetch_from.fetch_url(mds_link, True, resource_file_name, expected_md5)
 
-    if resource_info.get('attributes', {}).get('ttl') != 'inf':
-        sys.stderr.write('WARNING: resource {} ttl is not "inf".\n'.format(resource_id))
-
+    if resource_info.get('attributes', {}).get('ttl') != 'inf':
+        sys.stderr.write('WARNING: resource {} ttl is not "inf".\n'.format(resource_id))
+
     exc_info = None
     for i, action in enumerate(itertools.islice(iter_tries(), 0, 10)):
         try:
@@ -210,10 +210,10 @@ def fetch(resource_id, custom_fetcher):
         raise exc_info[0], exc_info[1], exc_info[2]
     else:
         raise Exception("No available protocol and/or server to fetch resource")
-
+
     return fetched_file, resource_info['file_name']
-
+
 
 def _get_resource_info_from_file(resource_file):
     if resource_file is None or not os.path.exists(resource_file):
         return None
@@ -242,7 +242,7 @@ def _get_resource_info_from_file(resource_file):
 
 def main(args):
     custom_fetcher = os.environ.get('YA_CUSTOM_FETCHER')
-
+
     resource_info = _get_resource_info_from_file(args.resource_file)
     if resource_info:
         fetched_file = args.resource_file
@@ -250,14 +250,14 @@
     else:
         # This code should be merged to ya and removed.
         fetched_file, file_name = fetch(args.resource_id, custom_fetcher)
-
+
     fetch_from.process(fetched_file, file_name, args, remove=not custom_fetcher and not resource_info)
 
 
 if __name__ == '__main__':
     args = parse_args()
     fetch_from.setup_logging(args, os.path.basename(__file__))
-
+
     try:
         main(args)
     except Exception as e:
diff --git a/build/scripts/fetch_resource.py b/build/scripts/fetch_resource.py
index d5af311e5d..ba046ad88e 100644
--- a/build/scripts/fetch_resource.py
+++ b/build/scripts/fetch_resource.py
@@ -1,43 +1,43 @@
-import urllib2
+import urllib2
 import argparse
-import xmlrpclib
-
-
-def parse_args():
+import xmlrpclib
+
+
+def parse_args():
     parser = argparse.ArgumentParser()
     parser.add_argument('-r', '--resource-id', type=int, required=True)
     parser.add_argument('-o', '--output', required=True)
     return parser.parse_args()
-
-
-def fetch(url, retries=4, timeout=5):
-    for i in xrange(retries):
-        try:
-            return urllib2.urlopen(url, timeout=timeout).read()
-
-        except Exception:
-            if i + 1 < retries:
-                continue
-
-            else:
-                raise
-
-
-def fetch_resource(id_):
+
+
+def fetch(url, retries=4, timeout=5):
+    for i in xrange(retries):
+        try:
+            return urllib2.urlopen(url, timeout=timeout).read()
+
+        except Exception:
+            if i + 1 < retries:
+                continue
+
+            else:
+                raise
+
+
+def fetch_resource(id_):
     urls = xmlrpclib.ServerProxy("https://sandbox.yandex-team.ru/sandbox/xmlrpc").get_resource_http_links(id_)
-
-    for u in urls:
-        try:
-            return fetch(u)
-
+
+    for u in urls:
+        try:
+            return fetch(u)
+
         except Exception:
-            continue
-
-    raise Exception('Cannot fetch resource {}'.format(id_))
-
-
-if __name__ == '__main__':
+            continue
+
+    raise Exception('Cannot fetch resource {}'.format(id_))
+
+
+if __name__ == '__main__':
     args = parse_args()
-
+
     with open(args.output, 'wb') as f:
         f.write(fetch_resource(int(args.resource_id)))
diff --git a/build/scripts/generate_pom.py b/build/scripts/generate_pom.py
index 200caebc0b..28d5c988eb 100644
--- a/build/scripts/generate_pom.py
+++ b/build/scripts/generate_pom.py
@@ -247,7 +247,7 @@ def build_pom_and_export_to_maven(**kwargs):
 
     _indent(project)
 
-    et.ElementTree(project).write(pom_path)
+    et.ElementTree(project).write(pom_path)
 
     sys.stderr.write("[MAVEN EXPORT] Generated {} file for target {}\n".format(os.path.basename(pom_path), target_path))
diff --git a/build/scripts/link_dyn_lib.py b/build/scripts/link_dyn_lib.py
index 23487f5c1e..0b14f0cc99 100644
--- a/build/scripts/link_dyn_lib.py
+++ b/build/scripts/link_dyn_lib.py
@@ -3,7 +3,7 @@ import os
 import subprocess
 import tempfile
 import collections
-import optparse
+import optparse
 import pipes
 
 from process_whole_archive_option import ProcessWholeArchiveOption
@@ -158,26 +158,26 @@ def fix_cmd(arch, musl, c):
     return sum((do_fix(x) for x in c), [])
 
 
-def parse_args():
-    parser = optparse.OptionParser()
-    parser.disable_interspersed_args()
-    parser.add_option('--arch')
-    parser.add_option('--target')
-    parser.add_option('--soname')
-    parser.add_option('--fix-elf')
+def parse_args():
+    parser = optparse.OptionParser()
+    parser.disable_interspersed_args()
+    parser.add_option('--arch')
+    parser.add_option('--target')
+    parser.add_option('--soname')
+    parser.add_option('--fix-elf')
     parser.add_option('--linker-output')
     parser.add_option('--musl', action='store_true')
     parser.add_option('--whole-archive-peers', action='append')
     parser.add_option('--whole-archive-libs', action='append')
-    return parser.parse_args()
+    return parser.parse_args()
 
 
-if __name__ == '__main__':
-    opts, args = parse_args()
-
-    assert opts.arch
-    assert opts.target
-
+if __name__ == '__main__':
+    opts, args = parse_args()
+
+    assert opts.arch
+    assert opts.target
+
     cmd = fix_cmd(opts.arch, opts.musl, args)
     cmd = ProcessWholeArchiveOption(opts.arch, opts.whole_archive_peers, opts.whole_archive_libs).construct_cmd(cmd)
@@ -187,27 +187,27 @@ if __name__ == '__main__':
         stdout = sys.stdout
 
     proc = subprocess.Popen(cmd, shell=False, stderr=sys.stderr, stdout=stdout)
-    proc.communicate()
-
-    if proc.returncode:
+    proc.communicate()
+
+    if proc.returncode:
         print >>sys.stderr, 'linker has failed with retcode:', proc.returncode
         print >>sys.stderr, 'linker command:', shlex_join(cmd)
-        sys.exit(proc.returncode)
-
-    if opts.fix_elf:
-        cmd = [opts.fix_elf, opts.target]
-        proc = subprocess.Popen(cmd, shell=False, stderr=sys.stderr, stdout=sys.stdout)
-        proc.communicate()
-
-        if proc.returncode:
+        sys.exit(proc.returncode)
+
+    if opts.fix_elf:
+        cmd = [opts.fix_elf, opts.target]
+        proc = subprocess.Popen(cmd, shell=False, stderr=sys.stderr, stdout=sys.stdout)
+        proc.communicate()
+
+        if proc.returncode:
             print >>sys.stderr, 'fix_elf has failed with retcode:', proc.returncode
             print >>sys.stderr, 'fix_elf command:', shlex_join(cmd)
-            sys.exit(proc.returncode)
-
-    if opts.soname and opts.soname != opts.target:
+            sys.exit(proc.returncode)
+
+    if opts.soname and opts.soname != opts.target:
         if os.path.exists(opts.soname):
             os.unlink(opts.soname)
-        os.link(opts.target, opts.soname)
+        os.link(opts.target, opts.soname)
 
 
 # -----------------Test---------------- #
diff --git a/build/scripts/merge_coverage_data.py b/build/scripts/merge_coverage_data.py
index b7fa3c6a86..6e4aa9fde9 100644
--- a/build/scripts/merge_coverage_data.py
+++ b/build/scripts/merge_coverage_data.py
@@ -2,29 +2,29 @@ import sys
 import tarfile
 import copy
 import os
-import uuid
+import uuid
 
 
 def main(args):
-    output_file, args = args[0], args[1:]
+    output_file, args = args[0], args[1:]
     # heretic@: Splits files on which could be merged( files ) and which should not be merged( expendables )
     # expendables will be in output_file in form {name}{ordinal number of archive in args[]}.{extension}
-    try:
-        split_i = args.index('-no-merge')
-    except ValueError:
-        split_i = len(args)
-    files, expendables = args[:split_i], args[split_i + 1:]
+    try:
+        split_i = args.index('-no-merge')
+    except ValueError:
+        split_i = len(args)
+    files, expendables = args[:split_i], args[split_i + 1:]
 
     with tarfile.open(output_file, 'w') as outf:
-        for x in files:
+        for x in files:
             with tarfile.open(x) as tf:
                 for tarinfo in tf:
                     new_tarinfo = copy.deepcopy(tarinfo)
                     if new_tarinfo.name in expendables:
-                        dirname, basename = os.path.split(new_tarinfo.name)
-                        basename_parts = basename.split('.', 1)
-                        new_basename = '.'.join([basename_parts[0] + str(uuid.uuid4())] + basename_parts[1:])
-                        new_tarinfo.name = os.path.join(dirname, new_basename)
+                        dirname, basename = os.path.split(new_tarinfo.name)
+                        basename_parts = basename.split('.', 1)
+                        new_basename = '.'.join([basename_parts[0] + str(uuid.uuid4())] + basename_parts[1:])
+                        new_tarinfo.name = os.path.join(dirname, new_basename)
                     outf.addfile(new_tarinfo, tf.extractfile(tarinfo))
diff --git a/build/scripts/pack_jcoverage_resources.py b/build/scripts/pack_jcoverage_resources.py
index f6e181067a..22d2dddbe7 100644
--- a/build/scripts/pack_jcoverage_resources.py
+++ b/build/scripts/pack_jcoverage_resources.py
@@ -9,16 +9,16 @@ def main(args):
     report_file = args[1]
 
     res = subprocess.call(args[args.index('-end') + 1:])
-
-    if not os.path.exists(report_file):
-        print>>sys.stderr, 'Can\'t find jacoco exec file'
-        return res
-
+
+    if not os.path.exists(report_file):
+        print>>sys.stderr, 'Can\'t find jacoco exec file'
+        return res
+
     with tarfile.open(output_file, 'w') as outf:
         outf.add(report_file, arcname=os.path.basename(report_file))
-
+
     return res
 
 
 if __name__ == '__main__':
-    sys.exit(main(sys.argv[1:]))
+    sys.exit(main(sys.argv[1:]))
diff --git a/build/scripts/resolve_java_srcs.py b/build/scripts/resolve_java_srcs.py
index a2e6c20012..4919e16ed7 100644
--- a/build/scripts/resolve_java_srcs.py
+++ b/build/scripts/resolve_java_srcs.py
@@ -14,8 +14,8 @@ def list_all_files(directory, prefix='/', hidden_files=False):
     return result
 
 
-def pattern_to_regexp(p):
-    return '^' + \
+def pattern_to_regexp(p):
+    return '^' + \
         ('/' if not p.startswith('**') else '') + \
         re.escape(p).replace(
             r'\*\*\/', '[_DIR_]'
@@ -26,8 +26,8 @@ def pattern_to_regexp(p):
         ).replace(
             '[_FILE_]', '([^/]*)'
         ) + '$'
-
-
+
+
 def resolve_java_srcs(srcdir, include_patterns, exclude_patterns, all_resources, resolve_kotlin=False, resolve_groovy=False):
     result = {'java': [], 'not_java': [], 'kotlin': [], 'groovy': []}
     include_patterns_normal, include_patterns_hidden, exclude_patterns_normal, exclude_patterns_hidden = [], [], [], []
@@ -43,22 +43,22 @@ def resolve_java_srcs(srcdir, include_patterns, exclude_patterns, all_resources,
         else:
             re_patterns = [re.compile(i) for i in re_patterns]
         vis[:], hid[:] = re_patterns[:len(vis)], re_patterns[len(vis):]
-
+
     for inc_patterns, exc_patterns, with_hidden_files in (
         (include_patterns_normal, exclude_patterns_normal, False),
         (include_patterns_hidden, exclude_patterns_hidden, True),
     ):
         for f in list_all_files(srcdir, hidden_files=with_hidden_files):
             excluded = False
-
+
             for exc_re in exc_patterns:
                 if exc_re.match(f):
                     excluded = True
                     break
-
+
             if excluded:
                 continue
-
+
             for inc_re in inc_patterns:
                 if inc_re.match(f):
                     s = os.path.normpath(f[1:])
@@ -73,7 +73,7 @@ def resolve_java_srcs(srcdir, include_patterns, exclude_patterns, all_resources,
                     else:
                         result['not_java'].append(s)
                     break
-
+
     return sorted(result['java']), sorted(result['not_java']), sorted(result['kotlin']), sorted(result['groovy'])
@@ -99,8 +99,8 @@ if __name__ == '__main__':
     parser.add_argument('--all-resources', action='store_true', default=False)
     parser.add_argument('--resolve-kotlin', action='store_true', default=False)
    parser.add_argument('--resolve-groovy', action='store_true', default=False)
-    parser.add_argument('--include-patterns', nargs='*', default=[])
-    parser.add_argument('--exclude-patterns', nargs='*', default=[])
+    parser.add_argument('--include-patterns', nargs='*', default=[])
+    parser.add_argument('--exclude-patterns', nargs='*', default=[])
     args = parser.parse_args()
 
     do_it(**vars(args))
diff --git a/build/scripts/run_javac.py b/build/scripts/run_javac.py
index c35546e0fe..2a870af771 100644
--- a/build/scripts/run_javac.py
+++ b/build/scripts/run_javac.py
@@ -1,47 +1,47 @@
-import sys
-import subprocess
-import optparse
-import re
-
-
-def parse_args():
-    parser = optparse.OptionParser()
-    parser.disable_interspersed_args()
-    parser.add_option('--sources-list')
-    parser.add_option('--verbose', default=False, action='store_true')
-    parser.add_option('--remove-notes', default=False, action='store_true')
+import sys
+import subprocess
+import optparse
+import re
+
+
+def parse_args():
+    parser = optparse.OptionParser()
+    parser.disable_interspersed_args()
+    parser.add_option('--sources-list')
+    parser.add_option('--verbose', default=False, action='store_true')
+    parser.add_option('--remove-notes', default=False, action='store_true')
     parser.add_option('--ignore-errors', default=False, action='store_true')
     parser.add_option('--kotlin', default=False, action='store_true')
-    return parser.parse_args()
-
-
-COLORING = {
-    r'^(?P<path>.*):(?P<line>\d*): error: (?P<msg>.*)': lambda m: '[[unimp]]{path}[[rst]]:[[alt2]]{line}[[rst]]: [[c:light-red]]error[[rst]]: [[bad]]{msg}[[rst]]'.format(
-        path=m.group('path'),
-        line=m.group('line'),
-        msg=m.group('msg'),
-    ),
-    r'^(?P<path>.*):(?P<line>\d*): warning: (?P<msg>.*)': lambda m: '[[unimp]]{path}[[rst]]:[[alt2]]{line}[[rst]]: [[c:light-yellow]]warning[[rst]]: {msg}'.format(
-        path=m.group('path'),
-        line=m.group('line'),
-        msg=m.group('msg'),
-    ),
-    r'^warning: ': lambda m: '[[c:light-yellow]]warning[[rst]]: ',
-    r'^error: (?P<msg>.*)': lambda m: '[[c:light-red]]error[[rst]]: [[bad]]{msg}[[rst]]'.format(msg=m.group('msg')),
-    r'^Note: ': lambda m: '[[c:light-cyan]]Note[[rst]]: ',
-}
-
-
-def colorize(err):
-    for regex, sub in COLORING.iteritems():
-        err = re.sub(regex, sub, err, flags=re.MULTILINE)
-    return err
-
-
-def remove_notes(err):
-    return '\n'.join([line for line in err.split('\n') if not line.startswith('Note:')])
-
-
+    return parser.parse_args()
+
+
+COLORING = {
+    r'^(?P<path>.*):(?P<line>\d*): error: (?P<msg>.*)': lambda m: '[[unimp]]{path}[[rst]]:[[alt2]]{line}[[rst]]: [[c:light-red]]error[[rst]]: [[bad]]{msg}[[rst]]'.format(
+        path=m.group('path'),
+        line=m.group('line'),
+        msg=m.group('msg'),
+    ),
+    r'^(?P<path>.*):(?P<line>\d*): warning: (?P<msg>.*)': lambda m: '[[unimp]]{path}[[rst]]:[[alt2]]{line}[[rst]]: [[c:light-yellow]]warning[[rst]]: {msg}'.format(
+        path=m.group('path'),
+        line=m.group('line'),
+        msg=m.group('msg'),
+    ),
+    r'^warning: ': lambda m: '[[c:light-yellow]]warning[[rst]]: ',
+    r'^error: (?P<msg>.*)': lambda m: '[[c:light-red]]error[[rst]]: [[bad]]{msg}[[rst]]'.format(msg=m.group('msg')),
+    r'^Note: ': lambda m: '[[c:light-cyan]]Note[[rst]]: ',
+}
+
+
+def colorize(err):
+    for regex, sub in COLORING.iteritems():
+        err = re.sub(regex, sub, err, flags=re.MULTILINE)
+    return err
+
+
+def remove_notes(err):
+    return '\n'.join([line for line in err.split('\n') if not line.startswith('Note:')])
+
+
 def find_javac(cmd):
     if not cmd:
         return None
@@ -84,39 +84,39 @@ def fix_cmd(cmd):
     return cmd
 
 
-def main():
-    opts, cmd = parse_args()
-
-    with open(opts.sources_list) as f:
-        input_files = f.read().strip().split()
-
+def main():
+    opts, cmd = parse_args()
+
+    with open(opts.sources_list) as f:
+        input_files = f.read().strip().split()
+
     if opts.kotlin:
         input_files = [i for i in input_files if i.endswith('.kt')]
 
-    if not input_files:
-        if opts.verbose:
-            sys.stderr.write('No files to compile, javac is not launched.\n')
-
-    else:
+    if not input_files:
+        if opts.verbose:
+            sys.stderr.write('No files to compile, javac is not launched.\n')
+
+    else:
         p = subprocess.Popen(fix_cmd(cmd), stderr=subprocess.PIPE)
-        _, err = p.communicate()
-        rc = p.wait()
-
-        if opts.remove_notes:
-            err = remove_notes(err)
-
-        try:
-            err = colorize(err)
-
-        except Exception:
-            pass
-
+        _, err = p.communicate()
+        rc = p.wait()
+
+        if opts.remove_notes:
+            err = remove_notes(err)
+
+        try:
+            err = colorize(err)
+
+        except Exception:
+            pass
+
         if opts.ignore_errors and rc:
             sys.stderr.write('error: javac actually failed with exit code {}\n'.format(rc))
             rc = 0
 
-        sys.stderr.write(err)
-        sys.exit(rc)
-
-
-if __name__ == '__main__':
-    main()
+        sys.stderr.write(err)
+        sys.exit(rc)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/build/scripts/run_sonar.py b/build/scripts/run_sonar.py
index 761cc34b78..a1d06d2c41 100644
--- a/build/scripts/run_sonar.py
+++ b/build/scripts/run_sonar.py
@@ -1,38 +1,38 @@
-import os
-import sys
-import zipfile
-import tarfile
-import subprocess as sp
-import optparse
-import shutil
+import os
+import sys
+import zipfile
+import tarfile
+import subprocess as sp
+import optparse
+import shutil
 import xml.etree.ElementTree as et
-
-
-def parse_args():
-    parser = optparse.OptionParser()
-    parser.add_option(
-        '--classes-jar-path',
-        dest='classes_jar_paths',
-        action='append',
-        default=[],
-    )
-    parser.add_option('--sources-jar-path')
-    parser.add_option('--sonar-scanner-jar-path')
-    parser.add_option('--sonar-scanner-main-class')
-    parser.add_option('--java-coverage-merged-tar')
-    parser.add_option('--java-binary-path')
-    parser.add_option('--log-path')
+
+
+def parse_args():
+    parser = optparse.OptionParser()
+    parser.add_option(
+        '--classes-jar-path',
+        dest='classes_jar_paths',
+        action='append',
+        default=[],
+    )
+    parser.add_option('--sources-jar-path')
+    parser.add_option('--sonar-scanner-jar-path')
+    parser.add_option('--sonar-scanner-main-class')
+    parser.add_option('--java-coverage-merged-tar')
+    parser.add_option('--java-binary-path')
+    parser.add_option('--log-path')
     parser.add_option('--gcov-report-path')
     parser.add_option('--source-root')
     parser.add_option('--java-args', action='append', default=[])
-    return parser.parse_args()
-
-
-def extract_zip_file(zip_file_path, dest_dir):
-    with zipfile.ZipFile(zip_file_path) as arch:
-        arch.extractall(dest_dir)
-
-
+    return parser.parse_args()
+
+
+def extract_zip_file(zip_file_path, dest_dir):
+    with zipfile.ZipFile(zip_file_path) as arch:
+        arch.extractall(dest_dir)
+
+
 def get_source_real_path(source_root, path):
     parts = os.path.normpath(path).split(os.path.sep)
     for i in xrange(len(parts)):
@@ -64,58 +64,58 @@ def collect_cpp_sources(report, source_root, destination):
             os.link(src, dst)
 
 
-def main(opts, props_args):
-    sources_dir = os.path.abspath('src')
+def main(opts, props_args):
+    sources_dir = os.path.abspath('src')
     base_props_args = ['-Dsonar.sources=' + sources_dir]
-    os.mkdir(sources_dir)
+    os.mkdir(sources_dir)
     if opts.sources_jar_path:
         extract_zip_file(opts.sources_jar_path, sources_dir)
     if opts.gcov_report_path:
         collect_cpp_sources(opts.gcov_report_path, opts.source_root, sources_dir)
         base_props_args += ['-Dsonar.projectBaseDir=' + sources_dir, '-Dsonar.cxx.coverage.reportPath=' + opts.gcov_report_path]
-
-    if opts.classes_jar_paths:
-        classes_dir = os.path.abspath('cls')
-        os.mkdir(classes_dir)
-
-        for classes_jar_path in opts.classes_jar_paths:
-            extract_zip_file(classes_jar_path, classes_dir)
-
-        base_props_args.append('-Dsonar.java.binaries=' + classes_dir)
-
-    if opts.java_coverage_merged_tar:
-        jacoco_report_path = os.path.abspath('jacoco.exec')
-        with open(jacoco_report_path, 'w') as dest:
-            with tarfile.open(opts.java_coverage_merged_tar) as tar:
-                for src in tar:
+
+    if opts.classes_jar_paths:
+        classes_dir = os.path.abspath('cls')
+        os.mkdir(classes_dir)
+
+        for classes_jar_path in opts.classes_jar_paths:
+            extract_zip_file(classes_jar_path, classes_dir)
+
+        base_props_args.append('-Dsonar.java.binaries=' + classes_dir)
+
+    if opts.java_coverage_merged_tar:
+        jacoco_report_path = os.path.abspath('jacoco.exec')
+        with open(jacoco_report_path, 'w') as dest:
+            with tarfile.open(opts.java_coverage_merged_tar) as tar:
+                for src in tar:
                     extracted = tar.extractfile(src)
                     if extracted is not None:
                         shutil.copyfileobj(extracted, dest)
-
-        base_props_args += [
-            '-Dsonar.core.codeCoveragePlugin=jacoco',
-            '-Dsonar.jacoco.reportPath=' + jacoco_report_path
-        ]
+
+        base_props_args += [
+            '-Dsonar.core.codeCoveragePlugin=jacoco',
+            '-Dsonar.jacoco.reportPath=' + jacoco_report_path
+        ]
 
     java_args = ['-{}'.format(i) for i in opts.java_args] + ['-Djava.net.preferIPv6Addresses=true', '-Djava.net.preferIPv4Addresses=false']
-
-    sonar_cmd = [
-        opts.java_binary_path,
+
+    sonar_cmd = [
+        opts.java_binary_path,
     ] + java_args + [
-        '-classpath',
-        opts.sonar_scanner_jar_path,
+        '-classpath',
+        opts.sonar_scanner_jar_path,
     ] + base_props_args + props_args + [opts.sonar_scanner_main_class, '-X']
-
-    p = sp.Popen(sonar_cmd, stdout=sp.PIPE, stderr=sp.STDOUT)
-    out, _ = p.communicate()
-
-    sys.stderr.write(out)
+
+    p = sp.Popen(sonar_cmd, stdout=sp.PIPE, stderr=sp.STDOUT)
+    out, _ = p.communicate()
+
+    sys.stderr.write(out)
     with open(opts.log_path, 'a') as f:
-        f.write(out)
-
-    sys.exit(p.returncode)
-
-
-if __name__ == '__main__':
-    opts, args = parse_args()
-    props_args = ['-D' + arg for arg in args]
-    main(opts, props_args)
+        f.write(out)
+
+    sys.exit(p.returncode)
+
+
+if __name__ == '__main__':
+    opts, args = parse_args()
+    props_args = ['-D' + arg for arg in args]
+    main(opts, props_args)
diff --git a/build/scripts/stderr2stdout.py b/build/scripts/stderr2stdout.py
index 0e510da373..f47803a450 100644
--- a/build/scripts/stderr2stdout.py
+++ b/build/scripts/stderr2stdout.py
@@ -1,6 +1,6 @@
-import subprocess
-import sys
-
-if __name__ == '__main__':
-    assert len(sys.argv) > 1
-    sys.exit(subprocess.Popen(sys.argv[1:], stderr=sys.stdout).wait())
+import subprocess
+import sys
+
+if __name__ == '__main__':
+    assert len(sys.argv) > 1
+    sys.exit(subprocess.Popen(sys.argv[1:], stderr=sys.stdout).wait())
diff --git a/build/scripts/tared_protoc.py b/build/scripts/tared_protoc.py
index 7643e1dbfe..3207b54b1d 100644
--- a/build/scripts/tared_protoc.py
+++ b/build/scripts/tared_protoc.py
@@ -1,31 +1,31 @@
-import os
-import optparse
-import tarfile
-import contextlib
-import subprocess as sp
-
-
-def parse_args():
-    parser = optparse.OptionParser()
-    parser.disable_interspersed_args()
-    parser.add_option('--tar-output')
-    parser.add_option('--protoc-out-dir')
-    return parser.parse_args()
-
-
-def main():
-    opts, args = parse_args()
-    assert opts.tar_output
-    assert opts.protoc_out_dir
-
-    if not os.path.exists(opts.protoc_out_dir):
-        os.makedirs(opts.protoc_out_dir)
-
-    sp.check_call(args)
-
-    with contextlib.closing(tarfile.open(opts.tar_output, 'w')) as tf:
-        tf.add(opts.protoc_out_dir, arcname='')
-
-
-if __name__ == '__main__':
-    main()
+import os
+import optparse
+import tarfile
+import contextlib
+import subprocess as sp
+
+
+def parse_args():
+    parser = optparse.OptionParser()
+    parser.disable_interspersed_args()
+    parser.add_option('--tar-output')
+    parser.add_option('--protoc-out-dir')
+    return parser.parse_args()
+
+
+def main():
+    opts, args = parse_args()
+    assert opts.tar_output
+    assert opts.protoc_out_dir
+
+    if not os.path.exists(opts.protoc_out_dir):
+        os.makedirs(opts.protoc_out_dir)
+
+    sp.check_call(args)
+
+    with contextlib.closing(tarfile.open(opts.tar_output, 'w')) as tf:
+        tf.add(opts.protoc_out_dir, arcname='')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/build/scripts/unpacking_jtest_runner.py b/build/scripts/unpacking_jtest_runner.py
index 9730dcd711..8b23e3ed26 100644
--- a/build/scripts/unpacking_jtest_runner.py
+++ b/build/scripts/unpacking_jtest_runner.py
@@ -3,25 +3,25 @@ import json
 import optparse
 import os
 import sys
-import subprocess
+import subprocess
 import time
 import zipfile
 import platform
-
-# This script changes test run classpath by unpacking tests.jar -> tests-dir. The goal
-# is to launch tests with the same classpath as maven does.
-
-
-def parse_args():
-    parser = optparse.OptionParser()
-    parser.disable_interspersed_args()
+
+# This script changes test run classpath by unpacking tests.jar -> tests-dir. The goal
+# is to launch tests with the same classpath as maven does.
+
+
+def parse_args():
+    parser = optparse.OptionParser()
+    parser.disable_interspersed_args()
     parser.add_option('--trace-file')
-    parser.add_option('--jar-binary')
-    parser.add_option('--tests-jar-path')
+    parser.add_option('--jar-binary')
+    parser.add_option('--tests-jar-path')
     parser.add_option('--classpath-option-type', choices=('manifest', 'command_file', 'list'), default='manifest')
-    return parser.parse_args()
-
-
+    return parser.parse_args()
+
+
 # temporary, for jdk8/jdk9+ compatibility
 def fix_cmd(cmd):
     if not cmd:
@@ -88,18 +88,18 @@ def make_command_file_from_cp(class_path, out):
         cp_file.write(os.pathsep.join(class_path))
 
 
-def main():
+def main():
     s = time.time()
-    opts, args = parse_args()
-
-    # unpack tests jar
-    try:
+    opts, args = parse_args()
+
+    # unpack tests jar
+    try:
        build_root = args[args.index('--build-root') + 1]
        dest = os.path.join(build_root, 'test-classes')
-    except Exception:
+    except Exception:
        build_root = ''
-       dest = os.path.abspath('test-classes')
-
+       dest = os.path.abspath('test-classes')
+
     extract_jars(dest, opts.tests_jar_path)
 
     metrics = {
@@ -107,7 +107,7 @@ def main():
     }
     s = time.time()
 
-    # fix java classpath
+    # fix java classpath
     cp_idx = args.index('-classpath')
     if args[cp_idx + 1].startswith('@'):
         real_name = args[cp_idx + 1][1:]
@@ -137,12 +137,12 @@ def main():
     if opts.trace_file:
         dump_chunk_event({'metrics': metrics}, opts.trace_file)
 
-    # run java cmd
+    # run java cmd
     if platform.system() == 'Windows':
         sys.exit(subprocess.Popen(args).wait())
     else:
         os.execv(args[0], args)
-
-
-if __name__ == '__main__':
-    main()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/build/scripts/writer.py b/build/scripts/writer.py
index 21bb3006e5..7eceef9ccd 100644
--- a/build/scripts/writer.py
+++ b/build/scripts/writer.py
@@ -1,10 +1,10 @@
 import sys
 import argparse
-
+
 import process_command_files as pcf
+
-
-def parse_args():
+def parse_args():
     args = pcf.get_args(sys.argv[1:])
     parser = argparse.ArgumentParser()
     parser.add_argument('-f', '--file', dest='file_path')
@@ -15,7 +15,7 @@ def parse_args():
     parser.add_argument('-m', '--content-multiple', nargs='*', dest='content')
     parser.add_argument('-P', '--path-list', action='store_true', default=False)
     return parser.parse_args(args)
-
+
 
 def smart_shell_quote(v):
     if v is None:
@@ -23,11 +23,11 @@ def smart_shell_quote(v):
     if ' ' in v or '"' in v or "'" in v:
         return "\"{0}\"".format(v.replace('"', '\\"'))
     return v
-
-if __name__ == '__main__':
+
+if __name__ == '__main__':
     args = parse_args()
     open_type = 'a' if args.append else 'w'
-
+
     content = args.content
     if args.quote:
         content = [smart_shell_quote(ln) for ln in content] if content is not None else None
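A few notes on techniques that recur in the scripts above. All snippets below are illustrative sketches, not code from this commit. First, compile_java.py receives a single argv in which three argument groups (script options, javac options, peer jars) are packed together and separated by a DELIM sentinel; split_cmd_by_delim() recovers the groups. The diff shows only the first and last lines of that function, so the loop body in this sketch is an assumption, reconstructed from how main() consumes the result:

def split_cmd_by_delim(cmd, delim='DELIM'):
    # Accumulate arguments into the current group; a sentinel starts a new group.
    result = [[]]
    for arg in cmd:
        if arg == delim:
            result.append([])  # assumed: the sentinel itself is dropped
        else:
            result[-1].append(arg)
    return result


# A DELIM-packed argv yields exactly three groups, as main() asserts.
parts = split_cmd_by_delim(
    ['compile_java.py', '--jar-output', 'a.jar', 'DELIM', '-source', '8', 'DELIM', 'dep.jar'])
assert parts == [['compile_java.py', '--jar-output', 'a.jar'], ['-source', '8'], ['dep.jar']]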
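Second, _urlopen() in fetch_from_sandbox.py retries transient HTTP failures (codes 429/500/503/504) up to ten times, backing off by the attempt index unless the server supplies a Retry-After header. A self-contained sketch of that policy, with a hypothetical TransientError standing in for urllib2.HTTPError:

import time


class TransientError(Exception):
    """Hypothetical stand-in for the retriable HTTP errors handled by _urlopen."""
    def __init__(self, retry_after=None):
        super(TransientError, self).__init__('transient failure')
        self.retry_after = retry_after


def retry(do_request, attempts=10):
    # Mirror _urlopen's policy: back off by the attempt index unless the
    # server supplied Retry-After; re-raise after the final failed attempt.
    for i in range(attempts):
        retry_after = i
        try:
            return do_request()
        except TransientError as e:
            if e.retry_after is not None:
                retry_after = e.retry_after
            last = e
        except Exception as e:
            last = e
        if i + 1 == attempts:
            raise last
        time.sleep(retry_after)


calls = {'n': 0}

def flaky():
    calls['n'] += 1
    if calls['n'] < 3:
        raise TransientError(retry_after=0)
    return 'ok'

assert retry(flaky) == 'ok' and calls['n'] == 3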
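Third, merge_coverage_data.py merges several coverage tarballs into one; member names listed after the -no-merge marker must stay unique in the output, so a uuid4 is spliced into the basename ahead of the extension (the in-code comment mentions ordinal numbers, but the implementation shown above uses uuid4). The renaming rule in isolation:

import os
import uuid


def uniquify_member_name(name):
    # 'jacoco.exec' -> 'jacoco<uuid4>.exec'; the extension, if any, is preserved.
    dirname, basename = os.path.split(name)
    basename_parts = basename.split('.', 1)
    new_basename = '.'.join([basename_parts[0] + str(uuid.uuid4())] + basename_parts[1:])
    return os.path.join(dirname, new_basename)


renamed = uniquify_member_name('jacoco.exec')
assert renamed.startswith('jacoco') and renamed.endswith('.exec')
assert renamed != 'jacoco.exec'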
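Fourth, pattern_to_regexp() in resolve_java_srcs.py translates Ant-style globs into anchored regexps via [_DIR_]/[_FILE_] placeholders. The hunks above elide the middle replace steps, but the visible ends imply that '**/' becomes '(.*/)?' and '*' becomes '([^/]*)'. A worked example of that assumed translation for '**/*.java', ignoring the leading-'/' handling the script applies to its own path listings:

import re

# Assumed translation under the placeholder scheme:
#   re.escape('**/*.java') -> r'\*\*\/\*\.java' (Python 2 escapes '/')
#   r'\*\*\/' -> '(.*/)?'   and   r'\*' -> '([^/]*)'
pattern = '^' + '(.*/)?' + '([^/]*)' + re.escape('.java') + '$'

assert re.match(pattern, 'com/example/Main.java')
assert re.match(pattern, 'Main.java')  # '**/' also matches zero directories
assert not re.match(pattern, 'Main.java.bak')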
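Finally, run_javac.py rewrites javac's stderr through the COLORING regex table, injecting ya-style markup via callable replacements in re.sub. A Python 3-compatible sketch with a trimmed table (the original iterates with dict.iteritems() under Python 2):

import re

COLORING = {
    r'^error: (?P<msg>.*)': lambda m: '[[c:light-red]]error[[rst]]: [[bad]]{msg}[[rst]]'.format(msg=m.group('msg')),
    r'^Note: ': lambda m: '[[c:light-cyan]]Note[[rst]]: ',
}


def colorize(err):
    # Each regex rewrites matching lines; MULTILINE anchors '^' at every line start.
    for regex, sub in COLORING.items():
        err = re.sub(regex, sub, err, flags=re.MULTILINE)
    return err


out = colorize('error: cannot find symbol\nNote: uses unchecked operations')
assert out.splitlines()[0] == '[[c:light-red]]error[[rst]]: [[bad]]cannot find symbol[[rst]]'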