author | snermolaev <snermolaev@yandex-team.ru> | 2022-02-10 16:45:53 +0300
committer | Daniil Cherednik <dcherednik@yandex-team.ru> | 2022-02-10 16:45:53 +0300
commit | 7353a3fdea9c67c256980c00a2b3b67f09b23a27 (patch)
tree | 1a2c5ffcf89eb53ecd79dbc9bc0a195c27404d0c /build/scripts
parent | 2015790ac9fcc04caab83fccc23ab2460310a797 (diff)
download | ydb-7353a3fdea9c67c256980c00a2b3b67f09b23a27.tar.gz
Restoring authorship annotation for <snermolaev@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'build/scripts')
28 files changed, 1906 insertions, 1906 deletions
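Note that the insertion and deletion counts are symmetric: each hunk below removes a line and re-adds it with content identical except for trailing whitespace (the authorship-restore pass). A minimal sketch of how one could verify that property from the unified diff, assuming removed and added lines pair up in order within each hunk (the helper and its pairing strategy are illustrative, not part of this commit):

import sys

def whitespace_only(diff_text):
    lines = diff_text.splitlines()
    # Payload of each removed/added line, skipping the '---'/'+++' file headers.
    removed = [l[1:] for l in lines if l.startswith('-') and not l.startswith('---')]
    added = [l[1:] for l in lines if l.startswith('+') and not l.startswith('+++')]
    # A trailing-whitespace-only change swaps equal numbers of lines whose
    # stripped contents match pairwise (assumes in-order pairing, which holds
    # for simple per-hunk swaps like the ones in this commit).
    return len(removed) == len(added) and all(
        r.rstrip() == a.rstrip() for r, a in zip(removed, added)
    )

if __name__ == '__main__':
    print(whitespace_only(sys.stdin.read()))

Piping "git show 7353a3fdea9c67c256980c00a2b3b67f09b23a27 -- build/scripts" through this prints True for a pure whitespace change; the equivalent one-liner check is "git diff -w 2015790ac9fcc04caab83fccc23ab2460310a797..7353a3fdea9c67c256980c00a2b3b67f09b23a27 -- build/scripts", which emits no output when the two trees differ only in whitespace.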
diff --git a/build/scripts/build_java_with_error_prone.py b/build/scripts/build_java_with_error_prone.py index 108ed7bf2a..910443552e 100644 --- a/build/scripts/build_java_with_error_prone.py +++ b/build/scripts/build_java_with_error_prone.py @@ -22,7 +22,7 @@ def just_do_it(argv): java, error_prone_tool, javac_cmd = argv[0], argv[1], argv[2:] if java.endswith('javac') or java.endswith('javac.exe'): for f in javac_cmd: - if f.startswith('-Xep'): + if f.startswith('-Xep'): ERROR_PRONE_FLAGS.append(f) for f in ERROR_PRONE_FLAGS: if f in javac_cmd: diff --git a/build/scripts/build_java_with_error_prone2.py b/build/scripts/build_java_with_error_prone2.py index 9bdcfcd09c..4efc1e444a 100644 --- a/build/scripts/build_java_with_error_prone2.py +++ b/build/scripts/build_java_with_error_prone2.py @@ -18,7 +18,7 @@ JAVA10_EXPORTS = [ '--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED', '--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED', '--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED', - '--add-exports=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED', + '--add-exports=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED', ] @@ -26,11 +26,11 @@ def get_java_version(exe): p = subprocess.Popen([exe, '-version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() for line in ((out or '').strip() + (err or '').strip()).split("\n"): - m = re.match(r'java version "(.+)"', line) + m = re.match(r'java version "(.+)"', line) if m: parts = m.groups()[0].split(".") return parts[1] if parts[0] == "1" else parts[0] - m = re.match(r'openjdk version "(\d+).*"', line) + m = re.match(r'openjdk version "(\d+).*"', line) if m: parts = m.groups()[0].split(".") return parts[0] @@ -65,7 +65,7 @@ def just_do_it(argv): raise Exception("Can't determine java version") if int(ver) >= 10: for f in javac_cmd: - if f.startswith('-Xep'): + if f.startswith('-Xep'): ERROR_PRONE_FLAGS.append(f) for f in ERROR_PRONE_FLAGS: if f in javac_cmd: diff --git a/build/scripts/cgo1_wrapper.py b/build/scripts/cgo1_wrapper.py index ab9eb1d7fe..986082f7e9 100644 --- a/build/scripts/cgo1_wrapper.py +++ b/build/scripts/cgo1_wrapper.py @@ -1,45 +1,45 @@ -import argparse -import shutil -import subprocess -import sys - - -CGO1_SUFFIX='.cgo1.go' - - -def call(cmd, cwd, env=None): - # sys.stderr.write('{}\n'.format(' '.join(cmd))) - return subprocess.call(cmd, stdin=None, stderr=sys.stderr, stdout=sys.stdout, cwd=cwd, env=env) - - -def process_file(source_root, source_prefix, build_root, build_prefix, src_path, comment_prefix): - dst_path = '{}.tmp'.format(src_path) - with open(src_path, 'r') as src_file, open(dst_path, 'w') as dst_file: - for line in src_file: - if line.startswith(comment_prefix): - dst_file.write(line.replace(source_root, source_prefix).replace(build_root, build_prefix)) - else: - dst_file.write(line) - shutil.move(dst_path, src_path) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--build-prefix', default='__ARCADIA_BUILD_ROOT_PREFIX__') - parser.add_argument('--build-root', required=True) - parser.add_argument('--cgo1-files', nargs='+', required=True) - parser.add_argument('--cgo2-files', nargs='+', required=True) - parser.add_argument('--source-prefix', default='__ARCADIA_SOURCE_ROOT_PREFIX__') - parser.add_argument('--source-root', required=True) - parser.add_argument('cgo1_cmd', nargs='*') - args = parser.parse_args() - - exit_code = call(args.cgo1_cmd, args.source_root) - if exit_code != 0: - 
sys.exit(exit_code) - - for src_path in args.cgo1_files: - process_file(args.source_root, args.source_prefix, args.build_root, args.build_prefix, src_path, '//') - - for src_path in args.cgo2_files: - process_file(args.source_root, args.source_prefix, args.build_root, args.build_prefix, src_path, '#line') +import argparse +import shutil +import subprocess +import sys + + +CGO1_SUFFIX='.cgo1.go' + + +def call(cmd, cwd, env=None): + # sys.stderr.write('{}\n'.format(' '.join(cmd))) + return subprocess.call(cmd, stdin=None, stderr=sys.stderr, stdout=sys.stdout, cwd=cwd, env=env) + + +def process_file(source_root, source_prefix, build_root, build_prefix, src_path, comment_prefix): + dst_path = '{}.tmp'.format(src_path) + with open(src_path, 'r') as src_file, open(dst_path, 'w') as dst_file: + for line in src_file: + if line.startswith(comment_prefix): + dst_file.write(line.replace(source_root, source_prefix).replace(build_root, build_prefix)) + else: + dst_file.write(line) + shutil.move(dst_path, src_path) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--build-prefix', default='__ARCADIA_BUILD_ROOT_PREFIX__') + parser.add_argument('--build-root', required=True) + parser.add_argument('--cgo1-files', nargs='+', required=True) + parser.add_argument('--cgo2-files', nargs='+', required=True) + parser.add_argument('--source-prefix', default='__ARCADIA_SOURCE_ROOT_PREFIX__') + parser.add_argument('--source-root', required=True) + parser.add_argument('cgo1_cmd', nargs='*') + args = parser.parse_args() + + exit_code = call(args.cgo1_cmd, args.source_root) + if exit_code != 0: + sys.exit(exit_code) + + for src_path in args.cgo1_files: + process_file(args.source_root, args.source_prefix, args.build_root, args.build_prefix, src_path, '//') + + for src_path in args.cgo2_files: + process_file(args.source_root, args.source_prefix, args.build_root, args.build_prefix, src_path, '#line') diff --git a/build/scripts/collect_java_srcs.py b/build/scripts/collect_java_srcs.py index d87e18569e..170002520a 100644 --- a/build/scripts/collect_java_srcs.py +++ b/build/scripts/collect_java_srcs.py @@ -34,10 +34,10 @@ if __name__ == '__main__': ) else: - destdir = os.path.dirname(dst) - if destdir and not os.path.exists(destdir): - os.makedirs(destdir) - os.rename(src, dst) + destdir = os.path.dirname(dst) + if destdir and not os.path.exists(destdir): + os.makedirs(destdir) + os.rename(src, dst) elif src.endswith('.jsr'): with contextlib.closing(tarfile.open(src, 'r')) as tf: diff --git a/build/scripts/compile_cuda.py b/build/scripts/compile_cuda.py index 61bc5eec99..c0bec50b2a 100644 --- a/build/scripts/compile_cuda.py +++ b/build/scripts/compile_cuda.py @@ -53,7 +53,7 @@ def main(): '/Zc:inline', # disable unreferenced functions (kernel registrators) remove '-Wno-c++17-extensions', '-flto', - '-faligned-allocation', + '-faligned-allocation', ] if skip_nocxxinc: diff --git a/build/scripts/compile_java.py b/build/scripts/compile_java.py index a8c46a5565..e95869e853 100644 --- a/build/scripts/compile_java.py +++ b/build/scripts/compile_java.py @@ -40,9 +40,9 @@ def main(): args, javac_opts, peers = cmd_parts opts, jsrcs = parse_args(args) - jsrcs += list(filter(lambda x: x.endswith('.jsrc'), peers)) - peers = list(filter(lambda x: not x.endswith('.jsrc'), peers)) - + jsrcs += list(filter(lambda x: x.endswith('.jsrc'), peers)) + peers = list(filter(lambda x: not x.endswith('.jsrc'), peers)) + sources_dir = 'src' mkdir_p(sources_dir) for s in jsrcs: diff --git 
a/build/scripts/compile_jsrc.py b/build/scripts/compile_jsrc.py index b0923cfbaf..8760e5eee9 100644 --- a/build/scripts/compile_jsrc.py +++ b/build/scripts/compile_jsrc.py @@ -1,24 +1,24 @@ -import argparse -import os -import tarfile - - -def parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument('--input', nargs='*', required=True) - parser.add_argument('--output', required=True) - parser.add_argument('--prefix', required=True) - - return parser.parse_args() - - -def main(): - args = parse_args() - - with tarfile.open(args.output, 'w') as out: - for f in args.input: - out.add(f, arcname=os.path.relpath(f, args.prefix)) - - -if __name__ == '__main__': - main() +import argparse +import os +import tarfile + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--input', nargs='*', required=True) + parser.add_argument('--output', required=True) + parser.add_argument('--prefix', required=True) + + return parser.parse_args() + + +def main(): + args = parse_args() + + with tarfile.open(args.output, 'w') as out: + for f in args.input: + out.add(f, arcname=os.path.relpath(f, args.prefix)) + + +if __name__ == '__main__': + main() diff --git a/build/scripts/compile_pysrc.py b/build/scripts/compile_pysrc.py index 91107b530e..e3637e18e2 100644 --- a/build/scripts/compile_pysrc.py +++ b/build/scripts/compile_pysrc.py @@ -1,101 +1,101 @@ -import argparse -import os -import shutil -import subprocess -import tarfile - - -LIMIT = 6000 - - -def parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument('--input', required=True) - parser.add_argument('--output', required=True) - parser.add_argument('--rescompiler', required=True) - subparsers = parser.add_subparsers(dest='mode') - - parser_py2 = subparsers.add_parser('py2') - parser_py2.add_argument('--py_compile', required=True) - parser_py2.add_argument('--python', required=True) - - parser_py3 = subparsers.add_parser('py3') - parser_py3.add_argument('--pycc', required=True) - - return parser.parse_args() - - -def call(cmd, cwd=None, env=None): - return subprocess.check_output(cmd, stdin=None, stderr=subprocess.STDOUT, cwd=cwd, env=env) - - -def iterate_py2_resource_params(py_files): - for py in py_files: - mod = py[:-3].replace('/', '.') - key = '/py_modules/{}'.format(mod) - yield py, key - yield '-', 'resfs/src/{}={}'.format(key, py) - yield '{}.yapyc'.format(py), '/py_code/{}'.format(mod) - - -def iterate_py3_resource_params(py_files): - for py in py_files: - for ext in ('', '.yapyc3'): - path = '{}{}'.format(py, ext) - dest = 'py/{}'.format(path) - key = 'resfs/file/{}'.format(dest) - src = 'resfs/src/{}={}'.format(key, os.path.basename(path)) - yield '-', src - yield path, key - - -def main(): - args = parse_args() - - names = [] - with tarfile.open(args.input, 'r') as tar: - names = tar.getnames() - tar.extractall() - - if args.mode == 'py3': - pycc_cmd = [args.pycc] - pycc_ext = '.yapyc3' - iterate_resource_params = iterate_py3_resource_params - else: - pycc_cmd = [args.python, args.py_compile] - pycc_ext = '.yapyc' - iterate_resource_params = iterate_py2_resource_params - - py_files = sorted(names) - - for py in py_files: - cmd = pycc_cmd + ['{}-'.format(os.path.basename(py)), py, '{}{}'.format(py, pycc_ext)] - call(cmd) - - outputs = [] - cmd = [args.rescompiler, '{}.0'.format(args.output)] - size = 0 - for path, key in iterate_resource_params(py_files): - addendum = len(path) + len(key) - if size + addendum > LIMIT and len(cmd) > 2: - call(cmd) - outputs.append(cmd[1]) - cmd[1] = 
'{}.{}'.format(args.output, len(outputs)) - cmd = cmd[0:2] - size = 0 - cmd.extend([path, key]) - size += addendum - if len(outputs) == 0: - cmd[1] = args.output - call(cmd) - else: - call(cmd) - outputs.append(cmd[1]) - with open(args.output, 'w') as fout: - for fname in outputs: - with open(fname, 'r') as fin: - shutil.copyfileobj(fin, fout) - - -if __name__ == '__main__': - main() +import argparse +import os +import shutil +import subprocess +import tarfile + + +LIMIT = 6000 + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--input', required=True) + parser.add_argument('--output', required=True) + parser.add_argument('--rescompiler', required=True) + subparsers = parser.add_subparsers(dest='mode') + + parser_py2 = subparsers.add_parser('py2') + parser_py2.add_argument('--py_compile', required=True) + parser_py2.add_argument('--python', required=True) + + parser_py3 = subparsers.add_parser('py3') + parser_py3.add_argument('--pycc', required=True) + + return parser.parse_args() + + +def call(cmd, cwd=None, env=None): + return subprocess.check_output(cmd, stdin=None, stderr=subprocess.STDOUT, cwd=cwd, env=env) + + +def iterate_py2_resource_params(py_files): + for py in py_files: + mod = py[:-3].replace('/', '.') + key = '/py_modules/{}'.format(mod) + yield py, key + yield '-', 'resfs/src/{}={}'.format(key, py) + yield '{}.yapyc'.format(py), '/py_code/{}'.format(mod) + + +def iterate_py3_resource_params(py_files): + for py in py_files: + for ext in ('', '.yapyc3'): + path = '{}{}'.format(py, ext) + dest = 'py/{}'.format(path) + key = 'resfs/file/{}'.format(dest) + src = 'resfs/src/{}={}'.format(key, os.path.basename(path)) + yield '-', src + yield path, key + + +def main(): + args = parse_args() + + names = [] + with tarfile.open(args.input, 'r') as tar: + names = tar.getnames() + tar.extractall() + + if args.mode == 'py3': + pycc_cmd = [args.pycc] + pycc_ext = '.yapyc3' + iterate_resource_params = iterate_py3_resource_params + else: + pycc_cmd = [args.python, args.py_compile] + pycc_ext = '.yapyc' + iterate_resource_params = iterate_py2_resource_params + + py_files = sorted(names) + + for py in py_files: + cmd = pycc_cmd + ['{}-'.format(os.path.basename(py)), py, '{}{}'.format(py, pycc_ext)] + call(cmd) + + outputs = [] + cmd = [args.rescompiler, '{}.0'.format(args.output)] + size = 0 + for path, key in iterate_resource_params(py_files): + addendum = len(path) + len(key) + if size + addendum > LIMIT and len(cmd) > 2: + call(cmd) + outputs.append(cmd[1]) + cmd[1] = '{}.{}'.format(args.output, len(outputs)) + cmd = cmd[0:2] + size = 0 + cmd.extend([path, key]) + size += addendum + if len(outputs) == 0: + cmd[1] = args.output + call(cmd) + else: + call(cmd) + outputs.append(cmd[1]) + with open(args.output, 'w') as fout: + for fname in outputs: + with open(fname, 'r') as fin: + shutil.copyfileobj(fin, fout) + + +if __name__ == '__main__': + main() diff --git a/build/scripts/copy_files_to_dir.py b/build/scripts/copy_files_to_dir.py index 111c62167c..ead57ba16e 100644 --- a/build/scripts/copy_files_to_dir.py +++ b/build/scripts/copy_files_to_dir.py @@ -1,59 +1,59 @@ -import argparse -import errno -import os -import process_command_files as pcf -import shutil -import sys - - -def parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument('--dest-dir', required=True) - parser.add_argument('--existing', choices=('skip', 'overwrite'), default='overwrite') - parser.add_argument('--flat', action='store_true') - parser.add_argument('--skip-prefix', 
dest='skip_prefixes', action='append', default=[]) - parser.add_argument('files', nargs='*') - return parser.parse_args(pcf.get_args(sys.argv[1:])) - - -def makedirs(dirname): - try: - os.makedirs(dirname) - except OSError as e: - if e.errno == errno.EEXIST and os.path.isdir(dirname): - pass - else: - raise - - -def main(): - args = parse_args() - - dest_dir = os.path.normpath(args.dest_dir) + os.pathsep - makedirs(dest_dir) - - prefixes = ['{}{}'.format(os.path.normpath(p), os.path.sep) for p in args.skip_prefixes] - - for src in args.files: - src = os.path.normpath(src) - assert os.path.isfile(src) - if args.flat: - rel_dst = os.path.basename(src) - else: - rel_dst = src - for prefix in prefixes: - if src.startswith(prefix): - rel_dst = src[len(prefix):] - break - assert not os.path.isabs(rel_dst) - dst = os.path.join(args.dest_dir, rel_dst) - if os.path.isfile(dst) and args.existing == 'skip': - break - - makedirs(os.path.dirname(dst)) - - shutil.copyfile(src, dst) - - -if __name__ == '__main__': - main() +import argparse +import errno +import os +import process_command_files as pcf +import shutil +import sys + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--dest-dir', required=True) + parser.add_argument('--existing', choices=('skip', 'overwrite'), default='overwrite') + parser.add_argument('--flat', action='store_true') + parser.add_argument('--skip-prefix', dest='skip_prefixes', action='append', default=[]) + parser.add_argument('files', nargs='*') + return parser.parse_args(pcf.get_args(sys.argv[1:])) + + +def makedirs(dirname): + try: + os.makedirs(dirname) + except OSError as e: + if e.errno == errno.EEXIST and os.path.isdir(dirname): + pass + else: + raise + + +def main(): + args = parse_args() + + dest_dir = os.path.normpath(args.dest_dir) + os.pathsep + makedirs(dest_dir) + + prefixes = ['{}{}'.format(os.path.normpath(p), os.path.sep) for p in args.skip_prefixes] + + for src in args.files: + src = os.path.normpath(src) + assert os.path.isfile(src) + if args.flat: + rel_dst = os.path.basename(src) + else: + rel_dst = src + for prefix in prefixes: + if src.startswith(prefix): + rel_dst = src[len(prefix):] + break + assert not os.path.isabs(rel_dst) + dst = os.path.join(args.dest_dir, rel_dst) + if os.path.isfile(dst) and args.existing == 'skip': + break + + makedirs(os.path.dirname(dst)) + + shutil.copyfile(src, dst) + + +if __name__ == '__main__': + main() diff --git a/build/scripts/cpp_flatc_wrapper.py b/build/scripts/cpp_flatc_wrapper.py index bae30e7d08..78a20e0280 100644 --- a/build/scripts/cpp_flatc_wrapper.py +++ b/build/scripts/cpp_flatc_wrapper.py @@ -1,31 +1,31 @@ -import os -import subprocess -import sys - - -def main(): - cmd = sys.argv[1:] - h_file = None - try: - index = cmd.index('-o') - h_file = cmd[index+1] - cmd[index+1] = os.path.dirname(h_file) - except (ValueError, IndexError): - pass - p = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out, err = p.communicate() - if p.returncode: - if out: - sys.stderr.write('stdout:\n{}\n'.format(out)) - if err: - sys.stderr.write('stderr:\n{}\n'.format(err)) - sys.exit(p.returncode) - if h_file and h_file.endswith(('.fbs.h', '.fbs64.h')): - cpp_file = '{}.cpp'.format(h_file[:-2]) - with open(cpp_file, 'w') as f: - f.write('#include "{}"\n'.format(os.path.basename(h_file))) - sys.exit(0) - - -if __name__ == '__main__': - main() +import os +import subprocess +import sys + + +def main(): + cmd = sys.argv[1:] + h_file = None + try: + index = 
cmd.index('-o') + h_file = cmd[index+1] + cmd[index+1] = os.path.dirname(h_file) + except (ValueError, IndexError): + pass + p = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = p.communicate() + if p.returncode: + if out: + sys.stderr.write('stdout:\n{}\n'.format(out)) + if err: + sys.stderr.write('stderr:\n{}\n'.format(err)) + sys.exit(p.returncode) + if h_file and h_file.endswith(('.fbs.h', '.fbs64.h')): + cpp_file = '{}.cpp'.format(h_file[:-2]) + with open(cpp_file, 'w') as f: + f.write('#include "{}"\n'.format(os.path.basename(h_file))) + sys.exit(0) + + +if __name__ == '__main__': + main() diff --git a/build/scripts/extract_asrc.py b/build/scripts/extract_asrc.py index 1649899a14..89892ddf2d 100644 --- a/build/scripts/extract_asrc.py +++ b/build/scripts/extract_asrc.py @@ -1,23 +1,23 @@ -import argparse -import os -import tarfile - - -def parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument('--input', nargs='*', required=True) - parser.add_argument('--output', required=True) - - return parser.parse_args() - - -def main(): - args = parse_args() - - for asrc in filter(lambda x: x.endswith('.asrc') and os.path.exists(x), args.input): - with tarfile.open(asrc, 'r') as tar: - tar.extractall(path=args.output) - - -if __name__ == '__main__': - main() +import argparse +import os +import tarfile + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--input', nargs='*', required=True) + parser.add_argument('--output', required=True) + + return parser.parse_args() + + +def main(): + args = parse_args() + + for asrc in filter(lambda x: x.endswith('.asrc') and os.path.exists(x), args.input): + with tarfile.open(asrc, 'r') as tar: + tar.extractall(path=args.output) + + +if __name__ == '__main__': + main() diff --git a/build/scripts/extract_docs.py b/build/scripts/extract_docs.py index f42ce92b1f..cdcf93105f 100644 --- a/build/scripts/extract_docs.py +++ b/build/scripts/extract_docs.py @@ -1,36 +1,36 @@ -import argparse -import os -import process_command_files as pcf -import tarfile -import sys - - -def parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument('--dest-dir', required=True) - parser.add_argument('--skip-prefix', dest='skip_prefixes', action='append', default=[]) - parser.add_argument('docs', nargs='*') - return parser.parse_args(pcf.get_args(sys.argv[1:])) - - -def main(): - args = parse_args() - - prefixes = ['{}{}'.format(os.path.normpath(p), os.path.sep) for p in args.skip_prefixes] - - for src in filter(lambda(p): os.path.basename(p) == 'preprocessed.tar.gz', args.docs): - rel_dst = os.path.dirname(os.path.normpath(src)) - for prefix in prefixes: - if src.startswith(prefix): - rel_dst = rel_dst[len(prefix):] - break - assert not os.path.isabs(rel_dst) - dest_dir = os.path.join(args.dest_dir, rel_dst) - if not os.path.exists(dest_dir): - os.makedirs(dest_dir) - with tarfile.open(src, 'r') as tar_file: - tar_file.extractall(dest_dir) - - -if __name__ == '__main__': - main() +import argparse +import os +import process_command_files as pcf +import tarfile +import sys + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--dest-dir', required=True) + parser.add_argument('--skip-prefix', dest='skip_prefixes', action='append', default=[]) + parser.add_argument('docs', nargs='*') + return parser.parse_args(pcf.get_args(sys.argv[1:])) + + +def main(): + args = parse_args() + + prefixes = ['{}{}'.format(os.path.normpath(p), os.path.sep) for p in 
args.skip_prefixes] + + for src in filter(lambda(p): os.path.basename(p) == 'preprocessed.tar.gz', args.docs): + rel_dst = os.path.dirname(os.path.normpath(src)) + for prefix in prefixes: + if src.startswith(prefix): + rel_dst = rel_dst[len(prefix):] + break + assert not os.path.isabs(rel_dst) + dest_dir = os.path.join(args.dest_dir, rel_dst) + if not os.path.exists(dest_dir): + os.makedirs(dest_dir) + with tarfile.open(src, 'r') as tar_file: + tar_file.extractall(dest_dir) + + +if __name__ == '__main__': + main() diff --git a/build/scripts/f2c.py b/build/scripts/f2c.py index 4e1b8b3008..7021e1391f 100644 --- a/build/scripts/f2c.py +++ b/build/scripts/f2c.py @@ -50,7 +50,7 @@ if __name__ == '__main__': sys.exit(ret) if 'Error' in stderr: - print >>sys.stderr, stderr + print >>sys.stderr, stderr with open(args.output, 'w') as f: f.write(header) diff --git a/build/scripts/fail_module_cmd.py b/build/scripts/fail_module_cmd.py index 998f33ce87..fa14c0d851 100644 --- a/build/scripts/fail_module_cmd.py +++ b/build/scripts/fail_module_cmd.py @@ -1,7 +1,7 @@ -import sys - - -if __name__ == '__main__': - assert len(sys.argv) == 2, 'Unexpected number of arguments...' - sys.stderr.write('Error: module command for target [[bad]]{}[[rst]] was not executed due to build graph configuration errors...\n'.format(sys.argv[1])) - sys.exit(1) +import sys + + +if __name__ == '__main__': + assert len(sys.argv) == 2, 'Unexpected number of arguments...' + sys.stderr.write('Error: module command for target [[bad]]{}[[rst]] was not executed due to build graph configuration errors...\n'.format(sys.argv[1])) + sys.exit(1) diff --git a/build/scripts/filter_zip.py b/build/scripts/filter_zip.py index edb6628d6f..b2121b9c9e 100644 --- a/build/scripts/filter_zip.py +++ b/build/scripts/filter_zip.py @@ -6,15 +6,15 @@ import zipfile def pattern_to_regexp(p): - return re.compile( - '^' - + re.escape(p) - .replace(r'\*\*\/', '[_DIR_]') - .replace(r'\*', '[_FILE_]') - .replace('[_DIR_]', '(.*/)?') - .replace('[_FILE_]', '([^/]*)') - + '$' - ) + return re.compile( + '^' + + re.escape(p) + .replace(r'\*\*\/', '[_DIR_]') + .replace(r'\*', '[_FILE_]') + .replace('[_DIR_]', '(.*/)?') + .replace('[_FILE_]', '([^/]*)') + + '$' + ) def is_deathman(positive_filter, negative_filter, candidate): diff --git a/build/scripts/gen_aar_gradle_script.py b/build/scripts/gen_aar_gradle_script.py index b95cc051c4..4594e67278 100644 --- a/build/scripts/gen_aar_gradle_script.py +++ b/build/scripts/gen_aar_gradle_script.py @@ -1,11 +1,11 @@ -import argparse -import os -import tarfile - -FLAT_DIRS_REPO_TEMPLATE='flatDir {{ dirs {dirs} }}\n' -MAVEN_REPO_TEMPLATE='maven {{ url "{repo}" }}\n' -KEYSTORE_TEMLATE='signingConfigs {{ debug {{ storeFile file("{keystore}") }} }}\n' - +import argparse +import os +import tarfile + +FLAT_DIRS_REPO_TEMPLATE='flatDir {{ dirs {dirs} }}\n' +MAVEN_REPO_TEMPLATE='maven {{ url "{repo}" }}\n' +KEYSTORE_TEMLATE='signingConfigs {{ debug {{ storeFile file("{keystore}") }} }}\n' + ENABLE_JAVADOC = 'tasks["bundle${suffix}Aar"].dependsOn packageJavadocTask' DO_NOT_STRIP = '''\ packagingOptions { @@ -16,35 +16,35 @@ DO_NOT_STRIP = '''\ } ''' -AAR_TEMPLATE = """\ -ext.jniLibsDirs = [ - {jni_libs_dirs} -] - -ext.resDirs = [ - {res_dirs} -] - -ext.assetsDirs = [ - {assets_dirs} -] - -ext.javaDirs = [ - {java_dirs} -] - -def aidlDirs = [ - {aidl_dirs} -] - -ext.bundles = [ - {bundles} -] - -ext.androidArs = [ - {aars} -] - +AAR_TEMPLATE = """\ +ext.jniLibsDirs = [ + {jni_libs_dirs} +] + +ext.resDirs = [ + {res_dirs} +] + 
+ext.assetsDirs = [ + {assets_dirs} +] + +ext.javaDirs = [ + {java_dirs} +] + +def aidlDirs = [ + {aidl_dirs} +] + +ext.bundles = [ + {bundles} +] + +ext.androidArs = [ + {aars} +] + ext.compileOnlyAndroidArs = [ {compile_only_aars} ] @@ -53,96 +53,96 @@ def minVersion = 21 def compileVersion = 30 def targetVersion = 30 def buildVersion = '30.0.3' - -import com.android.build.gradle.LibraryPlugin + +import com.android.build.gradle.LibraryPlugin import java.nio.file.Files import java.nio.file.Paths -import java.util.regex.Matcher -import java.util.regex.Pattern +import java.util.regex.Matcher +import java.util.regex.Pattern import java.util.zip.ZipFile - - -apply plugin: 'com.github.dcendents.android-maven' - -buildDir = "$projectDir/build" - -if (!ext.has("packageSuffix")) - ext.packageSuffix = "" - -buildscript {{ -// repositories {{ -// jcenter() -// mavenCentral() -// }} - - repositories {{ - {maven_repos} - }} - - dependencies {{ + + +apply plugin: 'com.github.dcendents.android-maven' + +buildDir = "$projectDir/build" + +if (!ext.has("packageSuffix")) + ext.packageSuffix = "" + +buildscript {{ +// repositories {{ +// jcenter() +// mavenCentral() +// }} + + repositories {{ + {maven_repos} + }} + + dependencies {{ classpath 'com.android.tools.build:gradle:4.0.2' - classpath 'com.github.dcendents:android-maven-gradle-plugin:1.5' - }} -}} - -apply plugin: LibraryPlugin - -repositories {{ -// flatDir {{ -// dirs System.env.PKG_ROOT + '/bundle' -// }} -// maven {{ -// url "http://maven.google.com/" -// }} -// maven {{ -// url "http://artifactory.yandex.net/artifactory/public/" -// }} - - {flat_dirs_repo} - - {maven_repos} -}} - -android {{ - {keystore} - - compileSdkVersion compileVersion - buildToolsVersion buildVersion - - defaultConfig {{ - minSdkVersion minVersion - targetSdkVersion targetVersion - consumerProguardFiles '{proguard_rules}' - }} - - sourceSets {{ - main {{ - manifest.srcFile '{manifest}' - jniLibs.srcDirs = jniLibsDirs - res.srcDirs = resDirs - assets.srcDirs = assetsDirs - java.srcDirs = javaDirs - aidl.srcDirs = aidlDirs - }} - // We don't use this feature, so we set it to nonexisting directory - androidTest.setRoot('bundle/tests') - }} - + classpath 'com.github.dcendents:android-maven-gradle-plugin:1.5' + }} +}} + +apply plugin: LibraryPlugin + +repositories {{ +// flatDir {{ +// dirs System.env.PKG_ROOT + '/bundle' +// }} +// maven {{ +// url "http://maven.google.com/" +// }} +// maven {{ +// url "http://artifactory.yandex.net/artifactory/public/" +// }} + + {flat_dirs_repo} + + {maven_repos} +}} + +android {{ + {keystore} + + compileSdkVersion compileVersion + buildToolsVersion buildVersion + + defaultConfig {{ + minSdkVersion minVersion + targetSdkVersion targetVersion + consumerProguardFiles '{proguard_rules}' + }} + + sourceSets {{ + main {{ + manifest.srcFile '{manifest}' + jniLibs.srcDirs = jniLibsDirs + res.srcDirs = resDirs + assets.srcDirs = assetsDirs + java.srcDirs = javaDirs + aidl.srcDirs = aidlDirs + }} + // We don't use this feature, so we set it to nonexisting directory + androidTest.setRoot('bundle/tests') + }} + {do_not_strip} - dependencies {{ - for (bundle in bundles) + dependencies {{ + for (bundle in bundles) compile("$bundle") {{ - transitive = true - }} - for (bundle in androidArs) + transitive = true + }} + for (bundle in androidArs) compile(bundle) {{ - transitive = true - }} + transitive = true + }} for (bundle in compileOnlyAndroidArs) compileOnly(bundle) - }} - + }} + android.libraryVariants.all {{ variant -> def suffix = 
variant.buildType.name.capitalize() @@ -157,17 +157,17 @@ android {{ }} }} includeEmptyDirs = false - }} - + }} + def manifestFile = android.sourceSets.main.manifest.srcFile def manifestXml = new XmlParser().parse(manifestFile) - + def packageName = manifestXml['@package'] def groupName = packageName.tokenize('.')[0..-2].join('.') - + def androidNs = new groovy.xml.Namespace("http://schemas.android.com/apk/res/android") def packageVersion = manifestXml.attributes()[androidNs.versionName] - + def writePomTask = project.tasks.create(name: "writePom${{suffix}}") {{ pom {{ project {{ @@ -177,10 +177,10 @@ android {{ }} }}.writeTo("$buildDir/${{rootProject.name}}$packageSuffix-pom.xml") }} - + tasks["bundle${{suffix}}Aar"].dependsOn sourcesJarTask tasks["bundle${{suffix}}Aar"].dependsOn writePomTask - }} + }} android.libraryVariants.all {{ variant -> def capitalizedVariantName = variant.name.capitalize() @@ -245,7 +245,7 @@ android {{ {enable_javadoc} }} -}} +}} private def extractClassesJar(aarPath, outputPath) {{ if (!aarPath.exists()) {{ @@ -265,36 +265,36 @@ private def extractClassesJar(aarPath, outputPath) {{ zip.close() }} -""" - - -def gen_build_script(args): - - def wrap(items): - return ',\n '.join('"{}"'.format(x) for x in items) - - bundles = [] - bundles_dirs = set(args.flat_repos) - for bundle in args.bundles: - dir_name, base_name = os.path.split(bundle) - assert(len(dir_name) > 0 and len(base_name) > 0) - name, ext = os.path.splitext(base_name) - assert(len(name) > 0 and ext == '.aar') - bundles_dirs.add(dir_name) - bundles.append('com.yandex:{}@aar'.format(name)) - - if len(bundles_dirs) > 0: - flat_dirs_repo = FLAT_DIRS_REPO_TEMPLATE.format(dirs=wrap(bundles_dirs)) - else: - flat_dirs_repo = '' - - maven_repos = ''.join(MAVEN_REPO_TEMPLATE.format(repo=repo) for repo in args.maven_repos) - - if args.keystore: - keystore = KEYSTORE_TEMLATE.format(keystore=args.keystore) - else: - keystore = '' - +""" + + +def gen_build_script(args): + + def wrap(items): + return ',\n '.join('"{}"'.format(x) for x in items) + + bundles = [] + bundles_dirs = set(args.flat_repos) + for bundle in args.bundles: + dir_name, base_name = os.path.split(bundle) + assert(len(dir_name) > 0 and len(base_name) > 0) + name, ext = os.path.splitext(base_name) + assert(len(name) > 0 and ext == '.aar') + bundles_dirs.add(dir_name) + bundles.append('com.yandex:{}@aar'.format(name)) + + if len(bundles_dirs) > 0: + flat_dirs_repo = FLAT_DIRS_REPO_TEMPLATE.format(dirs=wrap(bundles_dirs)) + else: + flat_dirs_repo = '' + + maven_repos = ''.join(MAVEN_REPO_TEMPLATE.format(repo=repo) for repo in args.maven_repos) + + if args.keystore: + keystore = KEYSTORE_TEMLATE.format(keystore=args.keystore) + else: + keystore = '' + if args.generate_doc: enable_javadoc = ENABLE_JAVADOC else: @@ -305,7 +305,7 @@ def gen_build_script(args): else: do_not_strip = '' - return AAR_TEMPLATE.format( + return AAR_TEMPLATE.format( aars=wrap(args.aars), compile_only_aars=wrap(args.compile_only_aars), aidl_dirs=wrap(args.aidl_dirs), @@ -321,54 +321,54 @@ def gen_build_script(args): maven_repos=maven_repos, proguard_rules=args.proguard_rules, res_dirs=wrap(args.res_dirs), - ) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() + ) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() parser.add_argument('--aars', nargs='*', default=[]) parser.add_argument('--compile-only-aars', nargs='*', default=[]) parser.add_argument('--aidl-dirs', nargs='*', default=[]) - parser.add_argument('--assets-dirs', nargs='*', 
default=[]) + parser.add_argument('--assets-dirs', nargs='*', default=[]) parser.add_argument('--bundle-name', nargs='?', default='default-bundle-name') - parser.add_argument('--bundles', nargs='*', default=[]) + parser.add_argument('--bundles', nargs='*', default=[]) parser.add_argument('--do-not-strip', action='store_true') parser.add_argument('--flat-repos', nargs='*', default=[]) parser.add_argument('--generate-doc', action='store_true') - parser.add_argument('--java-dirs', nargs='*', default=[]) - parser.add_argument('--jni-libs-dirs', nargs='*', default=[]) + parser.add_argument('--java-dirs', nargs='*', default=[]) + parser.add_argument('--jni-libs-dirs', nargs='*', default=[]) parser.add_argument('--keystore', default=None) - parser.add_argument('--manifest', required=True) - parser.add_argument('--maven-repos', nargs='*', default=[]) - parser.add_argument('--output-dir', required=True) + parser.add_argument('--manifest', required=True) + parser.add_argument('--maven-repos', nargs='*', default=[]) + parser.add_argument('--output-dir', required=True) parser.add_argument('--peers', nargs='*', default=[]) - parser.add_argument('--proguard-rules', nargs='?', default=None) - parser.add_argument('--res-dirs', nargs='*', default=[]) - args = parser.parse_args() - - if args.proguard_rules is None: - args.proguard_rules = os.path.join(args.output_dir, 'proguard-rules.txt') - with open(args.proguard_rules, 'w') as f: - pass - - for index, jsrc in enumerate(filter(lambda x: x.endswith('.jsrc'), args.peers)): - jsrc_dir = os.path.join(args.output_dir, 'jsrc_{}'.format(str(index))) - os.makedirs(jsrc_dir) - with tarfile.open(jsrc, 'r') as tar: - tar.extractall(path=jsrc_dir) - args.java_dirs.append(jsrc_dir) - - args.build_gradle = os.path.join(args.output_dir, 'build.gradle') - args.settings_gradle = os.path.join(args.output_dir, 'settings.gradle') + parser.add_argument('--proguard-rules', nargs='?', default=None) + parser.add_argument('--res-dirs', nargs='*', default=[]) + args = parser.parse_args() + + if args.proguard_rules is None: + args.proguard_rules = os.path.join(args.output_dir, 'proguard-rules.txt') + with open(args.proguard_rules, 'w') as f: + pass + + for index, jsrc in enumerate(filter(lambda x: x.endswith('.jsrc'), args.peers)): + jsrc_dir = os.path.join(args.output_dir, 'jsrc_{}'.format(str(index))) + os.makedirs(jsrc_dir) + with tarfile.open(jsrc, 'r') as tar: + tar.extractall(path=jsrc_dir) + args.java_dirs.append(jsrc_dir) + + args.build_gradle = os.path.join(args.output_dir, 'build.gradle') + args.settings_gradle = os.path.join(args.output_dir, 'settings.gradle') args.gradle_properties = os.path.join(args.output_dir, 'gradle.properties') - - content = gen_build_script(args) - with open(args.build_gradle, 'w') as f: - f.write(content) - + + content = gen_build_script(args) + with open(args.build_gradle, 'w') as f: + f.write(content) + with open(args.gradle_properties, 'w') as f: f.write('android.useAndroidX=true') - if args.bundle_name: - with open(args.settings_gradle, 'w') as f: - f.write('rootProject.name = "{}"'.format(args.bundle_name)) + if args.bundle_name: + with open(args.settings_gradle, 'w') as f: + f.write('rootProject.name = "{}"'.format(args.bundle_name)) diff --git a/build/scripts/gen_py_protos.py b/build/scripts/gen_py_protos.py index 04c9de2a9e..08397472f9 100644 --- a/build/scripts/gen_py_protos.py +++ b/build/scripts/gen_py_protos.py @@ -40,7 +40,7 @@ def main(): plugin_out_dirs_orig[plugin] = args[i][len(plugin_out_dir_arg):] assert 
plugin_out_dirs_orig[plugin] == out_dir_orig, 'Params "{0}" and "{1}" expected to have the same value'.format(OUT_DIR_ARG, plugin_out_dir_arg) args[i] = plugin_out_dir_arg + out_dir_temp - + assert out_dir_temp, 'Param "{0}" not found'.format(OUT_DIR_ARG) retcode = subprocess.call(args) diff --git a/build/scripts/gen_test_apk_gradle_script.py b/build/scripts/gen_test_apk_gradle_script.py index 2d4319336b..d1a78ceb1c 100644 --- a/build/scripts/gen_test_apk_gradle_script.py +++ b/build/scripts/gen_test_apk_gradle_script.py @@ -1,193 +1,193 @@ -import argparse -import os -import tarfile -import xml.etree.ElementTree as etree - -FLAT_DIRS_REPO_TEMPLATE='flatDir {{ dirs {dirs} }}\n' -MAVEN_REPO_TEMPLATE='maven {{ url "{repo}" }}\n' -KEYSTORE_TEMLATE='signingConfigs {{ debug {{ storeFile file("{keystore}") }} }}\n' - -TEST_APK_TEMPLATE = """\ -ext.jniLibsDirs = [ - {jni_libs_dirs} -] -ext.resDirs = [ - {res_dirs} -] -ext.javaDirs = [ - {java_dirs} -] -ext.bundles = [ - {bundles} -] - -buildscript {{ -// repositories {{ -// jcenter() -// }} - - repositories {{ - {maven_repos} - }} - - dependencies {{ +import argparse +import os +import tarfile +import xml.etree.ElementTree as etree + +FLAT_DIRS_REPO_TEMPLATE='flatDir {{ dirs {dirs} }}\n' +MAVEN_REPO_TEMPLATE='maven {{ url "{repo}" }}\n' +KEYSTORE_TEMLATE='signingConfigs {{ debug {{ storeFile file("{keystore}") }} }}\n' + +TEST_APK_TEMPLATE = """\ +ext.jniLibsDirs = [ + {jni_libs_dirs} +] +ext.resDirs = [ + {res_dirs} +] +ext.javaDirs = [ + {java_dirs} +] +ext.bundles = [ + {bundles} +] + +buildscript {{ +// repositories {{ +// jcenter() +// }} + + repositories {{ + {maven_repos} + }} + + dependencies {{ classpath 'com.android.tools.build:gradle:3.5.3' - }} -}} - -apply plugin: 'com.android.application' - -repositories {{ -// maven {{ -// url "http://maven.google.com/" -// }} -// maven {{ -// url "http://artifactory.yandex.net/artifactory/public/" -// }} -// flatDir {{ -// dirs System.env.PKG_ROOT + '/bundle' -// }} - - {flat_dirs_repo} - - {maven_repos} -}} - -dependencies {{ - for (bundle in bundles) {{ - compile("$bundle") - }} -}} - -android {{ - {keystore} - + }} +}} + +apply plugin: 'com.android.application' + +repositories {{ +// maven {{ +// url "http://maven.google.com/" +// }} +// maven {{ +// url "http://artifactory.yandex.net/artifactory/public/" +// }} +// flatDir {{ +// dirs System.env.PKG_ROOT + '/bundle' +// }} + + {flat_dirs_repo} + + {maven_repos} +}} + +dependencies {{ + for (bundle in bundles) {{ + compile("$bundle") + }} +}} + +android {{ + {keystore} + compileSdkVersion 30 buildToolsVersion "30.0.3" - - - defaultConfig {{ + + + defaultConfig {{ minSdkVersion 21 targetSdkVersion 30 - applicationId "{app_id}" - }} - - sourceSets {{ - main {{ - manifest.srcFile 'Manifest.xml' - jniLibs.srcDirs = jniLibsDirs - res.srcDirs = resDirs - java.srcDirs = javaDirs - }} - }} - - applicationVariants.all {{ variant -> - variant.outputs.each {{ output -> + applicationId "{app_id}" + }} + + sourceSets {{ + main {{ + manifest.srcFile 'Manifest.xml' + jniLibs.srcDirs = jniLibsDirs + res.srcDirs = resDirs + java.srcDirs = javaDirs + }} + }} + + applicationVariants.all {{ variant -> + variant.outputs.each {{ output -> def fileName = "$projectDir/output/{app_id}.apk" output.outputFileName = new File(output.outputFile.parent, fileName).getName() - }} - }} - - dependencies {{ + }} + }} + + dependencies {{ implementation 'com.google.android.gms:play-services-location:16.0.0' implementation 'com.google.android.gms:play-services-gcm:16.0.0' 
implementation 'com.evernote:android-job:1.2.6' implementation 'androidx.annotation:annotation:1.1.0' implementation 'androidx.core:core:1.1.0' - }} -}} -""" - - -def create_native_properties(output_dir, library_name): - native_properties_file = os.path.join(output_dir, 'native_library_name.xml') - resources = etree.Element('resources') - name = etree.SubElement(resources, 'item', dict(name='native_library_name', type='string')) - name.text = library_name - etree.ElementTree(resources).write(native_properties_file, xml_declaration=True, encoding='utf-8') - - -def gen_build_script(args): - def wrap(items): - return ',\n '.join('"{}"'.format(x) for x in items) - - bundles = [] - bundles_dirs = set(args.flat_repos) - for bundle in args.bundles: - dir_name, base_name = os.path.split(bundle) - assert(len(dir_name) > 0 and len(base_name) > 0) - name, ext = os.path.splitext(base_name) - assert(len(name) > 0 and ext == '.aar') - bundles_dirs.add(dir_name) - bundles.append('com.yandex:{}@aar'.format(name)) - - if len(bundles_dirs) > 0: - flat_dirs_repo = FLAT_DIRS_REPO_TEMPLATE.format(dirs=wrap(bundles_dirs)) - else: - flat_dirs_repo = '' - - maven_repos = ''.join(MAVEN_REPO_TEMPLATE.format(repo=repo) for repo in args.maven_repos) - - if args.keystore: - keystore = KEYSTORE_TEMLATE.format(keystore=args.keystore) - else: - keystore = '' - - return TEST_APK_TEMPLATE.format( - app_id=args.app_id, - jni_libs_dirs=wrap(args.jni_libs_dirs), - res_dirs=wrap(args.res_dirs), - java_dirs=wrap(args.java_dirs), - maven_repos=maven_repos, - bundles=wrap(bundles), - flat_dirs_repo=flat_dirs_repo, - keystore=keystore, - ) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--aars', nargs='*', default=[]) - parser.add_argument('--app-id', required=True) - parser.add_argument('--assets-dirs', nargs='*', default=[]) - parser.add_argument('--bundles', nargs='*', default=[]) - parser.add_argument('--bundle-name', nargs='?', default=None) - parser.add_argument('--java-dirs', nargs='*', default=[]) - parser.add_argument('--jni-libs-dirs', nargs='*', default=[]) - parser.add_argument('--library-name', required=True) - parser.add_argument('--manifest', required=True) - parser.add_argument('--flat-repos', nargs='*', default=[]) - parser.add_argument('--maven-repos', nargs='*', default=[]) - parser.add_argument('--output-dir', required=True) - parser.add_argument('--peers', nargs='*', default=[]) - parser.add_argument('--keystore', default=None) - parser.add_argument('--res-dirs', nargs='*', default=[]) - args = parser.parse_args() - - for index, jsrc in enumerate(filter(lambda x: x.endswith('.jsrc'), args.peers)): - jsrc_dir = os.path.join(args.output_dir, 'jsrc_{}'.format(str(index))) - os.makedirs(jsrc_dir) - with tarfile.open(jsrc, 'r') as tar: - tar.extractall(path=jsrc_dir) - args.java_dirs.append(jsrc_dir) - - args.build_gradle = os.path.join(args.output_dir, 'build.gradle') - args.settings_gradle = os.path.join(args.output_dir, 'settings.gradle') + }} +}} +""" + + +def create_native_properties(output_dir, library_name): + native_properties_file = os.path.join(output_dir, 'native_library_name.xml') + resources = etree.Element('resources') + name = etree.SubElement(resources, 'item', dict(name='native_library_name', type='string')) + name.text = library_name + etree.ElementTree(resources).write(native_properties_file, xml_declaration=True, encoding='utf-8') + + +def gen_build_script(args): + def wrap(items): + return ',\n '.join('"{}"'.format(x) for x in items) + + bundles = [] 
+ bundles_dirs = set(args.flat_repos) + for bundle in args.bundles: + dir_name, base_name = os.path.split(bundle) + assert(len(dir_name) > 0 and len(base_name) > 0) + name, ext = os.path.splitext(base_name) + assert(len(name) > 0 and ext == '.aar') + bundles_dirs.add(dir_name) + bundles.append('com.yandex:{}@aar'.format(name)) + + if len(bundles_dirs) > 0: + flat_dirs_repo = FLAT_DIRS_REPO_TEMPLATE.format(dirs=wrap(bundles_dirs)) + else: + flat_dirs_repo = '' + + maven_repos = ''.join(MAVEN_REPO_TEMPLATE.format(repo=repo) for repo in args.maven_repos) + + if args.keystore: + keystore = KEYSTORE_TEMLATE.format(keystore=args.keystore) + else: + keystore = '' + + return TEST_APK_TEMPLATE.format( + app_id=args.app_id, + jni_libs_dirs=wrap(args.jni_libs_dirs), + res_dirs=wrap(args.res_dirs), + java_dirs=wrap(args.java_dirs), + maven_repos=maven_repos, + bundles=wrap(bundles), + flat_dirs_repo=flat_dirs_repo, + keystore=keystore, + ) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--aars', nargs='*', default=[]) + parser.add_argument('--app-id', required=True) + parser.add_argument('--assets-dirs', nargs='*', default=[]) + parser.add_argument('--bundles', nargs='*', default=[]) + parser.add_argument('--bundle-name', nargs='?', default=None) + parser.add_argument('--java-dirs', nargs='*', default=[]) + parser.add_argument('--jni-libs-dirs', nargs='*', default=[]) + parser.add_argument('--library-name', required=True) + parser.add_argument('--manifest', required=True) + parser.add_argument('--flat-repos', nargs='*', default=[]) + parser.add_argument('--maven-repos', nargs='*', default=[]) + parser.add_argument('--output-dir', required=True) + parser.add_argument('--peers', nargs='*', default=[]) + parser.add_argument('--keystore', default=None) + parser.add_argument('--res-dirs', nargs='*', default=[]) + args = parser.parse_args() + + for index, jsrc in enumerate(filter(lambda x: x.endswith('.jsrc'), args.peers)): + jsrc_dir = os.path.join(args.output_dir, 'jsrc_{}'.format(str(index))) + os.makedirs(jsrc_dir) + with tarfile.open(jsrc, 'r') as tar: + tar.extractall(path=jsrc_dir) + args.java_dirs.append(jsrc_dir) + + args.build_gradle = os.path.join(args.output_dir, 'build.gradle') + args.settings_gradle = os.path.join(args.output_dir, 'settings.gradle') args.gradle_properties = os.path.join(args.output_dir, 'gradle.properties') - - content = gen_build_script(args) - with open(args.build_gradle, 'w') as f: - f.write(content) - + + content = gen_build_script(args) + with open(args.build_gradle, 'w') as f: + f.write(content) + with open(args.gradle_properties, 'w') as f: f.write('''android.enableJetifier=true android.useAndroidX=true org.gradle.jvmargs=-Xmx8192m -XX:MaxPermSize=512m''') - if args.bundle_name: - with open(args.settings_gradle, 'w') as f: - f.write('rootProject.name = "{}"'.format(args.bundle_name)) - - values_dir = os.path.join(args.output_dir, 'res', 'values') - os.makedirs(values_dir) - create_native_properties(values_dir, args.library_name) + if args.bundle_name: + with open(args.settings_gradle, 'w') as f: + f.write('rootProject.name = "{}"'.format(args.bundle_name)) + + values_dir = os.path.join(args.output_dir, 'res', 'values') + os.makedirs(values_dir) + create_native_properties(values_dir, args.library_name) diff --git a/build/scripts/generate_mf.py b/build/scripts/generate_mf.py index c7b8a72cbd..a44a969980 100644 --- a/build/scripts/generate_mf.py +++ b/build/scripts/generate_mf.py @@ -67,11 +67,11 @@ def generate_mf(): build_root 
= options.build_root file_name = os.path.join(build_root, options.output) - if options.type != 'LIBRARY': - for rel_filename in peers: - with open(os.path.join(build_root, rel_filename + '.mf')) as peer_file: - peer_meta = json.load(peer_file) - meta['dependencies'].append(peer_meta) + if options.type != 'LIBRARY': + for rel_filename in peers: + with open(os.path.join(build_root, rel_filename + '.mf')) as peer_file: + peer_meta = json.load(peer_file) + meta['dependencies'].append(peer_meta) if credits: union_texts = [] diff --git a/build/scripts/go_proto_wrapper.py b/build/scripts/go_proto_wrapper.py index 74c3aaf82a..065120b6eb 100644 --- a/build/scripts/go_proto_wrapper.py +++ b/build/scripts/go_proto_wrapper.py @@ -1,82 +1,82 @@ -from __future__ import absolute_import -import os -import re -import shutil -import subprocess -import sys -import tempfile -from six.moves import range - - -OUT_DIR_FLAG_PATTERN = re.compile(r'^(--go(([-_]\w+))*_out=)') - - -def move_tree(src_root, dst_root): - for root, _, files in os.walk(src_root): - rel_dir = os.path.relpath(root, src_root) - dst_dir = os.path.join(dst_root, rel_dir) - if not os.path.exists(dst_dir): - os.mkdir(dst_dir) - for file in files: - os.rename(os.path.join(root, file), os.path.join(dst_dir, file)) - - -def main(arcadia_prefix, contrib_prefix, proto_namespace, args): - out_dir_orig = None - out_dir_temp = None - for i in range(len(args)): - m = re.match(OUT_DIR_FLAG_PATTERN, args[i]) - if m: - out_dir_flag = m.group(1) - index = max(len(out_dir_flag), args[i].rfind(':')+1) - out_dir = args[i][index:] - if out_dir_orig: - assert out_dir_orig == out_dir, 'Output directories do not match: [{}] and [{}]'.format(out_dir_orig, out_dir) - else: - out_dir_orig = out_dir - out_dir_temp = tempfile.mkdtemp(dir=out_dir_orig) - args[i] = (args[i][:index] + out_dir_temp).replace('|', ',') - assert out_dir_temp is not None, 'Output directory is not specified' - - try: +from __future__ import absolute_import +import os +import re +import shutil +import subprocess +import sys +import tempfile +from six.moves import range + + +OUT_DIR_FLAG_PATTERN = re.compile(r'^(--go(([-_]\w+))*_out=)') + + +def move_tree(src_root, dst_root): + for root, _, files in os.walk(src_root): + rel_dir = os.path.relpath(root, src_root) + dst_dir = os.path.join(dst_root, rel_dir) + if not os.path.exists(dst_dir): + os.mkdir(dst_dir) + for file in files: + os.rename(os.path.join(root, file), os.path.join(dst_dir, file)) + + +def main(arcadia_prefix, contrib_prefix, proto_namespace, args): + out_dir_orig = None + out_dir_temp = None + for i in range(len(args)): + m = re.match(OUT_DIR_FLAG_PATTERN, args[i]) + if m: + out_dir_flag = m.group(1) + index = max(len(out_dir_flag), args[i].rfind(':')+1) + out_dir = args[i][index:] + if out_dir_orig: + assert out_dir_orig == out_dir, 'Output directories do not match: [{}] and [{}]'.format(out_dir_orig, out_dir) + else: + out_dir_orig = out_dir + out_dir_temp = tempfile.mkdtemp(dir=out_dir_orig) + args[i] = (args[i][:index] + out_dir_temp).replace('|', ',') + assert out_dir_temp is not None, 'Output directory is not specified' + + try: subprocess.check_output(args, stdin=None, stderr=subprocess.STDOUT) - except subprocess.CalledProcessError as e: - sys.stderr.write('{} returned non-zero exit code {}.\n{}\n'.format(' '.join(e.cmd), e.returncode, e.output)) - return e.returncode - - # All Arcadia GO projects should have 'a.yandex-team.ru/' namespace prefix. 
- # If the namespace doesn't start with 'a.yandex-team.ru/' prefix then this - # project is from vendor directory under the root of Arcadia. - out_dir_src = os.path.normpath(os.path.join(out_dir_temp, arcadia_prefix, proto_namespace)) - out_dir_dst = out_dir_orig - is_from_contrib = False - if not os.path.isdir(out_dir_src): - is_from_contrib = True - out_dir_src = out_dir_temp - out_dir_dst = os.path.join(out_dir_orig, contrib_prefix) - - if not os.path.exists(out_dir_src) or is_from_contrib: - protos = [x for x in args if x.endswith('.proto')] - if not is_from_contrib or not all(x.startswith(contrib_prefix) for x in protos): - proto_list = [] - option_re = re.compile(r'^\s*option\s+go_package\s*=\s*') - for arg in protos: - with open(arg, 'r') as f: - if not any([re.match(option_re, line) for line in f]): - proto_list.append(arg) - if proto_list: - sys.stderr.write( - '\nError: Option go_package is not specified in the following proto files: {}\n' - '\nNOTE! You can find detailed description of how to properly set go_package ' - 'option here https://wiki.yandex-team.ru/devrules/Go/#protobufigrpc'.format(', '.join(proto_list))) - return 1 - - move_tree(out_dir_src, out_dir_dst) - - shutil.rmtree(out_dir_temp) - - return 0 - - -if __name__ == '__main__': - sys.exit(main(os.path.normpath(sys.argv[1]), os.path.normpath(sys.argv[2]), os.path.normpath(sys.argv[3]), sys.argv[4:])) + except subprocess.CalledProcessError as e: + sys.stderr.write('{} returned non-zero exit code {}.\n{}\n'.format(' '.join(e.cmd), e.returncode, e.output)) + return e.returncode + + # All Arcadia GO projects should have 'a.yandex-team.ru/' namespace prefix. + # If the namespace doesn't start with 'a.yandex-team.ru/' prefix then this + # project is from vendor directory under the root of Arcadia. + out_dir_src = os.path.normpath(os.path.join(out_dir_temp, arcadia_prefix, proto_namespace)) + out_dir_dst = out_dir_orig + is_from_contrib = False + if not os.path.isdir(out_dir_src): + is_from_contrib = True + out_dir_src = out_dir_temp + out_dir_dst = os.path.join(out_dir_orig, contrib_prefix) + + if not os.path.exists(out_dir_src) or is_from_contrib: + protos = [x for x in args if x.endswith('.proto')] + if not is_from_contrib or not all(x.startswith(contrib_prefix) for x in protos): + proto_list = [] + option_re = re.compile(r'^\s*option\s+go_package\s*=\s*') + for arg in protos: + with open(arg, 'r') as f: + if not any([re.match(option_re, line) for line in f]): + proto_list.append(arg) + if proto_list: + sys.stderr.write( + '\nError: Option go_package is not specified in the following proto files: {}\n' + '\nNOTE! 
You can find detailed description of how to properly set go_package ' + 'option here https://wiki.yandex-team.ru/devrules/Go/#protobufigrpc'.format(', '.join(proto_list))) + return 1 + + move_tree(out_dir_src, out_dir_dst) + + shutil.rmtree(out_dir_temp) + + return 0 + + +if __name__ == '__main__': + sys.exit(main(os.path.normpath(sys.argv[1]), os.path.normpath(sys.argv[2]), os.path.normpath(sys.argv[3]), sys.argv[4:])) diff --git a/build/scripts/go_tool.py b/build/scripts/go_tool.py index 49471d792e..c1e98b20c0 100644 --- a/build/scripts/go_tool.py +++ b/build/scripts/go_tool.py @@ -1,864 +1,864 @@ -from __future__ import absolute_import, unicode_literals -import argparse -import copy -import json -import os -import re -import shutil -import subprocess -import sys -import tarfile -import tempfile -import threading -import six -from functools import reduce - +from __future__ import absolute_import, unicode_literals +import argparse +import copy +import json +import os +import re +import shutil +import subprocess +import sys +import tarfile +import tempfile +import threading +import six +from functools import reduce + import process_command_files as pcf import process_whole_archive_option as pwa -arc_project_prefix = 'a.yandex-team.ru/' -std_lib_prefix = 'contrib/go/_std/src/' -vendor_prefix = 'vendor/' -vet_info_ext = '.vet.out' -vet_report_ext = '.vet.txt' - -FIXED_CGO1_SUFFIX='.fixed.cgo1.go' - -COMPILE_OPTIMIZATION_FLAGS=('-N',) - - -def get_trimpath_args(args): - return ['-trimpath', args.trimpath] if args.trimpath else [] - - -def preprocess_cgo1(src_path, dst_path, source_root): - with open(src_path, 'r') as f: - content = f.read() - content = content.replace('__ARCADIA_SOURCE_ROOT_PREFIX__', source_root) - with open(dst_path, 'w') as f: - f.write(content) - - -def preprocess_args(args): - # Temporary work around for noauto - if args.cgo_srcs and len(args.cgo_srcs) > 0: - cgo_srcs_set = set(args.cgo_srcs) - args.srcs = [x for x in args.srcs if x not in cgo_srcs_set] - - args.pkg_root = os.path.join(args.toolchain_root, 'pkg') - toolchain_tool_root = os.path.join(args.pkg_root, 'tool', '{}_{}'.format(args.host_os, args.host_arch)) - args.go_compile = os.path.join(toolchain_tool_root, 'compile') - args.go_cgo = os.path.join(toolchain_tool_root, 'cgo') - args.go_link = os.path.join(toolchain_tool_root, 'link') - args.go_asm = os.path.join(toolchain_tool_root, 'asm') - args.go_pack = os.path.join(toolchain_tool_root, 'pack') - args.go_vet = os.path.join(toolchain_tool_root, 'vet') if args.vet is True else args.vet - args.output = os.path.normpath(args.output) - args.vet_report_output = vet_report_output_name(args.output, args.vet_report_ext) - args.trimpath = None - if args.debug_root_map: - roots = {'build': args.build_root, 'source': args.source_root, 'tools': args.tools_root} - replaces = [] - for root in args.debug_root_map.split(';'): - src, dst = root.split('=', 1) - assert src in roots - replaces.append('{}=>{}'.format(roots[src], dst)) - del roots[src] - assert len(replaces) > 0 - args.trimpath = ';'.join(replaces) - args.build_root = os.path.normpath(args.build_root) - args.build_root_dir = args.build_root + os.path.sep - args.source_root = os.path.normpath(args.source_root) - args.source_root_dir = args.source_root + os.path.sep - args.output_root = os.path.normpath(args.output_root) - args.import_map = {} - args.module_map = {} - if args.cgo_peers: - args.cgo_peers = [x for x in args.cgo_peers if not x.endswith('.fake.pkg')] - - srcs = [] - for f in args.srcs: - if 
f.endswith('.gosrc'): - with tarfile.open(f, 'r') as tar: - srcs.extend(os.path.join(args.output_root, src) for src in tar.getnames()) - tar.extractall(path=args.output_root) - else: - srcs.append(f) - args.srcs = srcs - - assert args.mode == 'test' or args.test_srcs is None and args.xtest_srcs is None - # add lexical oreder by basename for go sources - args.srcs.sort(key=lambda x: os.path.basename(x)) - if args.test_srcs: - args.srcs += sorted(args.test_srcs, key=lambda x: os.path.basename(x)) - del args.test_srcs - if args.xtest_srcs: - args.xtest_srcs.sort(key=lambda x: os.path.basename(x)) - - # compute root relative module dir path - assert args.output is None or args.output_root == os.path.dirname(args.output) - assert args.output_root.startswith(args.build_root_dir) - args.module_path = args.output_root[len(args.build_root_dir):] - args.source_module_dir = os.path.join(args.source_root, args.test_import_path or args.module_path) + os.path.sep - assert len(args.module_path) > 0 - args.import_path, args.is_std = get_import_path(args.module_path) - - assert args.asmhdr is None or args.word == 'go' - - srcs = [] - for f in args.srcs: - if f.endswith(FIXED_CGO1_SUFFIX) and f.startswith(args.build_root_dir): - path = os.path.join(args.output_root, '{}.cgo1.go'.format(os.path.basename(f[:-len(FIXED_CGO1_SUFFIX)]))) - srcs.append(path) - preprocess_cgo1(f, path, args.source_root) - else: - srcs.append(f) - args.srcs = srcs - +arc_project_prefix = 'a.yandex-team.ru/' +std_lib_prefix = 'contrib/go/_std/src/' +vendor_prefix = 'vendor/' +vet_info_ext = '.vet.out' +vet_report_ext = '.vet.txt' + +FIXED_CGO1_SUFFIX='.fixed.cgo1.go' + +COMPILE_OPTIMIZATION_FLAGS=('-N',) + + +def get_trimpath_args(args): + return ['-trimpath', args.trimpath] if args.trimpath else [] + + +def preprocess_cgo1(src_path, dst_path, source_root): + with open(src_path, 'r') as f: + content = f.read() + content = content.replace('__ARCADIA_SOURCE_ROOT_PREFIX__', source_root) + with open(dst_path, 'w') as f: + f.write(content) + + +def preprocess_args(args): + # Temporary work around for noauto + if args.cgo_srcs and len(args.cgo_srcs) > 0: + cgo_srcs_set = set(args.cgo_srcs) + args.srcs = [x for x in args.srcs if x not in cgo_srcs_set] + + args.pkg_root = os.path.join(args.toolchain_root, 'pkg') + toolchain_tool_root = os.path.join(args.pkg_root, 'tool', '{}_{}'.format(args.host_os, args.host_arch)) + args.go_compile = os.path.join(toolchain_tool_root, 'compile') + args.go_cgo = os.path.join(toolchain_tool_root, 'cgo') + args.go_link = os.path.join(toolchain_tool_root, 'link') + args.go_asm = os.path.join(toolchain_tool_root, 'asm') + args.go_pack = os.path.join(toolchain_tool_root, 'pack') + args.go_vet = os.path.join(toolchain_tool_root, 'vet') if args.vet is True else args.vet + args.output = os.path.normpath(args.output) + args.vet_report_output = vet_report_output_name(args.output, args.vet_report_ext) + args.trimpath = None + if args.debug_root_map: + roots = {'build': args.build_root, 'source': args.source_root, 'tools': args.tools_root} + replaces = [] + for root in args.debug_root_map.split(';'): + src, dst = root.split('=', 1) + assert src in roots + replaces.append('{}=>{}'.format(roots[src], dst)) + del roots[src] + assert len(replaces) > 0 + args.trimpath = ';'.join(replaces) + args.build_root = os.path.normpath(args.build_root) + args.build_root_dir = args.build_root + os.path.sep + args.source_root = os.path.normpath(args.source_root) + args.source_root_dir = args.source_root + os.path.sep + args.output_root 
= os.path.normpath(args.output_root) + args.import_map = {} + args.module_map = {} + if args.cgo_peers: + args.cgo_peers = [x for x in args.cgo_peers if not x.endswith('.fake.pkg')] + + srcs = [] + for f in args.srcs: + if f.endswith('.gosrc'): + with tarfile.open(f, 'r') as tar: + srcs.extend(os.path.join(args.output_root, src) for src in tar.getnames()) + tar.extractall(path=args.output_root) + else: + srcs.append(f) + args.srcs = srcs + + assert args.mode == 'test' or args.test_srcs is None and args.xtest_srcs is None + # add lexical oreder by basename for go sources + args.srcs.sort(key=lambda x: os.path.basename(x)) + if args.test_srcs: + args.srcs += sorted(args.test_srcs, key=lambda x: os.path.basename(x)) + del args.test_srcs + if args.xtest_srcs: + args.xtest_srcs.sort(key=lambda x: os.path.basename(x)) + + # compute root relative module dir path + assert args.output is None or args.output_root == os.path.dirname(args.output) + assert args.output_root.startswith(args.build_root_dir) + args.module_path = args.output_root[len(args.build_root_dir):] + args.source_module_dir = os.path.join(args.source_root, args.test_import_path or args.module_path) + os.path.sep + assert len(args.module_path) > 0 + args.import_path, args.is_std = get_import_path(args.module_path) + + assert args.asmhdr is None or args.word == 'go' + + srcs = [] + for f in args.srcs: + if f.endswith(FIXED_CGO1_SUFFIX) and f.startswith(args.build_root_dir): + path = os.path.join(args.output_root, '{}.cgo1.go'.format(os.path.basename(f[:-len(FIXED_CGO1_SUFFIX)]))) + srcs.append(path) + preprocess_cgo1(f, path, args.source_root) + else: + srcs.append(f) + args.srcs = srcs + if args.extldflags: args.extldflags = pwa.ProcessWholeArchiveOption(args.targ_os).construct_cmd(args.extldflags) - classify_srcs(args.srcs, args) - - -def compare_versions(version1, version2): - def last_index(version): - index = version.find('beta') - return len(version) if index < 0 else index - - v1 = tuple(x.zfill(8) for x in version1[:last_index(version1)].split('.')) - v2 = tuple(x.zfill(8) for x in version2[:last_index(version2)].split('.')) - if v1 == v2: - return 0 - return 1 if v1 < v2 else -1 - - -def get_symlink_or_copyfile(): - os_symlink = getattr(os, 'symlink', None) - if os_symlink is None: - os_symlink = shutil.copyfile - return os_symlink - - -def copy_args(args): - return copy.copy(args) - - -def get_vendor_index(import_path): - index = import_path.rfind('/' + vendor_prefix) - if index < 0: - index = 0 if import_path.startswith(vendor_prefix) else index - else: - index = index + 1 - return index - - -def get_import_path(module_path): - assert len(module_path) > 0 - import_path = module_path.replace('\\', '/') - is_std_module = import_path.startswith(std_lib_prefix) - if is_std_module: - import_path = import_path[len(std_lib_prefix):] - elif import_path.startswith(vendor_prefix): - import_path = import_path[len(vendor_prefix):] - else: - import_path = arc_project_prefix + import_path - assert len(import_path) > 0 - return import_path, is_std_module - - -def call(cmd, cwd, env=None): - # sys.stderr.write('{}\n'.format(' '.join(cmd))) - return subprocess.check_output(cmd, stdin=None, stderr=subprocess.STDOUT, cwd=cwd, env=env) - - -def classify_srcs(srcs, args): - args.go_srcs = [x for x in srcs if x.endswith('.go')] - args.asm_srcs = [x for x in srcs if x.endswith('.s')] - args.objects = [x for x in srcs if x.endswith('.o') or x.endswith('.obj')] - args.symabis = [x for x in srcs if x.endswith('.symabis')] - args.sysos = [x for x in 
srcs if x.endswith('.syso')] - - -def get_import_config_info(peers, gen_importmap, import_map={}, module_map={}): - info = {'importmap': [], 'packagefile': [], 'standard': {}} - if gen_importmap: - for key, value in six.iteritems(import_map): - info['importmap'].append((key, value)) - for peer in peers: - peer_import_path, is_std = get_import_path(os.path.dirname(peer)) - if gen_importmap: - index = get_vendor_index(peer_import_path) - if index >= 0: - index += len(vendor_prefix) - info['importmap'].append((peer_import_path[index:], peer_import_path)) - info['packagefile'].append((peer_import_path, os.path.join(args.build_root, peer))) - if is_std: - info['standard'][peer_import_path] = True - for key, value in six.iteritems(module_map): - info['packagefile'].append((key, value)) - return info - - -def create_import_config(peers, gen_importmap, import_map={}, module_map={}): - lines = [] - info = get_import_config_info(peers, gen_importmap, import_map, module_map) - for key in ('importmap', 'packagefile'): - for item in info[key]: - lines.append('{} {}={}'.format(key, *item)) - if len(lines) > 0: - lines.append('') - content = '\n'.join(lines) - # sys.stderr.writelines('{}\n'.format(l) for l in lines) - with tempfile.NamedTemporaryFile(delete=False) as f: - f.write(content.encode('UTF-8')) - return f.name - return None - - -def create_embed_config(args): - data = { - 'Patterns': {}, - 'Files': {}, - } - for info in args.embed: - pattern = info[0] - if pattern.endswith('/**/*'): - pattern = pattern[:-3] - files = {os.path.relpath(f, args.source_module_dir).replace('\\', '/'): f for f in info[1:]} - data['Patterns'][pattern] = list(files.keys()) - data['Files'].update(files) - # sys.stderr.write('{}\n'.format(json.dumps(data, indent=4))) - with tempfile.NamedTemporaryFile(delete=False, suffix='.embedcfg') as f: - f.write(json.dumps(data).encode('UTF-8')) - return f.name - - -def vet_info_output_name(path, ext=None): - return '{}{}'.format(path, ext or vet_info_ext) - - -def vet_report_output_name(path, ext=None): - return '{}{}'.format(path, ext or vet_report_ext) - - -def get_source_path(args): - return args.test_import_path or args.module_path - - -def gen_vet_info(args): - import_path = args.real_import_path if hasattr(args, 'real_import_path') else args.import_path - info = get_import_config_info(args.peers, True, args.import_map, args.module_map) - - import_map = dict(info['importmap']) - # FIXME(snermolaev): it seems that adding import map for 'fake' package - # does't make any harm (it needs to be revised later) - import_map['unsafe'] = 'unsafe' - - for (key, _) in info['packagefile']: - if key not in import_map: - import_map[key] = key - - data = { - 'ID': import_path, - 'Compiler': 'gc', - 'Dir': os.path.join(args.source_root, get_source_path(args)), - 'ImportPath': import_path, - 'GoFiles': [x for x in args.go_srcs if x.endswith('.go')], - 'NonGoFiles': [x for x in args.go_srcs if not x.endswith('.go')], - 'ImportMap': import_map, - 'PackageFile': dict(info['packagefile']), - 'Standard': dict(info['standard']), - 'PackageVetx': dict((key, vet_info_output_name(value)) for key, value in info['packagefile']), - 'VetxOnly': False, - 'VetxOutput': vet_info_output_name(args.output), - 'SucceedOnTypecheckFailure': False - } - # sys.stderr.write('{}\n'.format(json.dumps(data, indent=4))) - return data - - -def create_vet_config(args, info): - with tempfile.NamedTemporaryFile(delete=False, suffix='.cfg') as f: - f.write(json.dumps(info).encode('UTF-8')) - return f.name - - -def 
decode_vet_report(json_report): - report = '' - if json_report: - try: - full_diags = json.JSONDecoder().decode(json_report.decode('UTF-8')) - except ValueError: - report = json_report - else: - messages = [] - for _, module_diags in six.iteritems(full_diags): - for _, type_diags in six.iteritems(module_diags): - for diag in type_diags: - messages.append('{}: {}'.format(diag['posn'], json.dumps(diag['message']))) - report = '\n'.join(messages) - - return report - - -def dump_vet_report(args, report): - if report: - report = report.replace(args.build_root, '$B') - report = report.replace(args.source_root, '$S') - with open(args.vet_report_output, 'w') as f: - f.write(report) - - -def read_vet_report(args): - assert args - report = '' - if os.path.exists(args.vet_report_output): - with open(args.vet_report_output, 'r') as f: - report += f.read() - return report - - -def dump_vet_report_for_tests(args, *test_args_list): - dump_vet_report(args, reduce(lambda x, y: x + read_vet_report(y), [_f for _f in test_args_list if _f], '')) - - -def do_vet(args): - assert args.vet - info = gen_vet_info(args) - vet_config = create_vet_config(args, info) - cmd = [args.go_vet, '-json'] - if args.vet_flags: - cmd.extend(args.vet_flags) - cmd.append(vet_config) - # sys.stderr.write('>>>> [{}]\n'.format(' '.join(cmd))) - p_vet = subprocess.Popen(cmd, stdin=None, stderr=subprocess.PIPE, stdout=subprocess.PIPE, cwd=args.source_root) - vet_out, vet_err = p_vet.communicate() - report = decode_vet_report(vet_out) if vet_out else '' - dump_vet_report(args, report) - if p_vet.returncode: - raise subprocess.CalledProcessError(returncode=p_vet.returncode, cmd=cmd, output=vet_err) - - -def _do_compile_go(args): - import_path, is_std_module = args.import_path, args.is_std - cmd = [ - args.go_compile, - '-o', - args.output, - '-p', - import_path, - '-D', - '""', - '-goversion', - 'go{}'.format(args.goversion) - ] - cmd.extend(get_trimpath_args(args)) - compiling_runtime = False - if is_std_module: - cmd.append('-std') - if import_path in ('runtime', 'internal/abi', 'internal/bytealg', 'internal/cpu') or import_path.startswith('runtime/internal/'): - cmd.append('-+') - compiling_runtime = True - import_config_name = create_import_config(args.peers, True, args.import_map, args.module_map) - if import_config_name: - cmd += ['-importcfg', import_config_name] - else: - if import_path == 'unsafe' or len(args.objects) > 0 or args.asmhdr: - pass - else: - cmd.append('-complete') - # if compare_versions('1.16', args.goversion) >= 0: - if args.embed: - embed_config_name = create_embed_config(args) - cmd.extend(['-embedcfg', embed_config_name]) - if args.asmhdr: - cmd += ['-asmhdr', args.asmhdr] - # Use .symabis (starting from 1.12 version) - if args.symabis: - cmd += ['-symabis'] + args.symabis - # If 1.12 <= version < 1.13 we have to pass -allabis for 'runtime' and 'runtime/internal/atomic' - # if compare_versions('1.13', args.goversion) >= 0: - # pass - # elif import_path in ('runtime', 'runtime/internal/atomic'): - # cmd.append('-allabis') - compile_workers = '4' - if args.compile_flags: - if compiling_runtime: - cmd.extend(x for x in args.compile_flags if x not in COMPILE_OPTIMIZATION_FLAGS) - else: - cmd.extend(args.compile_flags) - if any([x in ('-race', '-shared') for x in args.compile_flags]): - compile_workers = '1' - cmd += ['-pack', '-c={}'.format(compile_workers)] - cmd += args.go_srcs - call(cmd, args.build_root) - - -class VetThread(threading.Thread): - - def __init__(self, target, args): - super(VetThread, 
self).__init__(target=target, args=args) - self.exc_info = None - - def run(self): - try: - super(VetThread, self).run() - except: - self.exc_info = sys.exc_info() - - def join_with_exception(self, reraise_exception): - self.join() - if reraise_exception and self.exc_info: - six.reraise(self.exc_info[0], self.exc_info[1], self.exc_info[2]) - - -def do_compile_go(args): - raise_exception_from_vet = False - if args.vet: - run_vet = VetThread(target=do_vet, args=(args,)) - run_vet.start() - try: - _do_compile_go(args) - raise_exception_from_vet = True - finally: - if args.vet: - run_vet.join_with_exception(raise_exception_from_vet) - - -def do_compile_asm(args): - def need_compiling_runtime(import_path): - return import_path in ('runtime', 'reflect', 'syscall') or \ - import_path.startswith('runtime/internal/') or \ - compare_versions('1.17', args.goversion) >= 0 and import_path == 'internal/bytealg' - - assert(len(args.srcs) == 1 and len(args.asm_srcs) == 1) - cmd = [args.go_asm] - cmd += get_trimpath_args(args) - cmd += ['-I', args.output_root, '-I', os.path.join(args.pkg_root, 'include')] - cmd += ['-D', 'GOOS_' + args.targ_os, '-D', 'GOARCH_' + args.targ_arch, '-o', args.output] - - # if compare_versions('1.16', args.goversion) >= 0: - cmd += ['-p', args.import_path] - if need_compiling_runtime(args.import_path): - cmd += ['-compiling-runtime'] - - if args.asm_flags: - cmd += args.asm_flags - cmd += args.asm_srcs - call(cmd, args.build_root) - - -def do_link_lib(args): - if len(args.asm_srcs) > 0: - asmargs = copy_args(args) - asmargs.asmhdr = os.path.join(asmargs.output_root, 'go_asm.h') - do_compile_go(asmargs) - for src in asmargs.asm_srcs: - asmargs.srcs = [src] - asmargs.asm_srcs = [src] - asmargs.output = os.path.join(asmargs.output_root, os.path.basename(src) + '.o') - do_compile_asm(asmargs) - args.objects.append(asmargs.output) - else: - do_compile_go(args) - if args.objects or args.sysos: - cmd = [args.go_pack, 'r', args.output] + args.objects + args.sysos - call(cmd, args.build_root) - - -def do_link_exe(args): - assert args.extld is not None - assert args.non_local_peers is not None - compile_args = copy_args(args) - compile_args.output = os.path.join(args.output_root, 'main.a') - compile_args.real_import_path = compile_args.import_path - compile_args.import_path = 'main' + classify_srcs(args.srcs, args) + + +def compare_versions(version1, version2): + def last_index(version): + index = version.find('beta') + return len(version) if index < 0 else index + + v1 = tuple(x.zfill(8) for x in version1[:last_index(version1)].split('.')) + v2 = tuple(x.zfill(8) for x in version2[:last_index(version2)].split('.')) + if v1 == v2: + return 0 + return 1 if v1 < v2 else -1 + + +def get_symlink_or_copyfile(): + os_symlink = getattr(os, 'symlink', None) + if os_symlink is None: + os_symlink = shutil.copyfile + return os_symlink + + +def copy_args(args): + return copy.copy(args) + + +def get_vendor_index(import_path): + index = import_path.rfind('/' + vendor_prefix) + if index < 0: + index = 0 if import_path.startswith(vendor_prefix) else index + else: + index = index + 1 + return index + + +def get_import_path(module_path): + assert len(module_path) > 0 + import_path = module_path.replace('\\', '/') + is_std_module = import_path.startswith(std_lib_prefix) + if is_std_module: + import_path = import_path[len(std_lib_prefix):] + elif import_path.startswith(vendor_prefix): + import_path = import_path[len(vendor_prefix):] + else: + import_path = arc_project_prefix + import_path + assert 
len(import_path) > 0 + return import_path, is_std_module + + +def call(cmd, cwd, env=None): + # sys.stderr.write('{}\n'.format(' '.join(cmd))) + return subprocess.check_output(cmd, stdin=None, stderr=subprocess.STDOUT, cwd=cwd, env=env) + + +def classify_srcs(srcs, args): + args.go_srcs = [x for x in srcs if x.endswith('.go')] + args.asm_srcs = [x for x in srcs if x.endswith('.s')] + args.objects = [x for x in srcs if x.endswith('.o') or x.endswith('.obj')] + args.symabis = [x for x in srcs if x.endswith('.symabis')] + args.sysos = [x for x in srcs if x.endswith('.syso')] + + +def get_import_config_info(peers, gen_importmap, import_map={}, module_map={}): + info = {'importmap': [], 'packagefile': [], 'standard': {}} + if gen_importmap: + for key, value in six.iteritems(import_map): + info['importmap'].append((key, value)) + for peer in peers: + peer_import_path, is_std = get_import_path(os.path.dirname(peer)) + if gen_importmap: + index = get_vendor_index(peer_import_path) + if index >= 0: + index += len(vendor_prefix) + info['importmap'].append((peer_import_path[index:], peer_import_path)) + info['packagefile'].append((peer_import_path, os.path.join(args.build_root, peer))) + if is_std: + info['standard'][peer_import_path] = True + for key, value in six.iteritems(module_map): + info['packagefile'].append((key, value)) + return info + + +def create_import_config(peers, gen_importmap, import_map={}, module_map={}): + lines = [] + info = get_import_config_info(peers, gen_importmap, import_map, module_map) + for key in ('importmap', 'packagefile'): + for item in info[key]: + lines.append('{} {}={}'.format(key, *item)) + if len(lines) > 0: + lines.append('') + content = '\n'.join(lines) + # sys.stderr.writelines('{}\n'.format(l) for l in lines) + with tempfile.NamedTemporaryFile(delete=False) as f: + f.write(content.encode('UTF-8')) + return f.name + return None + + +def create_embed_config(args): + data = { + 'Patterns': {}, + 'Files': {}, + } + for info in args.embed: + pattern = info[0] + if pattern.endswith('/**/*'): + pattern = pattern[:-3] + files = {os.path.relpath(f, args.source_module_dir).replace('\\', '/'): f for f in info[1:]} + data['Patterns'][pattern] = list(files.keys()) + data['Files'].update(files) + # sys.stderr.write('{}\n'.format(json.dumps(data, indent=4))) + with tempfile.NamedTemporaryFile(delete=False, suffix='.embedcfg') as f: + f.write(json.dumps(data).encode('UTF-8')) + return f.name + + +def vet_info_output_name(path, ext=None): + return '{}{}'.format(path, ext or vet_info_ext) + + +def vet_report_output_name(path, ext=None): + return '{}{}'.format(path, ext or vet_report_ext) + + +def get_source_path(args): + return args.test_import_path or args.module_path + + +def gen_vet_info(args): + import_path = args.real_import_path if hasattr(args, 'real_import_path') else args.import_path + info = get_import_config_info(args.peers, True, args.import_map, args.module_map) + + import_map = dict(info['importmap']) + # FIXME(snermolaev): it seems that adding import map for 'fake' package + # does't make any harm (it needs to be revised later) + import_map['unsafe'] = 'unsafe' + + for (key, _) in info['packagefile']: + if key not in import_map: + import_map[key] = key + + data = { + 'ID': import_path, + 'Compiler': 'gc', + 'Dir': os.path.join(args.source_root, get_source_path(args)), + 'ImportPath': import_path, + 'GoFiles': [x for x in args.go_srcs if x.endswith('.go')], + 'NonGoFiles': [x for x in args.go_srcs if not x.endswith('.go')], + 'ImportMap': import_map, + 
'PackageFile': dict(info['packagefile']), + 'Standard': dict(info['standard']), + 'PackageVetx': dict((key, vet_info_output_name(value)) for key, value in info['packagefile']), + 'VetxOnly': False, + 'VetxOutput': vet_info_output_name(args.output), + 'SucceedOnTypecheckFailure': False + } + # sys.stderr.write('{}\n'.format(json.dumps(data, indent=4))) + return data + + +def create_vet_config(args, info): + with tempfile.NamedTemporaryFile(delete=False, suffix='.cfg') as f: + f.write(json.dumps(info).encode('UTF-8')) + return f.name + + +def decode_vet_report(json_report): + report = '' + if json_report: + try: + full_diags = json.JSONDecoder().decode(json_report.decode('UTF-8')) + except ValueError: + report = json_report + else: + messages = [] + for _, module_diags in six.iteritems(full_diags): + for _, type_diags in six.iteritems(module_diags): + for diag in type_diags: + messages.append('{}: {}'.format(diag['posn'], json.dumps(diag['message']))) + report = '\n'.join(messages) + + return report + + +def dump_vet_report(args, report): + if report: + report = report.replace(args.build_root, '$B') + report = report.replace(args.source_root, '$S') + with open(args.vet_report_output, 'w') as f: + f.write(report) + + +def read_vet_report(args): + assert args + report = '' + if os.path.exists(args.vet_report_output): + with open(args.vet_report_output, 'r') as f: + report += f.read() + return report + + +def dump_vet_report_for_tests(args, *test_args_list): + dump_vet_report(args, reduce(lambda x, y: x + read_vet_report(y), [_f for _f in test_args_list if _f], '')) + + +def do_vet(args): + assert args.vet + info = gen_vet_info(args) + vet_config = create_vet_config(args, info) + cmd = [args.go_vet, '-json'] + if args.vet_flags: + cmd.extend(args.vet_flags) + cmd.append(vet_config) + # sys.stderr.write('>>>> [{}]\n'.format(' '.join(cmd))) + p_vet = subprocess.Popen(cmd, stdin=None, stderr=subprocess.PIPE, stdout=subprocess.PIPE, cwd=args.source_root) + vet_out, vet_err = p_vet.communicate() + report = decode_vet_report(vet_out) if vet_out else '' + dump_vet_report(args, report) + if p_vet.returncode: + raise subprocess.CalledProcessError(returncode=p_vet.returncode, cmd=cmd, output=vet_err) + + +def _do_compile_go(args): + import_path, is_std_module = args.import_path, args.is_std + cmd = [ + args.go_compile, + '-o', + args.output, + '-p', + import_path, + '-D', + '""', + '-goversion', + 'go{}'.format(args.goversion) + ] + cmd.extend(get_trimpath_args(args)) + compiling_runtime = False + if is_std_module: + cmd.append('-std') + if import_path in ('runtime', 'internal/abi', 'internal/bytealg', 'internal/cpu') or import_path.startswith('runtime/internal/'): + cmd.append('-+') + compiling_runtime = True + import_config_name = create_import_config(args.peers, True, args.import_map, args.module_map) + if import_config_name: + cmd += ['-importcfg', import_config_name] + else: + if import_path == 'unsafe' or len(args.objects) > 0 or args.asmhdr: + pass + else: + cmd.append('-complete') + # if compare_versions('1.16', args.goversion) >= 0: + if args.embed: + embed_config_name = create_embed_config(args) + cmd.extend(['-embedcfg', embed_config_name]) + if args.asmhdr: + cmd += ['-asmhdr', args.asmhdr] + # Use .symabis (starting from 1.12 version) + if args.symabis: + cmd += ['-symabis'] + args.symabis + # If 1.12 <= version < 1.13 we have to pass -allabis for 'runtime' and 'runtime/internal/atomic' + # if compare_versions('1.13', args.goversion) >= 0: + # pass + # elif import_path in ('runtime', 
'runtime/internal/atomic'): + # cmd.append('-allabis') + compile_workers = '4' + if args.compile_flags: + if compiling_runtime: + cmd.extend(x for x in args.compile_flags if x not in COMPILE_OPTIMIZATION_FLAGS) + else: + cmd.extend(args.compile_flags) + if any([x in ('-race', '-shared') for x in args.compile_flags]): + compile_workers = '1' + cmd += ['-pack', '-c={}'.format(compile_workers)] + cmd += args.go_srcs + call(cmd, args.build_root) + + +class VetThread(threading.Thread): + + def __init__(self, target, args): + super(VetThread, self).__init__(target=target, args=args) + self.exc_info = None + + def run(self): + try: + super(VetThread, self).run() + except: + self.exc_info = sys.exc_info() + + def join_with_exception(self, reraise_exception): + self.join() + if reraise_exception and self.exc_info: + six.reraise(self.exc_info[0], self.exc_info[1], self.exc_info[2]) + + +def do_compile_go(args): + raise_exception_from_vet = False + if args.vet: + run_vet = VetThread(target=do_vet, args=(args,)) + run_vet.start() + try: + _do_compile_go(args) + raise_exception_from_vet = True + finally: + if args.vet: + run_vet.join_with_exception(raise_exception_from_vet) + + +def do_compile_asm(args): + def need_compiling_runtime(import_path): + return import_path in ('runtime', 'reflect', 'syscall') or \ + import_path.startswith('runtime/internal/') or \ + compare_versions('1.17', args.goversion) >= 0 and import_path == 'internal/bytealg' + + assert(len(args.srcs) == 1 and len(args.asm_srcs) == 1) + cmd = [args.go_asm] + cmd += get_trimpath_args(args) + cmd += ['-I', args.output_root, '-I', os.path.join(args.pkg_root, 'include')] + cmd += ['-D', 'GOOS_' + args.targ_os, '-D', 'GOARCH_' + args.targ_arch, '-o', args.output] + + # if compare_versions('1.16', args.goversion) >= 0: + cmd += ['-p', args.import_path] + if need_compiling_runtime(args.import_path): + cmd += ['-compiling-runtime'] + + if args.asm_flags: + cmd += args.asm_flags + cmd += args.asm_srcs + call(cmd, args.build_root) + + +def do_link_lib(args): + if len(args.asm_srcs) > 0: + asmargs = copy_args(args) + asmargs.asmhdr = os.path.join(asmargs.output_root, 'go_asm.h') + do_compile_go(asmargs) + for src in asmargs.asm_srcs: + asmargs.srcs = [src] + asmargs.asm_srcs = [src] + asmargs.output = os.path.join(asmargs.output_root, os.path.basename(src) + '.o') + do_compile_asm(asmargs) + args.objects.append(asmargs.output) + else: + do_compile_go(args) + if args.objects or args.sysos: + cmd = [args.go_pack, 'r', args.output] + args.objects + args.sysos + call(cmd, args.build_root) + + +def do_link_exe(args): + assert args.extld is not None + assert args.non_local_peers is not None + compile_args = copy_args(args) + compile_args.output = os.path.join(args.output_root, 'main.a') + compile_args.real_import_path = compile_args.import_path + compile_args.import_path = 'main' if args.vcs and os.path.isfile(compile_args.vcs): build_info = os.path.join('library', 'go', 'core', 'buildinfo') - if any([x.startswith(build_info) for x in compile_args.peers]): + if any([x.startswith(build_info) for x in compile_args.peers]): compile_args.go_srcs.append(compile_args.vcs) - do_link_lib(compile_args) - cmd = [args.go_link, '-o', args.output] - import_config_name = create_import_config(args.peers + args.non_local_peers, False, args.import_map, args.module_map) - if import_config_name: - cmd += ['-importcfg', import_config_name] - if args.link_flags: - cmd += args.link_flags - - if args.mode in ('exe', 'test'): - cmd.append('-buildmode=exe') - elif args.mode == 
'dll': - cmd.append('-buildmode=c-shared') - else: - assert False, 'Unexpected mode: {}'.format(args.mode) - cmd.append('-extld={}'.format(args.extld)) - - extldflags = [] - if args.extldflags is not None: - filter_musl = bool - if args.musl: - cmd.append('-linkmode=external') - extldflags.append('-static') - filter_musl = lambda x: x not in ('-lc', '-ldl', '-lm', '-lpthread', '-lrt') - extldflags += [x for x in args.extldflags if filter_musl(x)] - cgo_peers = [] - if args.cgo_peers is not None and len(args.cgo_peers) > 0: - is_group = args.targ_os == 'linux' - if is_group: - cgo_peers.append('-Wl,--start-group') - cgo_peers.extend(args.cgo_peers) - if is_group: - cgo_peers.append('-Wl,--end-group') - try: - index = extldflags.index('--cgo-peers') - extldflags = extldflags[:index] + cgo_peers + extldflags[index+1:] - except ValueError: - extldflags.extend(cgo_peers) - if len(extldflags) > 0: - cmd.append('-extldflags={}'.format(' '.join(extldflags))) - cmd.append(compile_args.output) - call(cmd, args.build_root) - - -def gen_cover_info(args): - lines = [] - lines.extend([ - """ -var ( - coverCounters = make(map[string][]uint32) - coverBlocks = make(map[string][]testing.CoverBlock) -) - """, - 'func init() {', - ]) - for var, file in (x.split(':') for x in args.cover_info): - lines.append(' coverRegisterFile("{file}", _cover0.{var}.Count[:], _cover0.{var}.Pos[:], _cover0.{var}.NumStmt[:])'.format(file=file, var=var)) - lines.extend([ - '}', - """ -func coverRegisterFile(fileName string, counter []uint32, pos []uint32, numStmts []uint16) { - if 3*len(counter) != len(pos) || len(counter) != len(numStmts) { - panic("coverage: mismatched sizes") - } - if coverCounters[fileName] != nil { - // Already registered. - return - } - coverCounters[fileName] = counter - block := make([]testing.CoverBlock, len(counter)) - for i := range counter { - block[i] = testing.CoverBlock{ - Line0: pos[3*i+0], - Col0: uint16(pos[3*i+2]), - Line1: pos[3*i+1], - Col1: uint16(pos[3*i+2]>>16), - Stmts: numStmts[i], - } - } - coverBlocks[fileName] = block -} - """, - ]) - return lines - - -def filter_out_skip_tests(tests, skip_tests): - skip_set = set() - star_skip_set = set() - for t in skip_tests: - work_set = star_skip_set if '*' in t else skip_set - work_set.add(t) - - re_star_tests = None - if len(star_skip_set) > 0: - re_star_tests = re.compile(re.sub(r'(\*)+', r'.\1', '^({})$'.format('|'.join(star_skip_set)))) - - return [x for x in tests if not (x in skip_tests or re_star_tests and re_star_tests.match(x))] - - -def gen_test_main(args, test_lib_args, xtest_lib_args): - assert args and (test_lib_args or xtest_lib_args) - test_miner = args.test_miner - test_module_path = test_lib_args.import_path if test_lib_args else xtest_lib_args.import_path - is_cover = args.cover_info and len(args.cover_info) > 0 - - # Prepare GOPATH - # $BINDIR - # |- __go__ - # |- src - # |- pkg - # |- ${TARGET_OS}_${TARGET_ARCH} - go_path_root = os.path.join(args.output_root, '__go__') - test_src_dir = os.path.join(go_path_root, 'src') - target_os_arch = '_'.join([args.targ_os, args.targ_arch]) - test_pkg_dir = os.path.join(go_path_root, 'pkg', target_os_arch, os.path.dirname(test_module_path)) - os.makedirs(test_pkg_dir) - - my_env = os.environ.copy() - my_env['GOROOT'] = '' - my_env['GOPATH'] = go_path_root - my_env['GOARCH'] = args.targ_arch - my_env['GOOS'] = args.targ_os - - tests = [] - xtests = [] - os_symlink = get_symlink_or_copyfile() - - # Get the list of "internal" tests - if test_lib_args: - 
os.makedirs(os.path.join(test_src_dir, test_module_path)) - os_symlink(test_lib_args.output, os.path.join(test_pkg_dir, os.path.basename(test_module_path) + '.a')) - cmd = [test_miner, '-benchmarks', '-tests', test_module_path] - tests = [x for x in (call(cmd, test_lib_args.output_root, my_env).decode('UTF-8') or '').strip().split('\n') if len(x) > 0] - if args.skip_tests: - tests = filter_out_skip_tests(tests, args.skip_tests) - test_main_found = '#TestMain' in tests - - # Get the list of "external" tests - if xtest_lib_args: - xtest_module_path = xtest_lib_args.import_path - os.makedirs(os.path.join(test_src_dir, xtest_module_path)) - os_symlink(xtest_lib_args.output, os.path.join(test_pkg_dir, os.path.basename(xtest_module_path) + '.a')) - cmd = [test_miner, '-benchmarks', '-tests', xtest_module_path] - xtests = [x for x in (call(cmd, xtest_lib_args.output_root, my_env).decode('UTF-8') or '').strip().split('\n') if len(x) > 0] - if args.skip_tests: - xtests = filter_out_skip_tests(xtests, args.skip_tests) - xtest_main_found = '#TestMain' in xtests - - test_main_package = None - if test_main_found and xtest_main_found: - assert False, 'multiple definition of TestMain' - elif test_main_found: - test_main_package = '_test' - elif xtest_main_found: - test_main_package = '_xtest' - - shutil.rmtree(go_path_root) - - lines = ['package main', '', 'import ('] - if test_main_package is None: - lines.append(' "os"') - lines.extend([' "testing"', ' "testing/internal/testdeps"']) - - if len(tests) > 0: - lines.append(' _test "{}"'.format(test_module_path)) - elif test_lib_args: - lines.append(' _ "{}"'.format(test_module_path)) - - if len(xtests) > 0: - lines.append(' _xtest "{}"'.format(xtest_module_path)) - elif xtest_lib_args: - lines.append(' _ "{}"'.format(xtest_module_path)) - - if is_cover: - lines.append(' _cover0 "{}"'.format(test_module_path)) - lines.extend([')', '']) - - if compare_versions('1.18', args.goversion) < 0: - kinds = ['Test', 'Benchmark', 'Example'] - else: - kinds = ['Test', 'Benchmark', 'FuzzTarget', 'Example'] - - var_names = [] - for kind in kinds: - var_name = '{}s'.format(kind.lower()) - var_names.append(var_name) - lines.append('var {} = []testing.Internal{}{{'.format(var_name, kind)) - for test in [x for x in tests if x.startswith(kind)]: - lines.append(' {{"{test}", _test.{test}}},'.format(test=test)) - for test in [x for x in xtests if x.startswith(kind)]: - lines.append(' {{"{test}", _xtest.{test}}},'.format(test=test)) - lines.extend(['}', '']) - - if is_cover: - lines.extend(gen_cover_info(args)) - - lines.append('func main() {') - if is_cover: - lines.extend([ - ' testing.RegisterCover(testing.Cover{', - ' Mode: "set",', - ' Counters: coverCounters,', - ' Blocks: coverBlocks,', - ' CoveredPackages: "",', - ' })', - ]) - lines.extend([ - ' m := testing.MainStart(testdeps.TestDeps{{}}, {})'.format(', '.join(var_names)), - '', - ]) - - if test_main_package: - lines.append(' {}.TestMain(m)'.format(test_main_package)) - else: - lines.append(' os.Exit(m.Run())') - lines.extend(['}', '']) - - content = '\n'.join(lines) - # sys.stderr.write('{}\n'.format(content)) - return content - - -def do_link_test(args): - assert args.srcs or args.xtest_srcs - assert args.test_miner is not None - - test_module_path = get_source_path(args) - test_import_path, _ = get_import_path(test_module_path) - - test_lib_args = copy_args(args) if args.srcs else None - xtest_lib_args = copy_args(args) if args.xtest_srcs else None - if xtest_lib_args is not None: - xtest_lib_args.embed = 
args.embed_xtest if args.embed_xtest else None - - ydx_file_name = None - xtest_ydx_file_name = None - need_append_ydx = test_lib_args and xtest_lib_args and args.ydx_file and args.vet_flags - if need_append_ydx: - def find_ydx_file_name(name, flags): - for i, elem in enumerate(flags): - if elem.endswith(name): - return (i, elem) - assert False, 'Unreachable code' - - idx, ydx_file_name = find_ydx_file_name(xtest_lib_args.ydx_file, xtest_lib_args.vet_flags) - xtest_ydx_file_name = '{}_xtest'.format(ydx_file_name) - xtest_lib_args.vet_flags = copy.copy(xtest_lib_args.vet_flags) - xtest_lib_args.vet_flags[idx] = xtest_ydx_file_name - - if test_lib_args: - test_lib_args.output = os.path.join(args.output_root, 'test.a') - test_lib_args.vet_report_output = vet_report_output_name(test_lib_args.output) - test_lib_args.module_path = test_module_path - test_lib_args.import_path = test_import_path - do_link_lib(test_lib_args) - - if xtest_lib_args: - xtest_lib_args.srcs = xtest_lib_args.xtest_srcs - classify_srcs(xtest_lib_args.srcs, xtest_lib_args) - xtest_lib_args.output = os.path.join(args.output_root, 'xtest.a') - xtest_lib_args.vet_report_output = vet_report_output_name(xtest_lib_args.output) - xtest_lib_args.module_path = test_module_path + '_test' - xtest_lib_args.import_path = test_import_path + '_test' - if test_lib_args: - xtest_lib_args.module_map[test_import_path] = test_lib_args.output - need_append_ydx = args.ydx_file and args.srcs and args.vet_flags - do_link_lib(xtest_lib_args) - - if need_append_ydx: - with open(os.path.join(args.build_root, ydx_file_name), 'ab') as dst_file: - with open(os.path.join(args.build_root, xtest_ydx_file_name), 'rb') as src_file: - dst_file.write(src_file.read()) - - test_main_content = gen_test_main(args, test_lib_args, xtest_lib_args) - test_main_name = os.path.join(args.output_root, '_test_main.go') - with open(test_main_name, "w") as f: - f.write(test_main_content) - test_args = copy_args(args) - test_args.embed = None - test_args.srcs = [test_main_name] - if test_args.test_import_path is None: - # it seems that we can do it unconditionally, but this kind - # of mangling doesn't really looks good to me and we leave it - # for pure GO_TEST module - test_args.module_path = test_args.module_path + '___test_main__' - test_args.import_path = test_args.import_path + '___test_main__' - classify_srcs(test_args.srcs, test_args) - if test_lib_args: - test_args.module_map[test_lib_args.import_path] = test_lib_args.output - if xtest_lib_args: - test_args.module_map[xtest_lib_args.import_path] = xtest_lib_args.output - - if args.vet: - dump_vet_report_for_tests(test_args, test_lib_args, xtest_lib_args) - test_args.vet = False - - do_link_exe(test_args) - - -if __name__ == '__main__': + do_link_lib(compile_args) + cmd = [args.go_link, '-o', args.output] + import_config_name = create_import_config(args.peers + args.non_local_peers, False, args.import_map, args.module_map) + if import_config_name: + cmd += ['-importcfg', import_config_name] + if args.link_flags: + cmd += args.link_flags + + if args.mode in ('exe', 'test'): + cmd.append('-buildmode=exe') + elif args.mode == 'dll': + cmd.append('-buildmode=c-shared') + else: + assert False, 'Unexpected mode: {}'.format(args.mode) + cmd.append('-extld={}'.format(args.extld)) + + extldflags = [] + if args.extldflags is not None: + filter_musl = bool + if args.musl: + cmd.append('-linkmode=external') + extldflags.append('-static') + filter_musl = lambda x: x not in ('-lc', '-ldl', '-lm', '-lpthread', '-lrt') + extldflags 
+= [x for x in args.extldflags if filter_musl(x)] + cgo_peers = [] + if args.cgo_peers is not None and len(args.cgo_peers) > 0: + is_group = args.targ_os == 'linux' + if is_group: + cgo_peers.append('-Wl,--start-group') + cgo_peers.extend(args.cgo_peers) + if is_group: + cgo_peers.append('-Wl,--end-group') + try: + index = extldflags.index('--cgo-peers') + extldflags = extldflags[:index] + cgo_peers + extldflags[index+1:] + except ValueError: + extldflags.extend(cgo_peers) + if len(extldflags) > 0: + cmd.append('-extldflags={}'.format(' '.join(extldflags))) + cmd.append(compile_args.output) + call(cmd, args.build_root) + + +def gen_cover_info(args): + lines = [] + lines.extend([ + """ +var ( + coverCounters = make(map[string][]uint32) + coverBlocks = make(map[string][]testing.CoverBlock) +) + """, + 'func init() {', + ]) + for var, file in (x.split(':') for x in args.cover_info): + lines.append(' coverRegisterFile("{file}", _cover0.{var}.Count[:], _cover0.{var}.Pos[:], _cover0.{var}.NumStmt[:])'.format(file=file, var=var)) + lines.extend([ + '}', + """ +func coverRegisterFile(fileName string, counter []uint32, pos []uint32, numStmts []uint16) { + if 3*len(counter) != len(pos) || len(counter) != len(numStmts) { + panic("coverage: mismatched sizes") + } + if coverCounters[fileName] != nil { + // Already registered. + return + } + coverCounters[fileName] = counter + block := make([]testing.CoverBlock, len(counter)) + for i := range counter { + block[i] = testing.CoverBlock{ + Line0: pos[3*i+0], + Col0: uint16(pos[3*i+2]), + Line1: pos[3*i+1], + Col1: uint16(pos[3*i+2]>>16), + Stmts: numStmts[i], + } + } + coverBlocks[fileName] = block +} + """, + ]) + return lines + + +def filter_out_skip_tests(tests, skip_tests): + skip_set = set() + star_skip_set = set() + for t in skip_tests: + work_set = star_skip_set if '*' in t else skip_set + work_set.add(t) + + re_star_tests = None + if len(star_skip_set) > 0: + re_star_tests = re.compile(re.sub(r'(\*)+', r'.\1', '^({})$'.format('|'.join(star_skip_set)))) + + return [x for x in tests if not (x in skip_tests or re_star_tests and re_star_tests.match(x))] + + +def gen_test_main(args, test_lib_args, xtest_lib_args): + assert args and (test_lib_args or xtest_lib_args) + test_miner = args.test_miner + test_module_path = test_lib_args.import_path if test_lib_args else xtest_lib_args.import_path + is_cover = args.cover_info and len(args.cover_info) > 0 + + # Prepare GOPATH + # $BINDIR + # |- __go__ + # |- src + # |- pkg + # |- ${TARGET_OS}_${TARGET_ARCH} + go_path_root = os.path.join(args.output_root, '__go__') + test_src_dir = os.path.join(go_path_root, 'src') + target_os_arch = '_'.join([args.targ_os, args.targ_arch]) + test_pkg_dir = os.path.join(go_path_root, 'pkg', target_os_arch, os.path.dirname(test_module_path)) + os.makedirs(test_pkg_dir) + + my_env = os.environ.copy() + my_env['GOROOT'] = '' + my_env['GOPATH'] = go_path_root + my_env['GOARCH'] = args.targ_arch + my_env['GOOS'] = args.targ_os + + tests = [] + xtests = [] + os_symlink = get_symlink_or_copyfile() + + # Get the list of "internal" tests + if test_lib_args: + os.makedirs(os.path.join(test_src_dir, test_module_path)) + os_symlink(test_lib_args.output, os.path.join(test_pkg_dir, os.path.basename(test_module_path) + '.a')) + cmd = [test_miner, '-benchmarks', '-tests', test_module_path] + tests = [x for x in (call(cmd, test_lib_args.output_root, my_env).decode('UTF-8') or '').strip().split('\n') if len(x) > 0] + if args.skip_tests: + tests = filter_out_skip_tests(tests, args.skip_tests) + 
test_main_found = '#TestMain' in tests + + # Get the list of "external" tests + if xtest_lib_args: + xtest_module_path = xtest_lib_args.import_path + os.makedirs(os.path.join(test_src_dir, xtest_module_path)) + os_symlink(xtest_lib_args.output, os.path.join(test_pkg_dir, os.path.basename(xtest_module_path) + '.a')) + cmd = [test_miner, '-benchmarks', '-tests', xtest_module_path] + xtests = [x for x in (call(cmd, xtest_lib_args.output_root, my_env).decode('UTF-8') or '').strip().split('\n') if len(x) > 0] + if args.skip_tests: + xtests = filter_out_skip_tests(xtests, args.skip_tests) + xtest_main_found = '#TestMain' in xtests + + test_main_package = None + if test_main_found and xtest_main_found: + assert False, 'multiple definition of TestMain' + elif test_main_found: + test_main_package = '_test' + elif xtest_main_found: + test_main_package = '_xtest' + + shutil.rmtree(go_path_root) + + lines = ['package main', '', 'import ('] + if test_main_package is None: + lines.append(' "os"') + lines.extend([' "testing"', ' "testing/internal/testdeps"']) + + if len(tests) > 0: + lines.append(' _test "{}"'.format(test_module_path)) + elif test_lib_args: + lines.append(' _ "{}"'.format(test_module_path)) + + if len(xtests) > 0: + lines.append(' _xtest "{}"'.format(xtest_module_path)) + elif xtest_lib_args: + lines.append(' _ "{}"'.format(xtest_module_path)) + + if is_cover: + lines.append(' _cover0 "{}"'.format(test_module_path)) + lines.extend([')', '']) + + if compare_versions('1.18', args.goversion) < 0: + kinds = ['Test', 'Benchmark', 'Example'] + else: + kinds = ['Test', 'Benchmark', 'FuzzTarget', 'Example'] + + var_names = [] + for kind in kinds: + var_name = '{}s'.format(kind.lower()) + var_names.append(var_name) + lines.append('var {} = []testing.Internal{}{{'.format(var_name, kind)) + for test in [x for x in tests if x.startswith(kind)]: + lines.append(' {{"{test}", _test.{test}}},'.format(test=test)) + for test in [x for x in xtests if x.startswith(kind)]: + lines.append(' {{"{test}", _xtest.{test}}},'.format(test=test)) + lines.extend(['}', '']) + + if is_cover: + lines.extend(gen_cover_info(args)) + + lines.append('func main() {') + if is_cover: + lines.extend([ + ' testing.RegisterCover(testing.Cover{', + ' Mode: "set",', + ' Counters: coverCounters,', + ' Blocks: coverBlocks,', + ' CoveredPackages: "",', + ' })', + ]) + lines.extend([ + ' m := testing.MainStart(testdeps.TestDeps{{}}, {})'.format(', '.join(var_names)), + '', + ]) + + if test_main_package: + lines.append(' {}.TestMain(m)'.format(test_main_package)) + else: + lines.append(' os.Exit(m.Run())') + lines.extend(['}', '']) + + content = '\n'.join(lines) + # sys.stderr.write('{}\n'.format(content)) + return content + + +def do_link_test(args): + assert args.srcs or args.xtest_srcs + assert args.test_miner is not None + + test_module_path = get_source_path(args) + test_import_path, _ = get_import_path(test_module_path) + + test_lib_args = copy_args(args) if args.srcs else None + xtest_lib_args = copy_args(args) if args.xtest_srcs else None + if xtest_lib_args is not None: + xtest_lib_args.embed = args.embed_xtest if args.embed_xtest else None + + ydx_file_name = None + xtest_ydx_file_name = None + need_append_ydx = test_lib_args and xtest_lib_args and args.ydx_file and args.vet_flags + if need_append_ydx: + def find_ydx_file_name(name, flags): + for i, elem in enumerate(flags): + if elem.endswith(name): + return (i, elem) + assert False, 'Unreachable code' + + idx, ydx_file_name = find_ydx_file_name(xtest_lib_args.ydx_file, 
xtest_lib_args.vet_flags) + xtest_ydx_file_name = '{}_xtest'.format(ydx_file_name) + xtest_lib_args.vet_flags = copy.copy(xtest_lib_args.vet_flags) + xtest_lib_args.vet_flags[idx] = xtest_ydx_file_name + + if test_lib_args: + test_lib_args.output = os.path.join(args.output_root, 'test.a') + test_lib_args.vet_report_output = vet_report_output_name(test_lib_args.output) + test_lib_args.module_path = test_module_path + test_lib_args.import_path = test_import_path + do_link_lib(test_lib_args) + + if xtest_lib_args: + xtest_lib_args.srcs = xtest_lib_args.xtest_srcs + classify_srcs(xtest_lib_args.srcs, xtest_lib_args) + xtest_lib_args.output = os.path.join(args.output_root, 'xtest.a') + xtest_lib_args.vet_report_output = vet_report_output_name(xtest_lib_args.output) + xtest_lib_args.module_path = test_module_path + '_test' + xtest_lib_args.import_path = test_import_path + '_test' + if test_lib_args: + xtest_lib_args.module_map[test_import_path] = test_lib_args.output + need_append_ydx = args.ydx_file and args.srcs and args.vet_flags + do_link_lib(xtest_lib_args) + + if need_append_ydx: + with open(os.path.join(args.build_root, ydx_file_name), 'ab') as dst_file: + with open(os.path.join(args.build_root, xtest_ydx_file_name), 'rb') as src_file: + dst_file.write(src_file.read()) + + test_main_content = gen_test_main(args, test_lib_args, xtest_lib_args) + test_main_name = os.path.join(args.output_root, '_test_main.go') + with open(test_main_name, "w") as f: + f.write(test_main_content) + test_args = copy_args(args) + test_args.embed = None + test_args.srcs = [test_main_name] + if test_args.test_import_path is None: + # it seems that we can do it unconditionally, but this kind + # of mangling doesn't really looks good to me and we leave it + # for pure GO_TEST module + test_args.module_path = test_args.module_path + '___test_main__' + test_args.import_path = test_args.import_path + '___test_main__' + classify_srcs(test_args.srcs, test_args) + if test_lib_args: + test_args.module_map[test_lib_args.import_path] = test_lib_args.output + if xtest_lib_args: + test_args.module_map[xtest_lib_args.import_path] = xtest_lib_args.output + + if args.vet: + dump_vet_report_for_tests(test_args, test_lib_args, xtest_lib_args) + test_args.vet = False + + do_link_exe(test_args) + + +if __name__ == '__main__': args = pcf.get_args(sys.argv[1:]) - parser = argparse.ArgumentParser(prefix_chars='+') - parser.add_argument('++mode', choices=['dll', 'exe', 'lib', 'test'], required=True) - parser.add_argument('++srcs', nargs='*', required=True) - parser.add_argument('++cgo-srcs', nargs='*') - parser.add_argument('++test_srcs', nargs='*') - parser.add_argument('++xtest_srcs', nargs='*') - parser.add_argument('++cover_info', nargs='*') - parser.add_argument('++output', nargs='?', default=None) - parser.add_argument('++source-root', default=None) - parser.add_argument('++build-root', required=True) - parser.add_argument('++tools-root', default=None) - parser.add_argument('++output-root', required=True) - parser.add_argument('++toolchain-root', required=True) - parser.add_argument('++host-os', choices=['linux', 'darwin', 'windows'], required=True) + parser = argparse.ArgumentParser(prefix_chars='+') + parser.add_argument('++mode', choices=['dll', 'exe', 'lib', 'test'], required=True) + parser.add_argument('++srcs', nargs='*', required=True) + parser.add_argument('++cgo-srcs', nargs='*') + parser.add_argument('++test_srcs', nargs='*') + parser.add_argument('++xtest_srcs', nargs='*') + parser.add_argument('++cover_info', nargs='*') 
+ parser.add_argument('++output', nargs='?', default=None) + parser.add_argument('++source-root', default=None) + parser.add_argument('++build-root', required=True) + parser.add_argument('++tools-root', default=None) + parser.add_argument('++output-root', required=True) + parser.add_argument('++toolchain-root', required=True) + parser.add_argument('++host-os', choices=['linux', 'darwin', 'windows'], required=True) parser.add_argument('++host-arch', choices=['amd64', 'arm64'], required=True) - parser.add_argument('++targ-os', choices=['linux', 'darwin', 'windows'], required=True) + parser.add_argument('++targ-os', choices=['linux', 'darwin', 'windows'], required=True) parser.add_argument('++targ-arch', choices=['amd64', 'x86', 'arm64'], required=True) - parser.add_argument('++peers', nargs='*') - parser.add_argument('++non-local-peers', nargs='*') - parser.add_argument('++cgo-peers', nargs='*') - parser.add_argument('++asmhdr', nargs='?', default=None) - parser.add_argument('++test-import-path', nargs='?') - parser.add_argument('++test-miner', nargs='?') - parser.add_argument('++arc-project-prefix', nargs='?', default=arc_project_prefix) - parser.add_argument('++std-lib-prefix', nargs='?', default=std_lib_prefix) - parser.add_argument('++vendor-prefix', nargs='?', default=vendor_prefix) - parser.add_argument('++extld', nargs='?', default=None) - parser.add_argument('++extldflags', nargs='+', default=None) - parser.add_argument('++goversion', required=True) - parser.add_argument('++asm-flags', nargs='*') - parser.add_argument('++compile-flags', nargs='*') - parser.add_argument('++link-flags', nargs='*') + parser.add_argument('++peers', nargs='*') + parser.add_argument('++non-local-peers', nargs='*') + parser.add_argument('++cgo-peers', nargs='*') + parser.add_argument('++asmhdr', nargs='?', default=None) + parser.add_argument('++test-import-path', nargs='?') + parser.add_argument('++test-miner', nargs='?') + parser.add_argument('++arc-project-prefix', nargs='?', default=arc_project_prefix) + parser.add_argument('++std-lib-prefix', nargs='?', default=std_lib_prefix) + parser.add_argument('++vendor-prefix', nargs='?', default=vendor_prefix) + parser.add_argument('++extld', nargs='?', default=None) + parser.add_argument('++extldflags', nargs='+', default=None) + parser.add_argument('++goversion', required=True) + parser.add_argument('++asm-flags', nargs='*') + parser.add_argument('++compile-flags', nargs='*') + parser.add_argument('++link-flags', nargs='*') parser.add_argument('++vcs', nargs='?', default=None) - parser.add_argument('++vet', nargs='?', const=True, default=False) - parser.add_argument('++vet-flags', nargs='*', default=None) - parser.add_argument('++vet-info-ext', default=vet_info_ext) - parser.add_argument('++vet-report-ext', default=vet_report_ext) - parser.add_argument('++musl', action='store_true') - parser.add_argument('++skip-tests', nargs='*', default=None) - parser.add_argument('++ydx-file', default='') - parser.add_argument('++debug-root-map', default=None) - parser.add_argument('++embed', action='append', nargs='*') - parser.add_argument('++embed_xtest', action='append', nargs='*') + parser.add_argument('++vet', nargs='?', const=True, default=False) + parser.add_argument('++vet-flags', nargs='*', default=None) + parser.add_argument('++vet-info-ext', default=vet_info_ext) + parser.add_argument('++vet-report-ext', default=vet_report_ext) + parser.add_argument('++musl', action='store_true') + parser.add_argument('++skip-tests', nargs='*', default=None) + 
parser.add_argument('++ydx-file', default='') + parser.add_argument('++debug-root-map', default=None) + parser.add_argument('++embed', action='append', nargs='*') + parser.add_argument('++embed_xtest', action='append', nargs='*') args = parser.parse_args(args) - - arc_project_prefix = args.arc_project_prefix - std_lib_prefix = args.std_lib_prefix - vendor_prefix = args.vendor_prefix - vet_info_ext = args.vet_info_ext - vet_report_ext = args.vet_report_ext - - preprocess_args(args) - - try: - os.unlink(args.output) - except OSError: - pass - - # We are going to support only 'lib', 'exe' and 'cgo' build modes currently - # and as a result we are going to generate only one build node per module - # (or program) - dispatch = { - 'exe': do_link_exe, - 'dll': do_link_exe, - 'lib': do_link_lib, - 'test': do_link_test - } - - exit_code = 1 - try: - dispatch[args.mode](args) - exit_code = 0 - except KeyError: - sys.stderr.write('Unknown build mode [{}]...\n'.format(args.mode)) - except subprocess.CalledProcessError as e: - sys.stderr.write('{} returned non-zero exit code {}.\n{}\n'.format(' '.join(e.cmd), e.returncode, e.output)) - exit_code = e.returncode - except Exception as e: - sys.stderr.write('Unhandled exception [{}]...\n'.format(str(e))) - sys.exit(exit_code) + + arc_project_prefix = args.arc_project_prefix + std_lib_prefix = args.std_lib_prefix + vendor_prefix = args.vendor_prefix + vet_info_ext = args.vet_info_ext + vet_report_ext = args.vet_report_ext + + preprocess_args(args) + + try: + os.unlink(args.output) + except OSError: + pass + + # We are going to support only 'lib', 'exe' and 'cgo' build modes currently + # and as a result we are going to generate only one build node per module + # (or program) + dispatch = { + 'exe': do_link_exe, + 'dll': do_link_exe, + 'lib': do_link_lib, + 'test': do_link_test + } + + exit_code = 1 + try: + dispatch[args.mode](args) + exit_code = 0 + except KeyError: + sys.stderr.write('Unknown build mode [{}]...\n'.format(args.mode)) + except subprocess.CalledProcessError as e: + sys.stderr.write('{} returned non-zero exit code {}.\n{}\n'.format(' '.join(e.cmd), e.returncode, e.output)) + exit_code = e.returncode + except Exception as e: + sys.stderr.write('Unhandled exception [{}]...\n'.format(str(e))) + sys.exit(exit_code) diff --git a/build/scripts/link_asrc.py b/build/scripts/link_asrc.py index 704075f554..eec5fe09a8 100644 --- a/build/scripts/link_asrc.py +++ b/build/scripts/link_asrc.py @@ -1,84 +1,84 @@ -import argparse -import itertools -import os -import tarfile - - -DELIM_JAVA = '__DELIM_JAVA__' -DELIM_RES = '__DELIM_RES__' -DELIM_ASSETS = '__DELIM_ASSETS__' -DELIM_AIDL = '__DELIM_AIDL__' - -DELIMS = ( - DELIM_JAVA, - DELIM_RES, - DELIM_ASSETS, - DELIM_AIDL, -) - -DESTS = { - DELIM_JAVA: 'src', - DELIM_RES: 'res', - DELIM_ASSETS: 'assets', - DELIM_AIDL: 'aidl', -} - - -def parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument('--asrcs', nargs='*') - parser.add_argument('--input', nargs='*') - parser.add_argument('--jsrcs', nargs='*') - parser.add_argument('--output', required=True) - parser.add_argument('--work', required=True) - - return parser.parse_args() - - -def main(): - args = parse_args() - - files = [] - parts = [] - - if args.input and len(args.input) > 0: - for x in args.input: - if x in DELIMS: - assert(len(parts) == 0 or len(parts[-1]) > 1) - parts.append([x]) - else: - assert(len(parts) > 0) - parts[-1].append(x) - assert(len(parts[-1]) > 1) - - if args.jsrcs and len(args.jsrcs): - src_dir = 
os.path.join(args.work, DESTS[DELIM_JAVA]) - os.makedirs(src_dir) - - for jsrc in filter(lambda x: x.endswith('.jsrc'), args.jsrcs): - with tarfile.open(jsrc, 'r') as tar: - names = tar.getnames() - if names and len(names) > 0: - parts.append([DELIM_JAVA, src_dir]) - parts[-1].extend(itertools.imap(lambda x: os.path.join(src_dir, x), names)) - tar.extractall(path=src_dir) - - if args.asrcs and len(args.asrcs): - for asrc in filter(lambda x: x.endswith('.asrc') and os.path.exists(x), args.asrcs): - with tarfile.open(asrc, 'r') as tar: - files.extend(tar.getnames()) - tar.extractall(path=args.work) - - with tarfile.open(args.output, 'w') as out: - for part in parts: - dest = DESTS[part[0]] - prefix = part[1] - for f in part[2:]: - out.add(f, arcname=os.path.join(dest, os.path.relpath(f, prefix))) - - for f in files: - out.add(os.path.join(args.work, f), arcname=f) - - -if __name__ == '__main__': - main() +import argparse +import itertools +import os +import tarfile + + +DELIM_JAVA = '__DELIM_JAVA__' +DELIM_RES = '__DELIM_RES__' +DELIM_ASSETS = '__DELIM_ASSETS__' +DELIM_AIDL = '__DELIM_AIDL__' + +DELIMS = ( + DELIM_JAVA, + DELIM_RES, + DELIM_ASSETS, + DELIM_AIDL, +) + +DESTS = { + DELIM_JAVA: 'src', + DELIM_RES: 'res', + DELIM_ASSETS: 'assets', + DELIM_AIDL: 'aidl', +} + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--asrcs', nargs='*') + parser.add_argument('--input', nargs='*') + parser.add_argument('--jsrcs', nargs='*') + parser.add_argument('--output', required=True) + parser.add_argument('--work', required=True) + + return parser.parse_args() + + +def main(): + args = parse_args() + + files = [] + parts = [] + + if args.input and len(args.input) > 0: + for x in args.input: + if x in DELIMS: + assert(len(parts) == 0 or len(parts[-1]) > 1) + parts.append([x]) + else: + assert(len(parts) > 0) + parts[-1].append(x) + assert(len(parts[-1]) > 1) + + if args.jsrcs and len(args.jsrcs): + src_dir = os.path.join(args.work, DESTS[DELIM_JAVA]) + os.makedirs(src_dir) + + for jsrc in filter(lambda x: x.endswith('.jsrc'), args.jsrcs): + with tarfile.open(jsrc, 'r') as tar: + names = tar.getnames() + if names and len(names) > 0: + parts.append([DELIM_JAVA, src_dir]) + parts[-1].extend(itertools.imap(lambda x: os.path.join(src_dir, x), names)) + tar.extractall(path=src_dir) + + if args.asrcs and len(args.asrcs): + for asrc in filter(lambda x: x.endswith('.asrc') and os.path.exists(x), args.asrcs): + with tarfile.open(asrc, 'r') as tar: + files.extend(tar.getnames()) + tar.extractall(path=args.work) + + with tarfile.open(args.output, 'w') as out: + for part in parts: + dest = DESTS[part[0]] + prefix = part[1] + for f in part[2:]: + out.add(f, arcname=os.path.join(dest, os.path.relpath(f, prefix))) + + for f in files: + out.add(os.path.join(args.work, f), arcname=f) + + +if __name__ == '__main__': + main() diff --git a/build/scripts/mkdocs_builder_wrapper.py b/build/scripts/mkdocs_builder_wrapper.py index 74e0690044..7e10dfac07 100644 --- a/build/scripts/mkdocs_builder_wrapper.py +++ b/build/scripts/mkdocs_builder_wrapper.py @@ -1,35 +1,35 @@ -import os -import subprocess -import sys - - -def main(): - cmd = [] - build_root = sys.argv[1] - length = len(build_root) - is_dep = False - for arg in sys.argv[2:]: - if is_dep: - is_dep = False - if not arg.endswith('.tar.gz'): - continue - basename = os.path.basename(arg) - assert arg.startswith(build_root) and len(arg) > length + len(basename) and arg[length] in ('/', '\\') - cmd.extend(['--dep', '{}:{}:{}'.format(build_root, 
os.path.dirname(arg[length+1:]), basename)]) - elif arg == '--dep': - is_dep = True - else: - cmd.append(arg) - assert not is_dep - p = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - out, err = p.communicate() - if p.returncode: - if out: - sys.stderr.write('stdout:\n{}\n'.format(out)) - if err: - sys.stderr.write('stderr:\n{}\n'.format(err)) - sys.exit(p.returncode) - - -if __name__ == '__main__': - main() +import os +import subprocess +import sys + + +def main(): + cmd = [] + build_root = sys.argv[1] + length = len(build_root) + is_dep = False + for arg in sys.argv[2:]: + if is_dep: + is_dep = False + if not arg.endswith('.tar.gz'): + continue + basename = os.path.basename(arg) + assert arg.startswith(build_root) and len(arg) > length + len(basename) and arg[length] in ('/', '\\') + cmd.extend(['--dep', '{}:{}:{}'.format(build_root, os.path.dirname(arg[length+1:]), basename)]) + elif arg == '--dep': + is_dep = True + else: + cmd.append(arg) + assert not is_dep + p = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = p.communicate() + if p.returncode: + if out: + sys.stderr.write('stdout:\n{}\n'.format(out)) + if err: + sys.stderr.write('stderr:\n{}\n'.format(err)) + sys.exit(p.returncode) + + +if __name__ == '__main__': + main() diff --git a/build/scripts/postprocess_go_fbs.py b/build/scripts/postprocess_go_fbs.py index 6a93c7a7ba..325fa07ea6 100644 --- a/build/scripts/postprocess_go_fbs.py +++ b/build/scripts/postprocess_go_fbs.py @@ -1,72 +1,72 @@ -import argparse -import re -import os - - -# very simple regexp to find go import statement in the source code -# NOTE! only one-line comments are somehow considered -IMPORT_DECL=re.compile(r''' - \bimport - ( - \s+((\.|\w+)\s+)?"[^"]+" ( \s+//[^\n]* )? - | \s* \( \s* ( ( \s+ ((\.|\w+)\s+)? "[^"]+" )? ( \s* //[^\n]* )? 
)* \s* \) - )''', re.MULTILINE | re.DOTALL | re.VERBOSE) - - -def parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument('--input-dir', required=True) - parser.add_argument('--map', nargs='*', default=None) - - return parser.parse_args() - - -def process_go_file(file_name, import_map): - content = '' - with open(file_name, 'r') as f: - content = f.read() - - start = -1 - end = -1 - for it in IMPORT_DECL.finditer(content): - if start < 0: - start = it.start() - end = it.end() - - if start < 0: - return - - imports = content[start:end] - for namespace, path in import_map.iteritems(): - ns = namespace.split('.') - name = '__'.join(ns) - import_path = '/'.join(ns) - imports = imports.replace('{} "{}"'.format(name, import_path), '{} "a.yandex-team.ru/{}"'.format(name, path)) - - if imports != content[start:end]: - with open(file_name, 'w') as f: - f.write(content[:start]) - f.write(imports) - f.write(content[end:]) - - -def main(): - args = parse_args() - - if not args.map: - return - - raw_import_map = sorted(set(args.map)) - import_map = dict(z.split('=', 1) for z in raw_import_map) - if len(raw_import_map) != len(import_map): - for k, v in (z.split('=', 1) for z in raw_import_map): - if v != import_map[k]: - raise Exception('import map [{}] contains different values for key [{}]: [{}] and [{}].'.format(args.map, k, v, import_map[k])) - - for root, _, files in os.walk(args.input_dir): - for src in (f for f in files if f.endswith('.go')): - process_go_file(os.path.join(root, src), import_map) - - -if __name__ == '__main__': - main() +import argparse +import re +import os + + +# very simple regexp to find go import statement in the source code +# NOTE! only one-line comments are somehow considered +IMPORT_DECL=re.compile(r''' + \bimport + ( + \s+((\.|\w+)\s+)?"[^"]+" ( \s+//[^\n]* )? + | \s* \( \s* ( ( \s+ ((\.|\w+)\s+)? "[^"]+" )? ( \s* //[^\n]* )? 
)* \s* \) + )''', re.MULTILINE | re.DOTALL | re.VERBOSE) + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--input-dir', required=True) + parser.add_argument('--map', nargs='*', default=None) + + return parser.parse_args() + + +def process_go_file(file_name, import_map): + content = '' + with open(file_name, 'r') as f: + content = f.read() + + start = -1 + end = -1 + for it in IMPORT_DECL.finditer(content): + if start < 0: + start = it.start() + end = it.end() + + if start < 0: + return + + imports = content[start:end] + for namespace, path in import_map.iteritems(): + ns = namespace.split('.') + name = '__'.join(ns) + import_path = '/'.join(ns) + imports = imports.replace('{} "{}"'.format(name, import_path), '{} "a.yandex-team.ru/{}"'.format(name, path)) + + if imports != content[start:end]: + with open(file_name, 'w') as f: + f.write(content[:start]) + f.write(imports) + f.write(content[end:]) + + +def main(): + args = parse_args() + + if not args.map: + return + + raw_import_map = sorted(set(args.map)) + import_map = dict(z.split('=', 1) for z in raw_import_map) + if len(raw_import_map) != len(import_map): + for k, v in (z.split('=', 1) for z in raw_import_map): + if v != import_map[k]: + raise Exception('import map [{}] contains different values for key [{}]: [{}] and [{}].'.format(args.map, k, v, import_map[k])) + + for root, _, files in os.walk(args.input_dir): + for src in (f for f in files if f.endswith('.go')): + process_go_file(os.path.join(root, src), import_map) + + +if __name__ == '__main__': + main() diff --git a/build/scripts/process_whole_archive_option.py b/build/scripts/process_whole_archive_option.py index 1736e2c869..a9c4ef676a 100644 --- a/build/scripts/process_whole_archive_option.py +++ b/build/scripts/process_whole_archive_option.py @@ -12,17 +12,17 @@ class ProcessWholeArchiveOption(): self.start_wa_marker = '--start-wa' self.end_wa_marker = '--end-wa' - def _match_peer_lib(self, arg, ext): + def _match_peer_lib(self, arg, ext): key = None - if arg.endswith(ext): + if arg.endswith(ext): key = os.path.dirname(arg) return key if key and self.peers and key in self.peers else None def _match_lib(self, arg): return arg if self.libs and arg in self.libs else None - def _process_arg(self, arg, ext='.a'): - peer_key = self._match_peer_lib(arg, ext) + def _process_arg(self, arg, ext='.a'): + peer_key = self._match_peer_lib(arg, ext) lib_key = self._match_lib(arg) if peer_key: self.peers[peer_key] += 1 @@ -62,7 +62,7 @@ class ProcessWholeArchiveOption(): is_inside_wa_markers = False def add_prefix(arg, need_check_peers_and_libs): - key = self._process_arg(arg, '.lib') if need_check_peers_and_libs else arg + key = self._process_arg(arg, '.lib') if need_check_peers_and_libs else arg return whole_archive_prefix + arg if key else arg def add_whole_archive_prefix(arg, need_check_peers_and_libs): diff --git a/build/scripts/tar_directory.py b/build/scripts/tar_directory.py index d8247625b6..a91889fa22 100644 --- a/build/scripts/tar_directory.py +++ b/build/scripts/tar_directory.py @@ -36,9 +36,9 @@ def main(args): with tarfile.open(tar, 'r') as tar_file: tar_file.extractall(dest) else: - source = directory + source = directory with tarfile.open(tar, 'w') as out: - out.add(os.path.abspath(source), arcname=os.path.relpath(source, prefix) if prefix else source) + out.add(os.path.abspath(source), arcname=os.path.relpath(source, prefix) if prefix else source) if __name__ == '__main__': diff --git a/build/scripts/tar_sources.py 
b/build/scripts/tar_sources.py index 40545c9b74..54e2839a69 100644 --- a/build/scripts/tar_sources.py +++ b/build/scripts/tar_sources.py @@ -1,41 +1,41 @@ -import argparse -import os -import tarfile - - -def parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument('--exts', nargs='*', default=None) - parser.add_argument('--flat', action='store_true') - parser.add_argument('--input', required=True) - parser.add_argument('--output', required=True) - parser.add_argument('--prefix', default=None) - - return parser.parse_args() - - -def main(): - args = parse_args() - - py_srcs = [] - for root, _, files in os.walk(args.input): - for f in files: - if not args.exts or f.endswith(tuple(args.exts)): - py_srcs.append(os.path.join(root, f)) - - compression_mode = '' - if args.output.endswith(('.tar.gz', '.tgz')): - compression_mode = 'gz' - elif args.output.endswith('.bzip2'): - compression_mode = 'bz2' - - with tarfile.open(args.output, 'w:{}'.format(compression_mode)) as out: - for f in py_srcs: - arcname = os.path.basename(f) if args.flat else os.path.relpath(f, args.input) - if args.prefix: - arcname = os.path.join(args.prefix, arcname) - out.add(f, arcname=arcname) - - -if __name__ == '__main__': - main() +import argparse +import os +import tarfile + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--exts', nargs='*', default=None) + parser.add_argument('--flat', action='store_true') + parser.add_argument('--input', required=True) + parser.add_argument('--output', required=True) + parser.add_argument('--prefix', default=None) + + return parser.parse_args() + + +def main(): + args = parse_args() + + py_srcs = [] + for root, _, files in os.walk(args.input): + for f in files: + if not args.exts or f.endswith(tuple(args.exts)): + py_srcs.append(os.path.join(root, f)) + + compression_mode = '' + if args.output.endswith(('.tar.gz', '.tgz')): + compression_mode = 'gz' + elif args.output.endswith('.bzip2'): + compression_mode = 'bz2' + + with tarfile.open(args.output, 'w:{}'.format(compression_mode)) as out: + for f in py_srcs: + arcname = os.path.basename(f) if args.flat else os.path.relpath(f, args.input) + if args.prefix: + arcname = os.path.join(args.prefix, arcname) + out.add(f, arcname=arcname) + + +if __name__ == '__main__': + main() diff --git a/build/scripts/ya.make b/build/scripts/ya.make index d7f0fae2cf..710165e40d 100644 --- a/build/scripts/ya.make +++ b/build/scripts/ya.make @@ -16,19 +16,19 @@ TEST_SRCS( collect_java_srcs.py compile_cuda.py compile_java.py - compile_jsrc.py - compile_pysrc.py + compile_jsrc.py + compile_pysrc.py configure_file.py - copy_files_to_dir.py + copy_files_to_dir.py copy_to_dir.py coverage-info.py - cpp_flatc_wrapper.py + cpp_flatc_wrapper.py create_jcoverage_report.py - extract_asrc.py - extract_docs.py + extract_asrc.py + extract_docs.py extract_jacoco_report.py f2c.py - fail_module_cmd.py + fail_module_cmd.py fetch_from.py fetch_from_external.py fetch_from_mds.py @@ -39,20 +39,20 @@ TEST_SRCS( find_and_tar.py fix_msvc_output.py fs_tools.py - gen_aar_gradle_script.py + gen_aar_gradle_script.py gen_java_codenav_entry.py gen_java_codenav_protobuf.py gen_mx_table.py gen_py3_reg.py gen_py_reg.py - gen_test_apk_gradle_script.py + gen_test_apk_gradle_script.py gen_ub.py generate_pom.py - go_proto_wrapper.py - go_tool.py + go_proto_wrapper.py + go_tool.py ios_wrapper.py java_pack_to_file.py - link_asrc.py + link_asrc.py link_dyn_lib.py link_exe.py link_fat_obj.py @@ -61,12 +61,12 @@ TEST_SRCS( merge_coverage_data.py 
merge_files.py
 mkdir.py
- mkdocs_builder_wrapper.py
+ mkdocs_builder_wrapper.py
 mkver.py
 pack_ios.py
 pack_jcoverage_resources.py
 perl_wrapper.py
- postprocess_go_fbs.py
+ postprocess_go_fbs.py
 preprocess.py
 py_compile.py
 run_ios_simulator.py
@@ -79,7 +79,7 @@ TEST_SRCS(
 stdout2stderr.py
 symlink.py
 tar_directory.py
- tar_sources.py
+ tar_sources.py
 tared_protoc.py
 touch.py
 unpacking_jtest_runner.py
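
Note on the .jsrc/.asrc packing script above (link_asrc.py, going by the ya.make list): the --input arguments interleave __DELIM_*__ markers with paths. Each marker opens a new part; the first entry after it is the prefix directory, and the remaining entries are files that end up under the matching DESTS subdirectory of the output tar. A minimal, self-contained sketch of that grouping logic, with a hypothetical input list and only two of the four delimiters shown:

    # Sketch of the delimiter-based grouping; sample paths are made up.
    DELIM_RES = '__DELIM_RES__'
    DELIM_AIDL = '__DELIM_AIDL__'
    DELIMS = (DELIM_RES, DELIM_AIDL)

    def split_parts(inputs):
        # inputs is assumed non-empty and must start with a delimiter.
        parts = []
        for x in inputs:
            if x in DELIMS:
                # A new part may only start once the previous one has
                # received at least its prefix directory.
                assert len(parts) == 0 or len(parts[-1]) > 1
                parts.append([x])
            else:
                assert len(parts) > 0
                parts[-1].append(x)
        assert len(parts[-1]) > 1
        return parts

    print(split_parts([DELIM_RES, 'work/res', 'work/res/values/strings.xml']))
    # [['__DELIM_RES__', 'work/res', 'work/res/values/strings.xml']]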
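
Note on mkdocs_builder_wrapper.py above: each '--dep <path>' whose path is a .tar.gz under the build root is rewritten to '--dep <build_root>:<subdir>:<basename>' before the wrapped command runs; --dep values that are not .tar.gz files are silently dropped. A hedged sketch of just the rewriting step (paths are hypothetical):

    import os

    def rewrite_dep(build_root, arg):
        # '<build_root>/<subdir>/<name>.tar.gz' -> '<build_root>:<subdir>:<name>.tar.gz'
        length = len(build_root)
        basename = os.path.basename(arg)
        assert arg.startswith(build_root) and arg[length] in ('/', '\\')
        return '{}:{}:{}'.format(build_root, os.path.dirname(arg[length + 1:]), basename)

    print(rewrite_dep('/bld', '/bld/docs/ru/book.tar.gz'))  # /bld:docs/ru:book.tar.gz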
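
Note on postprocess_go_fbs.py above: for every 'namespace=path' mapping it rewrites the flatbuffers-generated import alias and path inside the first import block of each .go file; a namespace a.b.c yields the alias a__b__c and the import path a/b/c, which is redirected to a.yandex-team.ru/<path>. The script itself is Python 2 (iteritems); the sketch below is version-neutral and uses made-up values:

    def rewrite_import(imports, namespace, path):
        # Mirror the per-namespace substitution applied to the import block.
        ns = namespace.split('.')
        name = '__'.join(ns)        # Go import alias, e.g. my__pkg__fbs
        import_path = '/'.join(ns)  # original import path, e.g. my/pkg/fbs
        return imports.replace(
            '{} "{}"'.format(name, import_path),
            '{} "a.yandex-team.ru/{}"'.format(name, path))

    block = 'import (\n\tmy__pkg__fbs "my/pkg/fbs"\n)'
    print(rewrite_import(block, 'my.pkg.fbs', 'my/pkg/fbs/go'))
    # import (
    #         my__pkg__fbs "a.yandex-team.ru/my/pkg/fbs/go"
    # )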
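
Note on tar_sources.py above: compression is inferred from the output suffix (.tar.gz/.tgz gives gzip, .bzip2 gives bzip2, anything else is uncompressed), and archive member names depend on --flat and --prefix. A small sketch of the arcname computation (POSIX paths, hypothetical values):

    import os

    def arcname_for(f, input_dir, flat, prefix):
        # --flat keeps only the basename; otherwise the path stays relative
        # to --input. --prefix is prepended in both cases.
        arcname = os.path.basename(f) if flat else os.path.relpath(f, input_dir)
        return os.path.join(prefix, arcname) if prefix else arcname

    print(arcname_for('/src/pkg/mod.py', '/src', flat=False, prefix='py'))  # py/pkg/mod.py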