author     thegeorg <thegeorg@yandex-team.com>   2022-08-19 15:00:44 +0300
committer  thegeorg <thegeorg@yandex-team.com>   2022-08-19 15:00:44 +0300
commit     ad2a1b622d2bf6cf025982846153d9c4c791af2c (patch)
tree       8906addc18a494ece9dff28b2701a37ef4b52bf8 /build/scripts
parent     7b61b052f3baa7e43edca48c373f95b5e5f1c845 (diff)
download   ydb-ad2a1b622d2bf6cf025982846153d9c4c791af2c.tar.gz
Let cmake export determine which build/scripts are mandatory
Diffstat (limited to 'build/scripts')
-rw-r--r--  build/scripts/cgo1_wrapper.py            |  45
-rw-r--r--  build/scripts/check_config_h.py          |  89
-rw-r--r--  build/scripts/clang_wrapper.py           |  53
-rw-r--r--  build/scripts/compile_java.py            | 102
-rwxr-xr-x  build/scripts/configure_file.py          |  59
-rw-r--r--  build/scripts/copy_docs_files_to_dir.py  | 138
-rw-r--r--  build/scripts/extract_docs.py            |  43
-rwxr-xr-x  build/scripts/fetch_from.py              | 375
-rwxr-xr-x  build/scripts/fetch_from_sandbox.py      | 269
-rw-r--r--  build/scripts/fs_tools.py                | 104
-rw-r--r--  build/scripts/gen_py3_reg.py             |  34
-rw-r--r--  build/scripts/gen_py_protos.py           |  67
-rw-r--r--  build/scripts/gen_py_reg.py              |  32
-rw-r--r--  build/scripts/go_fake_include/go_asm.h   |   0
-rw-r--r--  build/scripts/go_tool.py                 | 867
-rw-r--r--  build/scripts/link_dyn_lib.py            | 335
-rw-r--r--  build/scripts/link_exe.py                | 130
-rw-r--r--  build/scripts/link_lib.py                |  85
-rw-r--r--  build/scripts/llvm_opt_wrapper.py        |  18
-rw-r--r--  build/scripts/merge_files.py             |   8
-rw-r--r--  build/scripts/preprocess.py              |  48
-rwxr-xr-x  build/scripts/py_compile.py              |  24
-rw-r--r--  build/scripts/rodata2asm.py              |  31
-rw-r--r--  build/scripts/run_llvm_dsymutil.py       |  11
-rw-r--r--  build/scripts/stdout2stderr.py           |   6
-rw-r--r--  build/scripts/tar_sources.py             |  41
-rw-r--r--  build/scripts/tared_protoc.py            |  31
-rwxr-xr-x  build/scripts/touch.py                   |  50
-rw-r--r--  build/scripts/xargs.py                   |  18
-rw-r--r--  build/scripts/yield_line.py              |   7
30 files changed, 0 insertions, 3120 deletions
diff --git a/build/scripts/cgo1_wrapper.py b/build/scripts/cgo1_wrapper.py
deleted file mode 100644
index 986082f7e9..0000000000
--- a/build/scripts/cgo1_wrapper.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import argparse
-import shutil
-import subprocess
-import sys
-
-
-CGO1_SUFFIX='.cgo1.go'
-
-
-def call(cmd, cwd, env=None):
- # sys.stderr.write('{}\n'.format(' '.join(cmd)))
- return subprocess.call(cmd, stdin=None, stderr=sys.stderr, stdout=sys.stdout, cwd=cwd, env=env)
-
-
-def process_file(source_root, source_prefix, build_root, build_prefix, src_path, comment_prefix):
- dst_path = '{}.tmp'.format(src_path)
- with open(src_path, 'r') as src_file, open(dst_path, 'w') as dst_file:
- for line in src_file:
- if line.startswith(comment_prefix):
- dst_file.write(line.replace(source_root, source_prefix).replace(build_root, build_prefix))
- else:
- dst_file.write(line)
- shutil.move(dst_path, src_path)
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
- parser.add_argument('--build-prefix', default='__ARCADIA_BUILD_ROOT_PREFIX__')
- parser.add_argument('--build-root', required=True)
- parser.add_argument('--cgo1-files', nargs='+', required=True)
- parser.add_argument('--cgo2-files', nargs='+', required=True)
- parser.add_argument('--source-prefix', default='__ARCADIA_SOURCE_ROOT_PREFIX__')
- parser.add_argument('--source-root', required=True)
- parser.add_argument('cgo1_cmd', nargs='*')
- args = parser.parse_args()
-
- exit_code = call(args.cgo1_cmd, args.source_root)
- if exit_code != 0:
- sys.exit(exit_code)
-
- for src_path in args.cgo1_files:
- process_file(args.source_root, args.source_prefix, args.build_root, args.build_prefix, src_path, '//')
-
- for src_path in args.cgo2_files:
- process_file(args.source_root, args.source_prefix, args.build_root, args.build_prefix, src_path, '#line')
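
A minimal sketch (not part of the deleted file) of the rewrite process_file performed; the source path is hypothetical:

    # cgo1 output embeds absolute paths in '//' comments and '#line'
    # directives; the wrapper swapped the real roots for stable prefixes.
    line = '//line /home/user/arcadia/pkg/foo.cgo1.go:42\n'
    print(line.replace('/home/user/arcadia', '__ARCADIA_SOURCE_ROOT_PREFIX__'))
    # -> //line __ARCADIA_SOURCE_ROOT_PREFIX__/pkg/foo.cgo1.go:42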
diff --git a/build/scripts/check_config_h.py b/build/scripts/check_config_h.py
deleted file mode 100644
index 07bc12e230..0000000000
--- a/build/scripts/check_config_h.py
+++ /dev/null
@@ -1,89 +0,0 @@
-import sys
-
-data = """
-#if defined(SIZEOF_LONG)
-static_assert(sizeof(long) == SIZEOF_LONG, "fixme 1");
-#endif
-
-#if defined(SIZEOF_PTHREAD_T)
-#include <pthread.h>
-
-static_assert(sizeof(pthread_t) == SIZEOF_PTHREAD_T, "fixme 2");
-#endif
-
-#if defined(SIZEOF_SIZE_T)
-#include <stddef.h>
-
-static_assert(sizeof(size_t) == SIZEOF_SIZE_T, "fixme 3");
-#endif
-
-#if defined(SIZEOF_TIME_T)
-#include <time.h>
-
-static_assert(sizeof(time_t) == SIZEOF_TIME_T, "fixme 4");
-#endif
-
-#if defined(SIZEOF_UINTPTR_T)
-#include <stdint.h>
-
-static_assert(sizeof(uintptr_t) == SIZEOF_UINTPTR_T, "fixme 5");
-#endif
-
-#if defined(SIZEOF_VOID_P)
-static_assert(sizeof(void*) == SIZEOF_VOID_P, "fixme 6");
-#endif
-
-#if defined(SIZEOF_FPOS_T)
-#include <stdio.h>
-
-static_assert(sizeof(fpos_t) == SIZEOF_FPOS_T, "fixme 7");
-#endif
-
-#if defined(SIZEOF_DOUBLE)
-static_assert(sizeof(double) == SIZEOF_DOUBLE, "fixme 8");
-#endif
-
-#if defined(SIZEOF_LONG_DOUBLE)
-static_assert(sizeof(long double) == SIZEOF_LONG_DOUBLE, "fixme 9");
-#endif
-
-#if defined(SIZEOF_FLOAT)
-static_assert(sizeof(float) == SIZEOF_FLOAT, "fixme 10");
-#endif
-
-#if defined(SIZEOF_INT)
-static_assert(sizeof(int) == SIZEOF_INT, "fixme 11");
-#endif
-
-#if defined(SIZEOF_LONG_LONG)
-static_assert(sizeof(long long) == SIZEOF_LONG_LONG, "fixme 12");
-#endif
-
-#if defined(SIZEOF_OFF_T)
-#include <stdio.h>
-
-static_assert(sizeof(off_t) == SIZEOF_OFF_T, "fixme 13");
-#endif
-
-#if defined(SIZEOF_PID_T)
-#include <unistd.h>
-
-static_assert(sizeof(pid_t) == SIZEOF_PID_T, "fixme 14");
-#endif
-
-#if defined(SIZEOF_SHORT)
-static_assert(sizeof(short) == SIZEOF_SHORT, "fixme 15");
-#endif
-
-#if defined(SIZEOF_WCHAR_T)
-static_assert(sizeof(wchar_t) == SIZEOF_WCHAR_T, "fixme 16");
-#endif
-
-#if defined(SIZEOF__BOOL)
-//TODO
-#endif
-"""
-if __name__ == '__main__':
- with open(sys.argv[2], 'w') as f:
- f.write('#include <' + sys.argv[1] + '>\n\n')
- f.write(data)
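
A sketch of how the generator was driven, assuming a hypothetical header name:

    import subprocess, sys
    # argv[1] is the config header to include, argv[2] the file to write;
    # compiling the output validates every defined SIZEOF_* macro.
    subprocess.check_call([sys.executable, 'check_config_h.py',
                           'contrib/somelib/config.h', 'config_check.cpp'])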
diff --git a/build/scripts/clang_wrapper.py b/build/scripts/clang_wrapper.py
deleted file mode 100644
index af3869f789..0000000000
--- a/build/scripts/clang_wrapper.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import subprocess
-import sys
-
-
-def fix(s):
- # disable dbg DEVTOOLS-2744
- if s == '-g':
- return None
- if s == '/Z7' or s == '/Od' or s == '/Ob0' or s == '/D_DEBUG':
- return None
-
- # disable sanitizers for generated code
- if s.startswith('-fsanitize') or s == '-Dmemory_sanitizer_enabled' or s.startswith('-fsanitize-blacklist'):
- return None
-
-    # strip gcc toolchain flags (appear when cross-compiling)
- if s.startswith('-fabi-version'):
- return None
-
- # remove arguments unknown to clang-cl
- if s == '-fcase-insensitive-paths': # or s == '-fno-lto': # DEVTOOLSSUPPORT-3966
- return None
-
- # Paths under .ya/tools/v3/.../msvc/include are divided with '\'
- return s.replace('\\', '/')
-
-
-def fix_path(p):
-    # str.rfind returns -1 on failure instead of raising ValueError,
-    # so the result must be checked explicitly.
-    i = p.rfind('/bin/clang')
-    if i != -1:
-        p = p[:i] + '/bin/clang-cl'
-    return p
-
-
-if __name__ == '__main__':
- is_on_win = sys.argv[1] == 'yes'
- path = sys.argv[2]
-    # list() keeps args indexable and mutable under Python 3 as well
-    args = list(filter(None, [fix(s) for s in sys.argv[3:]]))
- if is_on_win:
- path = fix_path(path)
- try:
- i = args.index('-emit-llvm')
- args[i:i+1] = ['-Xclang', '-emit-llvm']
- except ValueError:
- pass
- args.append('-fms-compatibility-version=19')
-
- cmd = [path] + args
-
- rc = subprocess.call(cmd, shell=False, stderr=sys.stderr, stdout=sys.stdout)
- sys.exit(rc)
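
The flag filtering is easiest to see on concrete values; a sketch (inputs invented):

    assert fix('-g') is None                             # debug info dropped (DEVTOOLS-2744)
    assert fix('-fsanitize=address') is None             # sanitizers off for generated code
    assert fix(r'C:\msvc\include') == 'C:/msvc/include'  # backslashes normalized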
diff --git a/build/scripts/compile_java.py b/build/scripts/compile_java.py
deleted file mode 100644
index e95869e853..0000000000
--- a/build/scripts/compile_java.py
+++ /dev/null
@@ -1,102 +0,0 @@
-import optparse
-import contextlib
-import os
-import shutil
-import subprocess as sp
-import tarfile
-import zipfile
-import sys
-
-
-def parse_args(args):
- parser = optparse.OptionParser()
- parser.add_option('--javac-bin')
- parser.add_option('--jar-bin')
- parser.add_option('--vcs-mf')
- parser.add_option('--package-prefix')
- parser.add_option('--jar-output')
- parser.add_option('--srcs-jar-output')
- return parser.parse_args(args)
-
-
-def mkdir_p(directory):
- if not os.path.exists(directory):
- os.makedirs(directory)
-
-
-def split_cmd_by_delim(cmd, delim='DELIM'):
- result = [[]]
- for arg in cmd:
- if arg == delim:
- result.append([])
- else:
- result[-1].append(arg)
- return result
-
-
-def main():
- cmd_parts = split_cmd_by_delim(sys.argv)
- assert len(cmd_parts) == 3
- args, javac_opts, peers = cmd_parts
- opts, jsrcs = parse_args(args)
-
- jsrcs += list(filter(lambda x: x.endswith('.jsrc'), peers))
- peers = list(filter(lambda x: not x.endswith('.jsrc'), peers))
-
- sources_dir = 'src'
- mkdir_p(sources_dir)
- for s in jsrcs:
- if s.endswith('.jsrc'):
- with contextlib.closing(tarfile.open(s, 'r')) as tf:
- tf.extractall(sources_dir)
-
- srcs = []
- for r, _, files in os.walk(sources_dir):
- for f in files:
- srcs.append(os.path.join(r, f))
- srcs += jsrcs
- srcs = list(filter(lambda x: x.endswith('.java'), srcs))
-
- classes_dir = 'cls'
- mkdir_p(classes_dir)
- classpath = os.pathsep.join(peers)
-
- if srcs:
- temp_sources_file = 'temp.sources.list'
- with open(temp_sources_file, 'w') as ts:
- ts.write(' '.join(srcs))
- sp.check_call([opts.javac_bin, '-nowarn', '-g', '-classpath', classpath, '-encoding', 'UTF-8', '-d', classes_dir] + javac_opts + ['@' + temp_sources_file])
-
- for s in jsrcs:
- if s.endswith('-sources.jar'):
- with zipfile.ZipFile(s) as zf:
- zf.extractall(sources_dir)
-
- elif s.endswith('.jar'):
- with zipfile.ZipFile(s) as zf:
- zf.extractall(classes_dir)
-
- if opts.vcs_mf:
- sp.check_call([opts.jar_bin, 'cfm', opts.jar_output, opts.vcs_mf, os.curdir], cwd=classes_dir)
- else:
- sp.check_call([opts.jar_bin, 'cfM', opts.jar_output, os.curdir], cwd=classes_dir)
-
- if opts.srcs_jar_output:
- for s in jsrcs:
- if s.endswith('.java'):
- if opts.package_prefix:
- d = os.path.join(sources_dir, *(opts.package_prefix.split('.') + [os.path.basename(s)]))
-
- else:
- d = os.path.join(sources_dir, os.path.basename(s))
-
- shutil.copyfile(s, d)
-
- if opts.vcs_mf:
- sp.check_call([opts.jar_bin, 'cfm', opts.srcs_jar_output, opts.vcs_mf, os.curdir], cwd=sources_dir)
- else:
- sp.check_call([opts.jar_bin, 'cfM', opts.srcs_jar_output, os.curdir], cwd=sources_dir)
-
-
-if __name__ == '__main__':
- main()
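
The DELIM convention above defines the whole command-line contract; a quick sketch:

    # argv holds three DELIM-separated sections: own args, javac options, peers
    print(split_cmd_by_delim(['--jar-bin', 'jar', 'DELIM', '-Xlint', 'DELIM', 'dep.jar']))
    # -> [['--jar-bin', 'jar'], ['-Xlint'], ['dep.jar']]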
diff --git a/build/scripts/configure_file.py b/build/scripts/configure_file.py
deleted file mode 100755
index 1873ed70eb..0000000000
--- a/build/scripts/configure_file.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python2.7
-
-import sys
-import os.path
-import re
-
-cmakeDef01 = "#cmakedefine01"
-cmakeDef = "#cmakedefine"
-
-
-def replaceLine(l, varDict, define):
- words = l.split()
- if words:
- if words[0] == cmakeDef:
- sPos = l.find(cmakeDef)
- ePos = sPos + len(cmakeDef)
- l = l[:sPos] + define + l[ePos:] + '\n'
- if words[0] == cmakeDef01:
- var = words[1]
- cmakeValue = varDict.get(var)
- if cmakeValue == 'yes':
- val = '1'
- else:
- val = '0'
- sPos = l.find(cmakeDef01)
- ePos = l.find(var) + len(var)
- l = l[:sPos] + define + ' ' + var + ' ' + val + l[ePos + 1:] + '\n'
-
- finder = re.compile(".*?(@[a-zA-Z0-9_]+@).*")
- while True:
- re_result = finder.match(l)
- if not re_result:
- return l
- key = re_result.group(1)[1:-1]
- l = l[:re_result.start(1)] + varDict.get(key, '') + l[re_result.end(1):]
-
-
-def main(inputPath, outputPath, varDict):
- define = '#define' if os.path.splitext(outputPath)[1] != '.asm' else '%define'
- with open(outputPath, 'w') as output:
- with open(inputPath, 'r') as input:
- for l in input:
- output.write(replaceLine(l, varDict, define))
-
-
-def usage():
- print "usage: configure_file.py inputPath outputPath key1=value1 ..."
- exit(1)
-
-
-if __name__ == "__main__":
- if len(sys.argv) < 3:
- usage()
- varDict = {}
- for x in sys.argv[3:]:
- key, value = str(x).split('=', 1)
- varDict[key] = value
-
- main(sys.argv[1], sys.argv[2], varDict)
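
Two representative substitutions traced by hand (a sketch; the variables are invented):

    varDict = {'HAVE_FOO': 'yes', 'VERSION': '1.2'}
    print(replaceLine('#cmakedefine01 HAVE_FOO\n', varDict, '#define'))
    # -> #define HAVE_FOO 1
    print(replaceLine('#define VER "@VERSION@"\n', varDict, '#define'))
    # -> #define VER "1.2"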
diff --git a/build/scripts/copy_docs_files_to_dir.py b/build/scripts/copy_docs_files_to_dir.py
deleted file mode 100644
index 27fd171ee6..0000000000
--- a/build/scripts/copy_docs_files_to_dir.py
+++ /dev/null
@@ -1,138 +0,0 @@
-import argparse
-import errno
-import os
-import process_command_files as pcf
-import shutil
-import sys
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('--bin-dir', nargs='*')
- parser.add_argument('--build-root', required=True)
- parser.add_argument('--dest-dir', required=True)
- parser.add_argument('--docs-dir', action='append', nargs=2, dest='docs_dirs', default=None)
- parser.add_argument('--existing', choices=('skip', 'overwrite'), default='overwrite')
- parser.add_argument('--source-root', required=True)
- parser.add_argument('--src-dir', action='append', nargs='*', dest='src_dirs', default=None)
- parser.add_argument('files', nargs='*')
- return parser.parse_args(pcf.get_args(sys.argv[1:]))
-
-
-def makedirs(dirname):
- try:
- os.makedirs(dirname)
- except OSError as e:
- if e.errno == errno.EEXIST and os.path.isdir(dirname):
- pass
- else:
- raise
-
-
-def copy_file(src, dst, overwrite=False, orig_path=None, generated=False):
- if os.path.exists(dst) and not overwrite:
- return
-
- makedirs(os.path.dirname(dst))
-
- with open(src, 'r') as fsrc, open(dst, 'w') as fdst:
- # if (orig_path or generated) and src.endswith('.md'):
- # fdst.write('---\n{}\n\n---\n'.format('generated: true' if generated else 'vcsPath: {}'.format(orig_path)))
- shutil.copyfileobj(fsrc, fdst)
-
-
-def main():
- args = parse_args()
-
- dest_dir = os.path.normpath(args.dest_dir)
- makedirs(dest_dir)
-
- source_root = os.path.normpath(args.source_root) + os.path.sep
- build_root = os.path.normpath(args.build_root) + os.path.sep
-
- is_overwrite_existing = args.existing == 'overwrite'
-
- if args.docs_dirs:
- for item in args.docs_dirs:
- assert len(item) == 2
- docs_dir, nm = item[0], item[1]
- assert not os.path.isabs(docs_dir)
- if nm and nm != '.':
- assert not os.path.isabs(nm)
- dst = os.path.join(dest_dir, nm)
- else:
- dst = dest_dir
-
- abs_docs_dir = os.path.join(args.source_root, docs_dir)
-
- for root, _, files in os.walk(abs_docs_dir):
- for f in files:
- if os.path.islink(os.path.join(root, f)):
- continue
- file_src = os.path.join(root, f)
- assert file_src.startswith(source_root)
- file_dst = os.path.join(dst, os.path.relpath(root, abs_docs_dir), f)
- copy_file(file_src, file_dst, overwrite=is_overwrite_existing, orig_path=file_src[len(source_root):])
-
- if args.src_dirs:
- for item in args.src_dirs:
- assert len(item) > 1
- src_dir, nm = os.path.normpath(item[0]), item[1]
- assert os.path.isabs(src_dir)
- if nm and nm != '.':
- assert not os.path.isabs(nm)
- dst = os.path.join(dest_dir, nm)
- else:
- dst = dest_dir
-
- if src_dir.startswith(source_root):
- root = source_root
- is_from_source_root = True
- else:
- assert src_dir.startswith(build_root)
- root = build_root
- is_from_source_root = False
-
- for f in item[2:]:
- file_src = os.path.normpath(f)
- assert file_src.startswith(root)
- rel_path = file_src[len(root):] if is_from_source_root else None
- file_dst = os.path.join(dst, file_src[len(src_dir):])
- copy_file(file_src, file_dst, overwrite=is_overwrite_existing, orig_path=rel_path)
-
- if args.bin_dir:
- assert len(args.bin_dir) > 1
- bin_dir, bin_dir_namespace = os.path.normpath(args.bin_dir[0]) + os.path.sep, args.bin_dir[1]
- assert bin_dir.startswith(build_root)
- if bin_dir_namespace and bin_dir_namespace != '.':
- assert not os.path.isabs(bin_dir_namespace)
- dst = os.path.join(dest_dir, bin_dir_namespace)
- else:
- dst = dest_dir
-
- for file_src in args.bin_dir[2:]:
- assert os.path.isfile(file_src)
- assert file_src.startswith(bin_dir)
- file_dst = os.path.join(dst, file_src[len(bin_dir):])
- copy_file(file_src, file_dst, overwrite=is_overwrite_existing, orig_path=None)
-
- for src in args.files:
- generated = False
- file_src = os.path.normpath(src)
- assert os.path.isfile(file_src), 'File [{}] does not exist...'.format(file_src)
- rel_path = file_src
- if file_src.startswith(source_root):
- rel_path = file_src[len(source_root):]
- elif file_src.startswith(build_root):
- # generated = True
- # rel_path = file_src[len(build_root):]
- rel_path = None
- else:
- raise Exception('Unexpected file path [{}].'.format(file_src))
- assert not os.path.isabs(rel_path)
- file_dst = os.path.join(args.dest_dir, rel_path)
- copy_file(file_src, file_dst, is_overwrite_existing, rel_path, generated)
-
-
-if __name__ == '__main__':
- main()
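
A hypothetical invocation matching the argparse definition above (all paths invented):

    import subprocess, sys
    subprocess.check_call([
        sys.executable, 'copy_docs_files_to_dir.py',
        '--source-root', '/arcadia', '--build-root', '/arcadia/build',
        '--dest-dir', 'out/docs',
        '--docs-dir', 'docs/ru', 'ru',   # copy <source-root>/docs/ru under out/docs/ru
        '--existing', 'skip',            # keep files already present in the destination
    ])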
diff --git a/build/scripts/extract_docs.py b/build/scripts/extract_docs.py
deleted file mode 100644
index 20e8311346..0000000000
--- a/build/scripts/extract_docs.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import argparse
-import os
-import process_command_files as pcf
-import tarfile
-import sys
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('--dest-dir', required=True)
- parser.add_argument('--skip-prefix', dest='skip_prefixes', action='append', default=[])
- parser.add_argument('docs', nargs='*')
- return parser.parse_args(pcf.get_args(sys.argv[1:]))
-
-
-def main():
- args = parse_args()
-
- prefixes = ['{}{}'.format(os.path.normpath(p), os.path.sep) for p in args.skip_prefixes]
-
- def _valid_docslib(path):
- base = os.path.basename(path)
- return base.endswith(('.docslib', '.docslib.fake')) or base == 'preprocessed.tar.gz'
-
- for src in [p for p in args.docs if _valid_docslib(p)]:
- if src == 'preprocessed.tar.gz':
- rel_dst = os.path.dirname(os.path.normpath(src))
- for prefix in prefixes:
- if src.startswith(prefix):
- rel_dst = rel_dst[len(prefix):]
- continue
- assert not os.path.isabs(rel_dst)
- dest_dir = os.path.join(args.dest_dir, rel_dst)
- else:
- dest_dir = args.dest_dir
- if not os.path.exists(dest_dir):
- os.makedirs(dest_dir)
- with tarfile.open(src, 'r') as tar_file:
- tar_file.extractall(dest_dir)
-
-
-if __name__ == '__main__':
- main()
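
The --skip-prefix handling reduces to plain slicing, as the branch above is written; a sketch with invented paths:

    import os
    src = 'build/docs/ru/preprocessed.tar.gz'
    prefix = 'build/'                    # one entry from --skip-prefix
    rel_dst = os.path.dirname(src)       # 'build/docs/ru'
    if src.startswith(prefix):
        rel_dst = rel_dst[len(prefix):]  # 'docs/ru' - extracted under <dest-dir>/docs/ru
    print(rel_dst)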
diff --git a/build/scripts/fetch_from.py b/build/scripts/fetch_from.py
deleted file mode 100755
index aa04f816b5..0000000000
--- a/build/scripts/fetch_from.py
+++ /dev/null
@@ -1,375 +0,0 @@
-import datetime as dt
-import errno
-import hashlib
-import json
-import logging
-import os
-import platform
-import random
-import shutil
-import socket
-import string
-import sys
-import tarfile
-import urllib2
-
-import retry
-
-
-def make_user_agent():
- return 'fetch_from: {host}'.format(host=socket.gethostname())
-
-
-def add_common_arguments(parser):
- parser.add_argument('--copy-to') # used by jbuild in fetch_resource
- parser.add_argument('--rename-to') # used by test_node in inject_mds_resource_to_graph
- parser.add_argument('--copy-to-dir')
- parser.add_argument('--untar-to')
- parser.add_argument('--rename', action='append', default=[], metavar='FILE', help='rename FILE to the corresponding output')
- parser.add_argument('--executable', action='store_true', help='make outputs executable')
- parser.add_argument('--log-path')
- parser.add_argument('-v', '--verbose', action='store_true', default=os.environ.get('YA_VERBOSE_FETCHER'), help='increase stderr verbosity')
- parser.add_argument('outputs', nargs='*', default=[])
-
-
-def ensure_dir(path):
- if not (path == '' or os.path.isdir(path)):
- os.makedirs(path)
-
-
-# Reference code: library/python/fs/__init__.py
-def hardlink_or_copy(src, dst):
- ensure_dir(os.path.dirname(dst))
-
- if os.name == 'nt':
- shutil.copy(src, dst)
- else:
- try:
- os.link(src, dst)
- except OSError as e:
- if e.errno == errno.EEXIST:
- return
- elif e.errno in (errno.EXDEV, errno.EMLINK, errno.EINVAL, errno.EACCES):
- sys.stderr.write("Can't make hardlink (errno={}) - fallback to copy: {} -> {}\n".format(e.errno, src, dst))
- shutil.copy(src, dst)
- else:
- raise
-
-
-def rename_or_copy_and_remove(src, dst):
- ensure_dir(os.path.dirname(dst))
-
- try:
- os.rename(src, dst)
- except OSError:
- shutil.copy(src, dst)
- os.remove(src)
-
-
-class BadChecksumFetchError(Exception):
- pass
-
-
-class IncompleteFetchError(Exception):
- pass
-
-
-class ResourceUnpackingError(Exception):
- pass
-
-
-class ResourceIsDirectoryError(Exception):
- pass
-
-
-class OutputIsDirectoryError(Exception):
- pass
-
-
-class OutputNotExistError(Exception):
- pass
-
-
-def setup_logging(args, base_name):
- def makedirs(path):
- try:
- os.makedirs(path)
- except OSError:
- pass
-
- if args.log_path:
- log_file_name = args.log_path
- else:
- log_file_name = base_name + ".log"
-
- args.abs_log_path = os.path.abspath(log_file_name)
- makedirs(os.path.dirname(args.abs_log_path))
- logging.basicConfig(filename=args.abs_log_path, level=logging.DEBUG)
- if args.verbose:
- logging.getLogger().addHandler(logging.StreamHandler(sys.stderr))
-
-
-def is_temporary(e):
-
- def is_broken(e):
- return isinstance(e, urllib2.HTTPError) and e.code in (410, 404)
-
- if is_broken(e):
- return False
-
- if isinstance(e, (BadChecksumFetchError, IncompleteFetchError, urllib2.URLError, socket.error)):
- return True
-
- import error
-
- return error.is_temporary_error(e)
-
-
-def uniq_string_generator(size=6, chars=string.ascii_lowercase + string.digits):
- return ''.join(random.choice(chars) for _ in range(size))
-
-
-def report_to_snowden(value):
- def inner():
- body = {
- 'namespace': 'ygg',
- 'key': 'fetch-from-sandbox',
- 'value': json.dumps(value),
- }
-
- urllib2.urlopen(
- 'https://back-snowden.qloud.yandex-team.ru/report/add',
- json.dumps([body, ]),
- timeout=5,
- )
-
- try:
- inner()
- except Exception as e:
- logging.warning('report_to_snowden failed: %s', e)
-
-
-def copy_stream(read, *writers, **kwargs):
- chunk_size = kwargs.get('size', 1024*1024)
- while True:
- data = read(chunk_size)
- if not data:
- break
- for write in writers:
- write(data)
-
-
-def md5file(fname):
- res = hashlib.md5()
- with open(fname, 'rb') as f:
- copy_stream(f.read, res.update)
- return res.hexdigest()
-
-
-def git_like_hash_with_size(filepath):
- """
- Calculate git like hash for path
- """
- sha = hashlib.sha1()
-
- file_size = 0
-
- with open(filepath, 'rb') as f:
- while True:
- block = f.read(2 ** 16)
-
- if not block:
- break
-
- file_size += len(block)
- sha.update(block)
-
- sha.update('\0')
- sha.update(str(file_size))
-
- return sha.hexdigest(), file_size
-
-
-def size_printer(display_name, size):
- sz = [0]
- last_stamp = [dt.datetime.now()]
-
- def printer(chunk):
- sz[0] += len(chunk)
- now = dt.datetime.now()
- if last_stamp[0] + dt.timedelta(seconds=10) < now:
- if size:
- print >>sys.stderr, "##status##{} - [[imp]]{:.1f}%[[rst]]".format(display_name, 100.0 * sz[0] / size if size else 0)
- last_stamp[0] = now
-
- return printer
-
-
-def fetch_url(url, unpack, resource_file_name, expected_md5=None, expected_sha1=None, tries=10, writers=None):
- logging.info('Downloading from url %s name %s and expected md5 %s', url, resource_file_name, expected_md5)
- tmp_file_name = uniq_string_generator()
-
- request = urllib2.Request(url, headers={'User-Agent': make_user_agent()})
- req = retry.retry_func(lambda: urllib2.urlopen(request, timeout=30), tries=tries, delay=5, backoff=1.57079)
- logging.debug('Headers: %s', req.headers.headers)
- expected_file_size = int(req.headers.get('Content-Length', 0))
- real_md5 = hashlib.md5()
- real_sha1 = hashlib.sha1()
-
- with open(tmp_file_name, 'wb') as fp:
- copy_stream(
- req.read,
- fp.write,
- real_md5.update,
- real_sha1.update,
- size_printer(resource_file_name, expected_file_size),
- *([] if writers is None else writers)
- )
-
- real_md5 = real_md5.hexdigest()
- real_file_size = os.path.getsize(tmp_file_name)
- real_sha1.update('\0')
- real_sha1.update(str(real_file_size))
- real_sha1 = real_sha1.hexdigest()
-
- if unpack:
- tmp_dir = tmp_file_name + '.dir'
- os.makedirs(tmp_dir)
- with tarfile.open(tmp_file_name, mode="r|gz") as tar:
- tar.extractall(tmp_dir)
- tmp_file_name = os.path.join(tmp_dir, resource_file_name)
- real_md5 = md5file(tmp_file_name)
-
- logging.info('File size %s (expected %s)', real_file_size, expected_file_size or "UNKNOWN")
- logging.info('File md5 %s (expected %s)', real_md5, expected_md5)
- logging.info('File sha1 %s (expected %s)', real_sha1, expected_sha1)
-
- if expected_md5 and real_md5 != expected_md5:
- report_to_snowden(
- {
- 'headers': req.headers.headers,
- 'expected_md5': expected_md5,
- 'real_md5': real_md5
- }
- )
-
- raise BadChecksumFetchError(
- 'Downloaded {}, but expected {} for {}'.format(
- real_md5,
- expected_md5,
- url,
- )
- )
-
- if expected_sha1 and real_sha1 != expected_sha1:
- report_to_snowden(
- {
- 'headers': req.headers.headers,
- 'expected_sha1': expected_sha1,
- 'real_sha1': real_sha1
- }
- )
-
- raise BadChecksumFetchError(
- 'Downloaded {}, but expected {} for {}'.format(
- real_sha1,
- expected_sha1,
- url,
- )
- )
-
- if expected_file_size and expected_file_size != real_file_size:
- report_to_snowden({'headers': req.headers.headers, 'file_size': real_file_size})
-
- raise IncompleteFetchError(
- 'Downloaded {}, but expected {} for {}'.format(
- real_file_size,
- expected_file_size,
- url,
- )
- )
-
- return tmp_file_name
-
-
-def chmod(filename, mode):
- if platform.system().lower() == 'windows':
- # https://docs.microsoft.com/en-us/windows/win32/fileio/hard-links-and-junctions:
-        # resetting the read-only attribute for removal is hard when there are multiple hardlinks
- return
- stat = os.stat(filename)
- if stat.st_mode & 0o777 != mode:
- try:
- os.chmod(filename, mode)
- except OSError:
- import pwd
- sys.stderr.write("{} st_mode: {} pwuid: {}\n".format(filename, stat.st_mode, pwd.getpwuid(os.stat(filename).st_uid)))
- raise
-
-
-def process(fetched_file, file_name, args, remove=True):
- assert len(args.rename) <= len(args.outputs), (
- 'too few outputs to rename', args.rename, 'into', args.outputs)
-
- # Forbid changes to the loaded resource
- chmod(fetched_file, 0o444)
-
- if not os.path.isfile(fetched_file):
- raise ResourceIsDirectoryError('Resource must be a file, not a directory: %s' % fetched_file)
-
- if args.copy_to:
- hardlink_or_copy(fetched_file, args.copy_to)
- if not args.outputs:
- args.outputs = [args.copy_to]
-
- if args.rename_to:
- args.rename.append(fetched_file)
- if not args.outputs:
- args.outputs = [args.rename_to]
-
- if args.copy_to_dir:
- hardlink_or_copy(fetched_file, os.path.join(args.copy_to_dir, file_name))
-
- if args.untar_to:
- ensure_dir(args.untar_to)
- # Extract only requested files
- try:
- with tarfile.open(fetched_file, mode='r:*') as tar:
- inputs = set(map(os.path.normpath, args.rename + args.outputs[len(args.rename):]))
- members = [entry for entry in tar if os.path.normpath(os.path.join(args.untar_to, entry.name)) in inputs]
- tar.extractall(args.untar_to, members=members)
- # Forbid changes to the loaded resource data
- for root, _, files in os.walk(args.untar_to):
- for filename in files:
- chmod(os.path.join(root, filename), 0o444)
- except tarfile.ReadError as e:
- logging.exception(e)
- raise ResourceUnpackingError('File {} cannot be untared'.format(fetched_file))
-
- for src, dst in zip(args.rename, args.outputs):
- if src == 'RESOURCE':
- src = fetched_file
- if os.path.abspath(src) == os.path.abspath(fetched_file):
- logging.info('Copying %s to %s', src, dst)
- hardlink_or_copy(src, dst)
- else:
- logging.info('Renaming %s to %s', src, dst)
- if os.path.exists(dst):
- raise ResourceUnpackingError("Target file already exists ({} -> {})".format(src, dst))
- if remove:
- rename_or_copy_and_remove(src, dst)
- else:
- hardlink_or_copy(src, dst)
-
- for path in args.outputs:
- if not os.path.exists(path):
- raise OutputNotExistError('Output does not exist: %s' % os.path.abspath(path))
- if not os.path.isfile(path):
- raise OutputIsDirectoryError('Output must be a file, not a directory: %s' % os.path.abspath(path))
- if args.executable:
- chmod(path, os.stat(path).st_mode | 0o111)
- if os.path.abspath(path) == os.path.abspath(fetched_file):
- remove = False
-
- if remove:
- os.remove(fetched_file)
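
copy_stream is the workhorse here: a single read() fanned out to several sinks in one pass. A self-contained sketch (file names hypothetical):

    import hashlib
    md5, sha1 = hashlib.md5(), hashlib.sha1()
    with open('src.bin', 'rb') as fsrc, open('dst.bin', 'wb') as fdst:
        # mirrors fetch_url: write the payload and hash it in the same pass
        copy_stream(fsrc.read, fdst.write, md5.update, sha1.update)
    print(md5.hexdigest(), sha1.hexdigest())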
diff --git a/build/scripts/fetch_from_sandbox.py b/build/scripts/fetch_from_sandbox.py
deleted file mode 100755
index a99542e174..0000000000
--- a/build/scripts/fetch_from_sandbox.py
+++ /dev/null
@@ -1,269 +0,0 @@
-import itertools
-import json
-import logging
-import argparse
-import os
-import random
-import subprocess
-import sys
-import time
-import urllib2
-import uuid
-
-import fetch_from
-
-
-ORIGIN_SUFFIX = '?origin=fetch-from-sandbox'
-MDS_PREFIX = 'http://storage-int.mds.yandex.net/get-sandbox/'
-TEMPORARY_ERROR_CODES = (429, 500, 503, 504)
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- fetch_from.add_common_arguments(parser)
- parser.add_argument('--resource-id', type=int, required=True)
- parser.add_argument('--custom-fetcher')
- parser.add_argument('--resource-file')
- return parser.parse_args()
-
-
-class ResourceInfoError(Exception):
- pass
-
-
-class UnsupportedProtocolException(Exception):
- pass
-
-
-def _sky_path():
- return "/usr/local/bin/sky"
-
-
-def _is_skynet_avaliable():
- if not os.path.exists(_sky_path()):
- return False
- try:
- subprocess.check_output([_sky_path(), "--version"])
- return True
- except subprocess.CalledProcessError:
- return False
- except OSError:
- return False
-
-
-def download_by_skynet(resource_info, file_name):
- def sky_get(skynet_id, target_dir, timeout=None):
- cmd_args = [_sky_path(), 'get', "-N", "Backbone", "--user", "--wait", "--dir", target_dir, skynet_id]
- if timeout is not None:
- cmd_args += ["--timeout", str(timeout)]
- logging.info('Call skynet with args: %s', cmd_args)
- stdout = subprocess.check_output(cmd_args).strip()
- logging.debug('Skynet call with args %s is finished, result is %s', cmd_args, stdout)
- return stdout
-
- if not _is_skynet_avaliable():
- raise UnsupportedProtocolException("Skynet is not available")
-
- skynet_id = resource_info.get("skynet_id")
- if not skynet_id:
- raise ValueError("Resource does not have skynet_id")
-
- temp_dir = os.path.abspath(fetch_from.uniq_string_generator())
- os.mkdir(temp_dir)
- sky_get(skynet_id, temp_dir)
- return os.path.join(temp_dir, file_name)
-
-
-def _urlopen(url, data=None, headers=None):
- n = 10
- tout = 30
- started = time.time()
- reqid = uuid.uuid4()
-
- request = urllib2.Request(url, data=data, headers=headers or {})
- request.add_header('X-Request-Timeout', str(tout))
- request.add_header('X-Request-Id', str(reqid))
- request.add_header('User-Agent', 'fetch_from_sandbox.py')
- for i in xrange(n):
- retry_after = i
- try:
- request.add_header('X-Request-Duration', str(int(time.time() - started)))
- return urllib2.urlopen(request, timeout=tout).read()
-
- except urllib2.HTTPError as e:
- logging.warning('failed to fetch URL %s with HTTP code %d: %s', url, e.code, e)
- retry_after = int(e.headers.get('Retry-After', str(retry_after)))
-
- if e.code not in TEMPORARY_ERROR_CODES:
- raise
-
- except Exception as e:
- logging.warning('failed to fetch URL %s: %s', url, e)
-
- if i + 1 == n:
- raise e
-
- time.sleep(retry_after)
-
-
-def _query(url):
- return json.loads(_urlopen(url))
-
-
-_SANDBOX_BASE_URL = 'https://sandbox.yandex-team.ru/api/v1.0'
-
-
-def get_resource_info(resource_id, touch=False, no_links=False):
- url = ''.join((_SANDBOX_BASE_URL, '/resource/', str(resource_id)))
- headers = {}
- if touch:
- headers.update({'X-Touch-Resource': '1'})
- if no_links:
- headers.update({'X-No-Links': '1'})
- return _query(url)
-
-
-def get_resource_http_links(resource_id):
- url = ''.join((_SANDBOX_BASE_URL, '/resource/', str(resource_id), '/data/http'))
- return [r['url'] + ORIGIN_SUFFIX for r in _query(url)]
-
-
-def fetch_via_script(script, resource_id):
- return subprocess.check_output([script, str(resource_id)]).rstrip()
-
-
-def fetch(resource_id, custom_fetcher):
- try:
- resource_info = get_resource_info(resource_id, touch=True, no_links=True)
- except Exception as e:
- sys.stderr.write(
- "Failed to fetch resource {}: {}\n".format(resource_id, str(e))
- )
- raise
-
- if resource_info.get('state', 'DELETED') != 'READY':
- raise ResourceInfoError("Resource {} is not READY".format(resource_id))
-
- logging.info('Resource %s info %s', str(resource_id), json.dumps(resource_info))
-
- resource_file_name = os.path.basename(resource_info["file_name"])
- expected_md5 = resource_info.get('md5')
-
- proxy_link = resource_info['http']['proxy'] + ORIGIN_SUFFIX
-
- mds_id = resource_info.get('attributes', {}).get('mds')
- mds_link = MDS_PREFIX + mds_id if mds_id else None
-
- def get_storage_links():
- storage_links = get_resource_http_links(resource_id)
- random.shuffle(storage_links)
- return storage_links
-
- skynet = _is_skynet_avaliable()
-
- if not skynet:
- logging.info("Skynet is not available, will try other protocols")
-
- def iter_tries():
- if skynet:
- yield lambda: download_by_skynet(resource_info, resource_file_name)
-
- if custom_fetcher:
- yield lambda: fetch_via_script(custom_fetcher, resource_id)
-
- # Don't try too hard here: we will get back to proxy later on
- yield lambda: fetch_from.fetch_url(proxy_link, False, resource_file_name, expected_md5, tries=2)
- for x in get_storage_links():
-            # Don't spend too much time connecting to a single host
- yield lambda: fetch_from.fetch_url(x, False, resource_file_name, expected_md5, tries=1)
- if mds_link is not None:
- # Don't try too hard here: we will get back to MDS later on
- yield lambda: fetch_from.fetch_url(mds_link, True, resource_file_name, expected_md5, tries=2)
- yield lambda: fetch_from.fetch_url(proxy_link, False, resource_file_name, expected_md5)
- if mds_link is not None:
- yield lambda: fetch_from.fetch_url(mds_link, True, resource_file_name, expected_md5)
-
- if resource_info.get('attributes', {}).get('ttl') != 'inf':
- sys.stderr.write('WARNING: resource {} ttl is not "inf".\n'.format(resource_id))
-
- exc_info = None
- for i, action in enumerate(itertools.islice(iter_tries(), 0, 10)):
- try:
- fetched_file = action()
- break
- except UnsupportedProtocolException:
- pass
- except subprocess.CalledProcessError as e:
- logging.warning('failed to fetch resource %s with subprocess: %s', resource_id, e)
- time.sleep(i)
- except urllib2.HTTPError as e:
- logging.warning('failed to fetch resource %s with HTTP code %d: %s', resource_id, e.code, e)
- if e.code not in TEMPORARY_ERROR_CODES:
- exc_info = exc_info or sys.exc_info()
- time.sleep(i)
- except Exception as e:
- logging.exception(e)
- exc_info = exc_info or sys.exc_info()
- time.sleep(i)
- else:
- if exc_info:
- raise exc_info[0], exc_info[1], exc_info[2]
- else:
- raise Exception("No available protocol and/or server to fetch resource")
-
- return fetched_file, resource_info['file_name']
-
-
-def _get_resource_info_from_file(resource_file):
- if resource_file is None or not os.path.exists(resource_file):
- return None
-
- RESOURCE_INFO_JSON = "resource_info.json"
- RESOURCE_CONTENT_FILE_NAME = "resource"
-
- resource_dir, resource_file = os.path.split(resource_file)
- if resource_file != RESOURCE_CONTENT_FILE_NAME:
- return None
-
- resource_json = os.path.join(resource_dir, RESOURCE_INFO_JSON)
- if not os.path.isfile(resource_json):
- return None
-
- try:
- with open(resource_json, 'r') as j:
- resource_info = json.load(j)
- resource_info['file_name'] # check consistency
- return resource_info
- except:
- logging.debug('Invalid %s in %s', RESOURCE_INFO_JSON, resource_dir)
-
- return None
-
-
-def main(args):
- custom_fetcher = os.environ.get('YA_CUSTOM_FETCHER')
-
- resource_info = _get_resource_info_from_file(args.resource_file)
- if resource_info:
- fetched_file = args.resource_file
- file_name = resource_info['file_name']
- else:
- # This code should be merged to ya and removed.
- fetched_file, file_name = fetch(args.resource_id, custom_fetcher)
-
- fetch_from.process(fetched_file, file_name, args, remove=not custom_fetcher and not resource_info)
-
-
-if __name__ == '__main__':
- args = parse_args()
- fetch_from.setup_logging(args, os.path.basename(__file__))
-
- try:
- main(args)
- except Exception as e:
- logging.exception(e)
- print >>sys.stderr, open(args.abs_log_path).read()
- sys.stderr.flush()
-
- import error
- sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
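
The REST traffic above reduces to one URL scheme; a sketch with a made-up resource id:

    # GET https://sandbox.yandex-team.ru/api/v1.0/resource/<id>
    info = get_resource_info(123456789, touch=True, no_links=True)
    if info.get('state') == 'READY':
        print(info['file_name'], info.get('md5'))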
diff --git a/build/scripts/fs_tools.py b/build/scripts/fs_tools.py
deleted file mode 100644
index dec4c349c8..0000000000
--- a/build/scripts/fs_tools.py
+++ /dev/null
@@ -1,104 +0,0 @@
-from __future__ import print_function
-
-import os
-import platform
-import sys
-import shutil
-import errno
-
-import process_command_files as pcf
-
-
-def link_or_copy(src, dst):
- try:
- if platform.system().lower() == 'windows':
- shutil.copy(src, dst)
- else:
- os.link(src, dst)
- except OSError as e:
- if e.errno == errno.EEXIST:
- print('link_or_copy: destination file already exists: {}'.format(dst), file=sys.stderr)
- if e.errno == errno.ENOENT:
-            print('link_or_copy: source file doesn\'t exist: {}'.format(src), file=sys.stderr)
- raise
-
-
-if __name__ == '__main__':
- mode = sys.argv[1]
- args = pcf.get_args(sys.argv[2:])
-
- if mode == 'copy':
- shutil.copy(args[0], args[1])
- elif mode == 'copy_tree_no_link':
- dst = args[1]
- shutil.copytree(args[0], dst, ignore=lambda dirname, names: [n for n in names if os.path.islink(os.path.join(dirname, n))])
- elif mode == 'copy_files':
- src = args[0]
- dst = args[1]
- files = open(args[2]).read().strip().split()
- for f in files:
- s = os.path.join(src, f)
- d = os.path.join(dst, f)
- if os.path.exists(d):
- continue
- try:
- os.makedirs(os.path.dirname(d))
- except OSError:
- pass
- shutil.copy(s, d)
- elif mode == 'copy_all_files':
- src = args[0]
- dst = args[1]
- for root, _, files in os.walk(src):
- for f in files:
- if os.path.islink(os.path.join(root, f)):
- continue
- file_dst = os.path.join(dst, os.path.relpath(root, src), f)
- if os.path.exists(file_dst):
- continue
- try:
- os.makedirs(os.path.dirname(file_dst))
- except OSError:
- pass
- shutil.copy(os.path.join(root, f), file_dst)
- elif mode == 'rename_if_exists':
- if os.path.exists(args[0]):
- shutil.move(args[0], args[1])
- elif mode == 'rename':
- targetdir = os.path.dirname(args[1])
- if targetdir and not os.path.exists(targetdir):
- os.makedirs(os.path.dirname(args[1]))
- shutil.move(args[0], args[1])
- elif mode == 'remove':
- for f in args:
- try:
- if os.path.isfile(f) or os.path.islink(f):
- os.remove(f)
- else:
- shutil.rmtree(f)
- except OSError:
- pass
- elif mode == 'link_or_copy':
- link_or_copy(args[0], args[1])
- elif mode == 'link_or_copy_to_dir':
- assert len(args) > 1
- start = 0
- if args[0] == '--no-check':
-            if len(args) == 2:  # only '--no-check' and the destination, nothing to copy
- sys.exit()
- start = 1
- dst = args[-1]
- for src in args[start:-1]:
- link_or_copy(src, os.path.join(dst, os.path.basename(src)))
- elif mode == 'cat':
- with open(args[0], 'w') as dst:
- for input_name in args[1:]:
- with open(input_name) as src:
- dst.write(src.read())
- elif mode == 'md':
- try:
- os.makedirs(args[0])
- except OSError:
- pass
- else:
- raise Exception('unsupported tool %s' % mode)
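
fs_tools.py is a mode-dispatching CLI; two hypothetical invocations of the modes handled above:

    import subprocess, sys
    # 'cat' concatenates the remaining files into the first argument
    subprocess.check_call([sys.executable, 'fs_tools.py', 'cat', 'out.txt', 'a.txt', 'b.txt'])
    # 'md' is an idempotent mkdir -p
    subprocess.check_call([sys.executable, 'fs_tools.py', 'md', 'some/new/dir'])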
diff --git a/build/scripts/gen_py3_reg.py b/build/scripts/gen_py3_reg.py
deleted file mode 100644
index 149c094898..0000000000
--- a/build/scripts/gen_py3_reg.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import sys
-
-template = '''
-struct PyObject;
-extern "C" int PyImport_AppendInittab(const char* name, PyObject* (*initfunc)());
-extern "C" PyObject* {1}();
-
-namespace {
- struct TRegistrar {
- inline TRegistrar() {
- // TODO Collect all modules and call PyImport_ExtendInittab once
- PyImport_AppendInittab("{0}", {1});
- }
- } REG;
-}
-'''
-
-
-def mangle(name):
- if '.' not in name:
- return name
- return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))
-
-if __name__ == '__main__':
- if len(sys.argv) != 3:
- print >>sys.stderr, 'Usage: <path/to/gen_py_reg.py> <python_module_name> <output_file>'
- print >>sys.stderr, 'Passed: ' + ' '.join(sys.argv)
- sys.exit(1)
-
- with open(sys.argv[2], 'w') as f:
- modname = sys.argv[1]
- initname = 'PyInit_' + mangle(modname)
- code = template.replace('{0}', modname).replace('{1}', initname)
- f.write(code)
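
mangle() length-prefixes each dotted component so the generated init symbol stays a valid C identifier; a sketch:

    print(mangle('simple'))                  # -> simple
    print(mangle('a.b.c'))                   # -> 1a1b1c
    print('PyInit_' + mangle('mylib.core'))  # -> PyInit_5mylib4core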
diff --git a/build/scripts/gen_py_protos.py b/build/scripts/gen_py_protos.py
deleted file mode 100644
index 08397472f9..0000000000
--- a/build/scripts/gen_py_protos.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import os
-from os import path
-import shutil
-import subprocess
-import sys
-import tempfile
-import argparse
-import re
-
-
-OUT_DIR_ARG = '--python_out='
-
-def main():
- parser = argparse.ArgumentParser()
- parser.add_argument("--suffixes", nargs="*", default=[])
- parser.add_argument("protoc_args", nargs=argparse.REMAINDER)
- script_args = parser.parse_args()
-
- args = script_args.protoc_args
-
- if args[0] == "--":
- args = args[1:]
-
- out_dir_orig = None
- out_dir_temp = None
- plugin_out_dirs_orig = {}
- for i in range(len(args)):
- if args[i].startswith(OUT_DIR_ARG):
- assert not out_dir_orig, 'Duplicate "{0}" param'.format(OUT_DIR_ARG)
- out_dir_orig = args[i][len(OUT_DIR_ARG):]
- out_dir_temp = tempfile.mkdtemp(dir=out_dir_orig)
- args[i] = OUT_DIR_ARG + out_dir_temp
- continue
-
- match = re.match(r"^(--(\w+)_out=).*", args[i])
- if match:
- plugin_out_dir_arg = match.group(1)
- plugin = match.group(2)
- assert plugin not in plugin_out_dirs_orig, 'Duplicate "{0}" param'.format(plugin_out_dir_arg)
- plugin_out_dirs_orig[plugin] = args[i][len(plugin_out_dir_arg):]
- assert plugin_out_dirs_orig[plugin] == out_dir_orig, 'Params "{0}" and "{1}" expected to have the same value'.format(OUT_DIR_ARG, plugin_out_dir_arg)
- args[i] = plugin_out_dir_arg + out_dir_temp
-
- assert out_dir_temp, 'Param "{0}" not found'.format(OUT_DIR_ARG)
-
- retcode = subprocess.call(args)
- assert not retcode, 'Protoc failed for command {}'.format(' '.join(args))
-
- for root_temp, dirs, files in os.walk(out_dir_temp):
- sub_dir = path.relpath(root_temp, out_dir_temp)
- root_orig = path.join(out_dir_orig, sub_dir)
- for d in dirs:
- d_orig = path.join(root_orig, d)
- if not path.exists(d_orig):
- os.mkdir(d_orig)
- for f in files:
- f_orig = f
- for suf in script_args.suffixes:
- if f.endswith(suf):
- f_orig = f[:-len(suf)] + "__int__" + suf
- break
- os.rename(path.join(root_temp, f), path.join(root_orig, f_orig))
- shutil.rmtree(out_dir_temp)
-
-
-if __name__ == '__main__':
- main()
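
The suffix rewrite at the end is the script's visible effect; a sketch using the protoc _pb2.py suffix (assumed here, not shown in the diff):

    f, suf = 'msg_pb2.py', '_pb2.py'             # e.g. from --suffixes _pb2.py
    if f.endswith(suf):
        print(f[:-len(suf)] + '__int__' + suf)   # -> msg__int___pb2.py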
diff --git a/build/scripts/gen_py_reg.py b/build/scripts/gen_py_reg.py
deleted file mode 100644
index 1560135ae8..0000000000
--- a/build/scripts/gen_py_reg.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import sys
-
-template = '''
-extern "C" void PyImport_AppendInittab(const char* name, void (*fn)(void));
-extern "C" void {1}();
-
-namespace {
- struct TRegistrar {
- inline TRegistrar() {
- PyImport_AppendInittab("{0}", {1});
- }
- } REG;
-}
-'''
-
-
-def mangle(name):
- if '.' not in name:
- return name
- return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))
-
-if __name__ == '__main__':
- if len(sys.argv) != 3:
- print >>sys.stderr, 'Usage: <path/to/gen_py_reg.py> <python_module_name> <output_file>'
- print >>sys.stderr, 'Passed: ' + ' '.join(sys.argv)
- sys.exit(1)
-
- with open(sys.argv[2], 'w') as f:
- modname = sys.argv[1]
- initname = 'init' + mangle(modname)
- code = template.replace('{0}', modname).replace('{1}', initname)
- f.write(code)
diff --git a/build/scripts/go_fake_include/go_asm.h b/build/scripts/go_fake_include/go_asm.h
deleted file mode 100644
index e69de29bb2..0000000000
--- a/build/scripts/go_fake_include/go_asm.h
+++ /dev/null
diff --git a/build/scripts/go_tool.py b/build/scripts/go_tool.py
deleted file mode 100644
index 7c51e65a2e..0000000000
--- a/build/scripts/go_tool.py
+++ /dev/null
@@ -1,867 +0,0 @@
-from __future__ import absolute_import, unicode_literals
-import argparse
-import copy
-import json
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tarfile
-import tempfile
-import threading
-import six
-from functools import reduce
-
-import process_command_files as pcf
-import process_whole_archive_option as pwa
-
-arc_project_prefix = 'a.yandex-team.ru/'
-std_lib_prefix = 'contrib/go/_std_1.18/src/'
-vendor_prefix = 'vendor/'
-vet_info_ext = '.vet.out'
-vet_report_ext = '.vet.txt'
-
-FIXED_CGO1_SUFFIX='.fixed.cgo1.go'
-
-COMPILE_OPTIMIZATION_FLAGS=('-N',)
-
-
-def get_trimpath_args(args):
- return ['-trimpath', args.trimpath] if args.trimpath else []
-
-
-def preprocess_cgo1(src_path, dst_path, source_root):
- with open(src_path, 'r') as f:
- content = f.read()
- content = content.replace('__ARCADIA_SOURCE_ROOT_PREFIX__', source_root)
- with open(dst_path, 'w') as f:
- f.write(content)
-
-
-def preprocess_args(args):
-    # Temporary workaround for noauto
- if args.cgo_srcs and len(args.cgo_srcs) > 0:
- cgo_srcs_set = set(args.cgo_srcs)
- args.srcs = [x for x in args.srcs if x not in cgo_srcs_set]
-
- args.pkg_root = os.path.join(args.toolchain_root, 'pkg')
- toolchain_tool_root = os.path.join(args.pkg_root, 'tool', '{}_{}'.format(args.host_os, args.host_arch))
- args.go_compile = os.path.join(toolchain_tool_root, 'compile')
- args.go_cgo = os.path.join(toolchain_tool_root, 'cgo')
- args.go_link = os.path.join(toolchain_tool_root, 'link')
- args.go_asm = os.path.join(toolchain_tool_root, 'asm')
- args.go_pack = os.path.join(toolchain_tool_root, 'pack')
- args.go_vet = os.path.join(toolchain_tool_root, 'vet') if args.vet is True else args.vet
- args.output = os.path.normpath(args.output)
- args.vet_report_output = vet_report_output_name(args.output, args.vet_report_ext)
- args.trimpath = None
- if args.debug_root_map:
- roots = {'build': args.build_root, 'source': args.source_root, 'tools': args.tools_root}
- replaces = []
- for root in args.debug_root_map.split(';'):
- src, dst = root.split('=', 1)
- assert src in roots
- replaces.append('{}=>{}'.format(roots[src], dst))
- del roots[src]
- assert len(replaces) > 0
- args.trimpath = ';'.join(replaces)
- args.build_root = os.path.normpath(args.build_root)
- args.build_root_dir = args.build_root + os.path.sep
- args.source_root = os.path.normpath(args.source_root)
- args.source_root_dir = args.source_root + os.path.sep
- args.output_root = os.path.normpath(args.output_root)
- args.import_map = {}
- args.module_map = {}
- if args.cgo_peers:
- args.cgo_peers = [x for x in args.cgo_peers if not x.endswith('.fake.pkg')]
-
- srcs = []
- for f in args.srcs:
- if f.endswith('.gosrc'):
- with tarfile.open(f, 'r') as tar:
- srcs.extend(os.path.join(args.output_root, src) for src in tar.getnames())
- tar.extractall(path=args.output_root)
- else:
- srcs.append(f)
- args.srcs = srcs
-
- assert args.mode == 'test' or args.test_srcs is None and args.xtest_srcs is None
-    # add lexical order by basename for go sources
- args.srcs.sort(key=lambda x: os.path.basename(x))
- if args.test_srcs:
- args.srcs += sorted(args.test_srcs, key=lambda x: os.path.basename(x))
- del args.test_srcs
- if args.xtest_srcs:
- args.xtest_srcs.sort(key=lambda x: os.path.basename(x))
-
- # compute root relative module dir path
- assert args.output is None or args.output_root == os.path.dirname(args.output)
- assert args.output_root.startswith(args.build_root_dir)
- args.module_path = args.output_root[len(args.build_root_dir):]
- args.source_module_dir = os.path.join(args.source_root, args.test_import_path or args.module_path) + os.path.sep
- assert len(args.module_path) > 0
- args.import_path, args.is_std = get_import_path(args.module_path)
-
- assert args.asmhdr is None or args.word == 'go'
-
- srcs = []
- for f in args.srcs:
- if f.endswith(FIXED_CGO1_SUFFIX) and f.startswith(args.build_root_dir):
- path = os.path.join(args.output_root, '{}.cgo1.go'.format(os.path.basename(f[:-len(FIXED_CGO1_SUFFIX)])))
- srcs.append(path)
- preprocess_cgo1(f, path, args.source_root)
- else:
- srcs.append(f)
- args.srcs = srcs
-
- if args.extldflags:
- args.extldflags = pwa.ProcessWholeArchiveOption(args.targ_os).construct_cmd(args.extldflags)
-
- classify_srcs(args.srcs, args)
-
-
-def compare_versions(version1, version2):
- def last_index(version):
- index = version.find('beta')
- return len(version) if index < 0 else index
-
- v1 = tuple(x.zfill(8) for x in version1[:last_index(version1)].split('.'))
- v2 = tuple(x.zfill(8) for x in version2[:last_index(version2)].split('.'))
- if v1 == v2:
- return 0
- return 1 if v1 < v2 else -1
-
-
-def get_symlink_or_copyfile():
- os_symlink = getattr(os, 'symlink', None)
- if os_symlink is None or os.name == 'nt':
- os_symlink = shutil.copyfile
- return os_symlink
-
-
-def copy_args(args):
- return copy.copy(args)
-
-
-def get_vendor_index(import_path):
- index = import_path.rfind('/' + vendor_prefix)
- if index < 0:
- index = 0 if import_path.startswith(vendor_prefix) else index
- else:
- index = index + 1
- return index
-
-
-def get_import_path(module_path):
- assert len(module_path) > 0
- import_path = module_path.replace('\\', '/')
- is_std_module = import_path.startswith(std_lib_prefix)
- if is_std_module:
- import_path = import_path[len(std_lib_prefix):]
- elif import_path.startswith(vendor_prefix):
- import_path = import_path[len(vendor_prefix):]
- else:
- import_path = arc_project_prefix + import_path
- assert len(import_path) > 0
- return import_path, is_std_module
-
-
-def call(cmd, cwd, env=None):
- # sys.stderr.write('{}\n'.format(' '.join(cmd)))
- return subprocess.check_output(cmd, stdin=None, stderr=subprocess.STDOUT, cwd=cwd, env=env, text=True)
-
-
-def classify_srcs(srcs, args):
- args.go_srcs = [x for x in srcs if x.endswith('.go')]
- args.asm_srcs = [x for x in srcs if x.endswith('.s')]
- args.objects = [x for x in srcs if x.endswith('.o') or x.endswith('.obj')]
- args.symabis = [x for x in srcs if x.endswith('.symabis')]
- args.sysos = [x for x in srcs if x.endswith('.syso')]
-
-
-def get_import_config_info(peers, gen_importmap, import_map={}, module_map={}):
- info = {'importmap': [], 'packagefile': [], 'standard': {}}
- if gen_importmap:
- for key, value in six.iteritems(import_map):
- info['importmap'].append((key, value))
- for peer in peers:
- peer_import_path, is_std = get_import_path(os.path.dirname(peer))
- if gen_importmap:
- index = get_vendor_index(peer_import_path)
- if index >= 0:
- index += len(vendor_prefix)
- info['importmap'].append((peer_import_path[index:], peer_import_path))
- info['packagefile'].append((peer_import_path, os.path.join(args.build_root, peer)))
- if is_std:
- info['standard'][peer_import_path] = True
- for key, value in six.iteritems(module_map):
- info['packagefile'].append((key, value))
- return info
-
-
-def create_import_config(peers, gen_importmap, import_map={}, module_map={}):
- lines = []
- info = get_import_config_info(peers, gen_importmap, import_map, module_map)
- for key in ('importmap', 'packagefile'):
- for item in info[key]:
- lines.append('{} {}={}'.format(key, *item))
- if len(lines) > 0:
- lines.append('')
- content = '\n'.join(lines)
- # sys.stderr.writelines('{}\n'.format(l) for l in lines)
- with tempfile.NamedTemporaryFile(delete=False) as f:
- f.write(content.encode('UTF-8'))
- return f.name
- return None
-
-
-def create_embed_config(args):
- data = {
- 'Patterns': {},
- 'Files': {},
- }
- for info in args.embed:
- pattern = info[0]
- if pattern.endswith('/**/*'):
- pattern = pattern[:-3]
- files = {os.path.relpath(f, args.source_module_dir).replace('\\', '/'): f for f in info[1:]}
- data['Patterns'][pattern] = list(files.keys())
- data['Files'].update(files)
- # sys.stderr.write('{}\n'.format(json.dumps(data, indent=4)))
- with tempfile.NamedTemporaryFile(delete=False, suffix='.embedcfg') as f:
- f.write(json.dumps(data).encode('UTF-8'))
- return f.name
-
-
-def vet_info_output_name(path, ext=None):
- return '{}{}'.format(path, ext or vet_info_ext)
-
-
-def vet_report_output_name(path, ext=None):
- return '{}{}'.format(path, ext or vet_report_ext)
-
-
-def get_source_path(args):
- return args.test_import_path or args.module_path
-
-
-def gen_vet_info(args):
- import_path = args.real_import_path if hasattr(args, 'real_import_path') else args.import_path
- info = get_import_config_info(args.peers, True, args.import_map, args.module_map)
-
- import_map = dict(info['importmap'])
- # FIXME(snermolaev): it seems that adding import map for 'fake' package
-    # doesn't do any harm (it needs to be revised later)
- import_map['unsafe'] = 'unsafe'
-
- for (key, _) in info['packagefile']:
- if key not in import_map:
- import_map[key] = key
-
- data = {
- 'ID': import_path,
- 'Compiler': 'gc',
- 'Dir': os.path.join(args.source_root, get_source_path(args)),
- 'ImportPath': import_path,
- 'GoFiles': [x for x in args.go_srcs if x.endswith('.go')],
- 'NonGoFiles': [x for x in args.go_srcs if not x.endswith('.go')],
- 'ImportMap': import_map,
- 'PackageFile': dict(info['packagefile']),
- 'Standard': dict(info['standard']),
- 'PackageVetx': dict((key, vet_info_output_name(value)) for key, value in info['packagefile']),
- 'VetxOnly': False,
- 'VetxOutput': vet_info_output_name(args.output),
- 'SucceedOnTypecheckFailure': False
- }
- # sys.stderr.write('{}\n'.format(json.dumps(data, indent=4)))
- return data
-
-
-def create_vet_config(args, info):
- with tempfile.NamedTemporaryFile(delete=False, suffix='.cfg') as f:
- f.write(json.dumps(info).encode('UTF-8'))
- return f.name
-
-
-def decode_vet_report(json_report):
- report = ''
- if json_report:
- try:
- full_diags = json.JSONDecoder().decode(json_report.decode('UTF-8'))
- except ValueError:
- report = json_report
- else:
- messages = []
- for _, module_diags in six.iteritems(full_diags):
- for _, type_diags in six.iteritems(module_diags):
- for diag in type_diags:
- messages.append('{}: {}'.format(diag['posn'], json.dumps(diag['message'])))
- report = '\n'.join(messages)
-
- return report
-
-
-def dump_vet_report(args, report):
- if report:
- report = report.replace(args.build_root, '$B')
- report = report.replace(args.source_root, '$S')
- with open(args.vet_report_output, 'w') as f:
- f.write(report)
-
-
-def read_vet_report(args):
- assert args
- report = ''
- if os.path.exists(args.vet_report_output):
- with open(args.vet_report_output, 'r') as f:
- report += f.read()
- return report
-
-
-def dump_vet_report_for_tests(args, *test_args_list):
- dump_vet_report(args, reduce(lambda x, y: x + read_vet_report(y), [_f for _f in test_args_list if _f], ''))
-
-
-def do_vet(args):
- assert args.vet
- info = gen_vet_info(args)
- vet_config = create_vet_config(args, info)
- cmd = [args.go_vet, '-json']
- if args.vet_flags:
- cmd.extend(args.vet_flags)
- cmd.append(vet_config)
- # sys.stderr.write('>>>> [{}]\n'.format(' '.join(cmd)))
- p_vet = subprocess.Popen(cmd, stdin=None, stderr=subprocess.PIPE, stdout=subprocess.PIPE, cwd=args.source_root)
- vet_out, vet_err = p_vet.communicate()
- report = decode_vet_report(vet_out) if vet_out else ''
- dump_vet_report(args, report)
- if p_vet.returncode:
- raise subprocess.CalledProcessError(returncode=p_vet.returncode, cmd=cmd, output=vet_err)
-
-
-def _do_compile_go(args):
- import_path, is_std_module = args.import_path, args.is_std
- cmd = [
- args.go_compile,
- '-o',
- args.output,
- '-p',
- import_path,
- '-D',
- '""',
- '-goversion',
- 'go{}'.format(args.goversion)
- ]
- if args.lang:
- cmd.append('-lang=go{}'.format(args.lang))
- cmd.extend(get_trimpath_args(args))
- compiling_runtime = False
- if is_std_module:
- cmd.append('-std')
- if import_path in ('runtime', 'internal/abi', 'internal/bytealg', 'internal/cpu') or import_path.startswith('runtime/internal/'):
- cmd.append('-+')
- compiling_runtime = True
- import_config_name = create_import_config(args.peers, True, args.import_map, args.module_map)
- if import_config_name:
- cmd += ['-importcfg', import_config_name]
- else:
- if import_path == 'unsafe' or len(args.objects) > 0 or args.asmhdr:
- pass
- else:
- cmd.append('-complete')
- # if compare_versions('1.16', args.goversion) >= 0:
- if args.embed:
- embed_config_name = create_embed_config(args)
- cmd.extend(['-embedcfg', embed_config_name])
- if args.asmhdr:
- cmd += ['-asmhdr', args.asmhdr]
-    # Use .symabis (starting from version 1.12)
- if args.symabis:
- cmd += ['-symabis'] + args.symabis
- # If 1.12 <= version < 1.13 we have to pass -allabis for 'runtime' and 'runtime/internal/atomic'
- # if compare_versions('1.13', args.goversion) >= 0:
- # pass
- # elif import_path in ('runtime', 'runtime/internal/atomic'):
- # cmd.append('-allabis')
- compile_workers = '4'
- if args.compile_flags:
- if compiling_runtime:
- cmd.extend(x for x in args.compile_flags if x not in COMPILE_OPTIMIZATION_FLAGS)
- else:
- cmd.extend(args.compile_flags)
- if any([x in ('-race', '-shared') for x in args.compile_flags]):
- compile_workers = '1'
- cmd += ['-pack', '-c={}'.format(compile_workers)]
- cmd += args.go_srcs
- call(cmd, args.build_root)
-
-
-class VetThread(threading.Thread):
-
- def __init__(self, target, args):
- super(VetThread, self).__init__(target=target, args=args)
- self.exc_info = None
-
- def run(self):
- try:
- super(VetThread, self).run()
- except:
- self.exc_info = sys.exc_info()
-
- def join_with_exception(self, reraise_exception):
- self.join()
- if reraise_exception and self.exc_info:
- six.reraise(self.exc_info[0], self.exc_info[1], self.exc_info[2])
-
-
-def do_compile_go(args):
- raise_exception_from_vet = False
- if args.vet:
- run_vet = VetThread(target=do_vet, args=(args,))
- run_vet.start()
- try:
- _do_compile_go(args)
- raise_exception_from_vet = True
- finally:
- if args.vet:
- run_vet.join_with_exception(raise_exception_from_vet)
-
-
-def do_compile_asm(args):
- def need_compiling_runtime(import_path):
- return import_path in ('runtime', 'reflect', 'syscall') or \
- import_path.startswith('runtime/internal/') or \
- compare_versions('1.17', args.goversion) >= 0 and import_path == 'internal/bytealg'
-
- assert(len(args.srcs) == 1 and len(args.asm_srcs) == 1)
- cmd = [args.go_asm]
- cmd += get_trimpath_args(args)
- cmd += ['-I', args.output_root, '-I', os.path.join(args.pkg_root, 'include')]
- cmd += ['-D', 'GOOS_' + args.targ_os, '-D', 'GOARCH_' + args.targ_arch, '-o', args.output]
-
- # if compare_versions('1.16', args.goversion) >= 0:
- cmd += ['-p', args.import_path]
- if need_compiling_runtime(args.import_path):
- cmd += ['-compiling-runtime']
-
- if args.asm_flags:
- cmd += args.asm_flags
- cmd += args.asm_srcs
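- # Representative invocation assembled above (values illustrative):
- # go_asm -I $OUTPUT_ROOT -I $PKG_ROOT/include -D GOOS_linux -D GOARCH_amd64 -o src.o -p runtime src.s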
- call(cmd, args.build_root)
-
-
-def do_link_lib(args):
- if len(args.asm_srcs) > 0:
- asmargs = copy_args(args)
- asmargs.asmhdr = os.path.join(asmargs.output_root, 'go_asm.h')
- do_compile_go(asmargs)
- for src in asmargs.asm_srcs:
- asmargs.srcs = [src]
- asmargs.asm_srcs = [src]
- asmargs.output = os.path.join(asmargs.output_root, os.path.basename(src) + '.o')
- do_compile_asm(asmargs)
- args.objects.append(asmargs.output)
- else:
- do_compile_go(args)
- if args.objects or args.sysos:
- cmd = [args.go_pack, 'r', args.output] + args.objects + args.sysos
- call(cmd, args.build_root)
-
-
-def do_link_exe(args):
- assert args.extld is not None
- assert args.non_local_peers is not None
- compile_args = copy_args(args)
- compile_args.output = os.path.join(args.output_root, 'main.a')
- compile_args.real_import_path = compile_args.import_path
- compile_args.import_path = 'main'
-
- if args.vcs and os.path.isfile(compile_args.vcs):
- build_info = os.path.join('library', 'go', 'core', 'buildinfo')
- if any([x.startswith(build_info) for x in compile_args.peers]):
- compile_args.go_srcs.append(compile_args.vcs)
-
- do_link_lib(compile_args)
- cmd = [args.go_link, '-o', args.output]
- import_config_name = create_import_config(args.peers + args.non_local_peers, False, args.import_map, args.module_map)
- if import_config_name:
- cmd += ['-importcfg', import_config_name]
- if args.link_flags:
- cmd += args.link_flags
-
- if args.mode in ('exe', 'test'):
- cmd.append('-buildmode=exe')
- elif args.mode == 'dll':
- cmd.append('-buildmode=c-shared')
- else:
- assert False, 'Unexpected mode: {}'.format(args.mode)
- cmd.append('-extld={}'.format(args.extld))
-
- extldflags = []
- if args.extldflags is not None:
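- # bool as a filter keeps every non-empty flag; under musl it is replaced
- # below to also drop the libraries already provided by the static libc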
- filter_musl = bool
- if args.musl:
- cmd.append('-linkmode=external')
- extldflags.append('-static')
- filter_musl = lambda x: x not in ('-lc', '-ldl', '-lm', '-lpthread', '-lrt')
- extldflags += [x for x in args.extldflags if filter_musl(x)]
- cgo_peers = []
- if args.cgo_peers is not None and len(args.cgo_peers) > 0:
- is_group = args.targ_os == 'linux'
- if is_group:
- cgo_peers.append('-Wl,--start-group')
- cgo_peers.extend(args.cgo_peers)
- if is_group:
- cgo_peers.append('-Wl,--end-group')
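- # splice the peer list in place of the '--cgo-peers' placeholder, e.g.
- # (illustrative): ['-static', '--cgo-peers'] becomes
- # ['-static', '-Wl,--start-group', 'libfoo.a', '-Wl,--end-group']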
- try:
- index = extldflags.index('--cgo-peers')
- extldflags = extldflags[:index] + cgo_peers + extldflags[index+1:]
- except ValueError:
- extldflags.extend(cgo_peers)
- if len(extldflags) > 0:
- cmd.append('-extldflags={}'.format(' '.join(extldflags)))
- cmd.append(compile_args.output)
- call(cmd, args.build_root)
-
-
-def gen_cover_info(args):
- lines = []
- lines.extend([
- """
-var (
- coverCounters = make(map[string][]uint32)
- coverBlocks = make(map[string][]testing.CoverBlock)
-)
- """,
- 'func init() {',
- ])
- for var, file in (x.split(':') for x in args.cover_info):
- lines.append(' coverRegisterFile("{file}", _cover0.{var}.Count[:], _cover0.{var}.Pos[:], _cover0.{var}.NumStmt[:])'.format(file=file, var=var))
- lines.extend([
- '}',
- """
-func coverRegisterFile(fileName string, counter []uint32, pos []uint32, numStmts []uint16) {
- if 3*len(counter) != len(pos) || len(counter) != len(numStmts) {
- panic("coverage: mismatched sizes")
- }
- if coverCounters[fileName] != nil {
- // Already registered.
- return
- }
- coverCounters[fileName] = counter
- block := make([]testing.CoverBlock, len(counter))
- for i := range counter {
- block[i] = testing.CoverBlock{
- Line0: pos[3*i+0],
- Col0: uint16(pos[3*i+2]),
- Line1: pos[3*i+1],
- Col1: uint16(pos[3*i+2]>>16),
- Stmts: numStmts[i],
- }
- }
- coverBlocks[fileName] = block
-}
- """,
- ])
- return lines
-
-
-def filter_out_skip_tests(tests, skip_tests):
- skip_set = set()
- star_skip_set = set()
- for t in skip_tests:
- work_set = star_skip_set if '*' in t else skip_set
- work_set.add(t)
-
- re_star_tests = None
- if len(star_skip_set) > 0:
- re_star_tests = re.compile(re.sub(r'(\*)+', r'.\1', '^({})$'.format('|'.join(star_skip_set))))
-
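- # e.g. skip_tests=['TestFoo', 'TestBar*'] (names illustrative) compiles to
- # the pattern '^(TestBar.*)$', so both exact and glob-style entries are dropped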
- return [x for x in tests if not (x in skip_set or re_star_tests and re_star_tests.match(x))]
-
-
-def gen_test_main(args, test_lib_args, xtest_lib_args):
- assert args and (test_lib_args or xtest_lib_args)
- test_miner = args.test_miner
- test_module_path = test_lib_args.import_path if test_lib_args else xtest_lib_args.import_path
- is_cover = args.cover_info and len(args.cover_info) > 0
-
- # Prepare GOPATH
- # $BINDIR
- # |- __go__
- # |- src
- # |- pkg
- # |- ${TARGET_OS}_${TARGET_ARCH}
- go_path_root = os.path.join(args.output_root, '__go__')
- test_src_dir = os.path.join(go_path_root, 'src')
- target_os_arch = '_'.join([args.targ_os, args.targ_arch])
- test_pkg_dir = os.path.join(go_path_root, 'pkg', target_os_arch, os.path.dirname(test_module_path))
- os.makedirs(test_pkg_dir)
-
- my_env = os.environ.copy()
- my_env['GOROOT'] = ''
- my_env['GOPATH'] = go_path_root
- my_env['GOARCH'] = args.targ_arch
- my_env['GOOS'] = args.targ_os
-
- tests = []
- xtests = []
- # defaults so the TestMain checks below don't raise a NameError when only
- # one of test_lib_args/xtest_lib_args is present
- test_main_found = xtest_main_found = False
- os_symlink = get_symlink_or_copyfile()
-
- # Get the list of "internal" tests
- if test_lib_args:
- os.makedirs(os.path.join(test_src_dir, test_module_path))
- os_symlink(test_lib_args.output, os.path.join(test_pkg_dir, os.path.basename(test_module_path) + '.a'))
- cmd = [test_miner, '-benchmarks', '-tests', test_module_path]
- tests = [x for x in (call(cmd, test_lib_args.output_root, my_env) or '').strip().split('\n') if len(x) > 0]
- if args.skip_tests:
- tests = filter_out_skip_tests(tests, args.skip_tests)
- test_main_found = '#TestMain' in tests
-
- # Get the list of "external" tests
- if xtest_lib_args:
- xtest_module_path = xtest_lib_args.import_path
- os.makedirs(os.path.join(test_src_dir, xtest_module_path))
- os_symlink(xtest_lib_args.output, os.path.join(test_pkg_dir, os.path.basename(xtest_module_path) + '.a'))
- cmd = [test_miner, '-benchmarks', '-tests', xtest_module_path]
- xtests = [x for x in (call(cmd, xtest_lib_args.output_root, my_env) or '').strip().split('\n') if len(x) > 0]
- if args.skip_tests:
- xtests = filter_out_skip_tests(xtests, args.skip_tests)
- xtest_main_found = '#TestMain' in xtests
-
- test_main_package = None
- if test_main_found and xtest_main_found:
- assert False, 'multiple definition of TestMain'
- elif test_main_found:
- test_main_package = '_test'
- elif xtest_main_found:
- test_main_package = '_xtest'
-
- shutil.rmtree(go_path_root)
-
- lines = ['package main', '', 'import (']
- if test_main_package is None:
- lines.append(' "os"')
- lines.extend([' "testing"', ' "testing/internal/testdeps"'])
-
- if len(tests) > 0:
- lines.append(' _test "{}"'.format(test_module_path))
- elif test_lib_args:
- lines.append(' _ "{}"'.format(test_module_path))
-
- if len(xtests) > 0:
- lines.append(' _xtest "{}"'.format(xtest_module_path))
- elif xtest_lib_args:
- lines.append(' _ "{}"'.format(xtest_module_path))
-
- if is_cover:
- lines.append(' _cover0 "{}"'.format(test_module_path))
- lines.extend([')', ''])
-
- if compare_versions('1.18', args.goversion) < 0:
- kinds = ['Test', 'Benchmark', 'Example']
- else:
- kinds = ['Test', 'Benchmark', 'FuzzTarget', 'Example']
-
- var_names = []
- for kind in kinds:
- var_name = '{}s'.format(kind.lower())
- var_names.append(var_name)
- lines.append('var {} = []testing.Internal{}{{'.format(var_name, kind))
- for test in [x for x in tests if x.startswith(kind)]:
- lines.append(' {{"{test}", _test.{test}}},'.format(test=test))
- for test in [x for x in xtests if x.startswith(kind)]:
- lines.append(' {{"{test}", _xtest.{test}}},'.format(test=test))
- lines.extend(['}', ''])
-
- if is_cover:
- lines.extend(gen_cover_info(args))
-
- lines.append('func main() {')
- if is_cover:
- lines.extend([
- ' testing.RegisterCover(testing.Cover{',
- ' Mode: "set",',
- ' Counters: coverCounters,',
- ' Blocks: coverBlocks,',
- ' CoveredPackages: "",',
- ' })',
- ])
- lines.extend([
- ' m := testing.MainStart(testdeps.TestDeps{{}}, {})'.format(', '.join(var_names)),
- '',
- ])
-
- if test_main_package:
- lines.append(' {}.TestMain(m)'.format(test_main_package))
- else:
- lines.append(' os.Exit(m.Run())')
- lines.extend(['}', ''])
-
- content = '\n'.join(lines)
- # sys.stderr.write('{}\n'.format(content))
- return content
-
-
-def do_link_test(args):
- assert args.srcs or args.xtest_srcs
- assert args.test_miner is not None
-
- test_module_path = get_source_path(args)
- test_import_path, _ = get_import_path(test_module_path)
-
- test_lib_args = copy_args(args) if args.srcs else None
- xtest_lib_args = copy_args(args) if args.xtest_srcs else None
- if xtest_lib_args is not None:
- xtest_lib_args.embed = args.embed_xtest if args.embed_xtest else None
-
- ydx_file_name = None
- xtest_ydx_file_name = None
- need_append_ydx = test_lib_args and xtest_lib_args and args.ydx_file and args.vet_flags
- if need_append_ydx:
- def find_ydx_file_name(name, flags):
- for i, elem in enumerate(flags):
- if elem.endswith(name):
- return (i, elem)
- assert False, 'Unreachable code'
-
- idx, ydx_file_name = find_ydx_file_name(xtest_lib_args.ydx_file, xtest_lib_args.vet_flags)
- xtest_ydx_file_name = '{}_xtest'.format(ydx_file_name)
- xtest_lib_args.vet_flags = copy.copy(xtest_lib_args.vet_flags)
- xtest_lib_args.vet_flags[idx] = xtest_ydx_file_name
-
- if test_lib_args:
- test_lib_args.output = os.path.join(args.output_root, 'test.a')
- test_lib_args.vet_report_output = vet_report_output_name(test_lib_args.output)
- test_lib_args.module_path = test_module_path
- test_lib_args.import_path = test_import_path
- do_link_lib(test_lib_args)
-
- if xtest_lib_args:
- xtest_lib_args.srcs = xtest_lib_args.xtest_srcs
- classify_srcs(xtest_lib_args.srcs, xtest_lib_args)
- xtest_lib_args.output = os.path.join(args.output_root, 'xtest.a')
- xtest_lib_args.vet_report_output = vet_report_output_name(xtest_lib_args.output)
- xtest_lib_args.module_path = test_module_path + '_test'
- xtest_lib_args.import_path = test_import_path + '_test'
- if test_lib_args:
- xtest_lib_args.module_map[test_import_path] = test_lib_args.output
- need_append_ydx = args.ydx_file and args.srcs and args.vet_flags
- do_link_lib(xtest_lib_args)
-
- if need_append_ydx:
- with open(os.path.join(args.build_root, ydx_file_name), 'ab') as dst_file:
- with open(os.path.join(args.build_root, xtest_ydx_file_name), 'rb') as src_file:
- dst_file.write(src_file.read())
-
- test_main_content = gen_test_main(args, test_lib_args, xtest_lib_args)
- test_main_name = os.path.join(args.output_root, '_test_main.go')
- with open(test_main_name, "w") as f:
- f.write(test_main_content)
- test_args = copy_args(args)
- test_args.embed = None
- test_args.srcs = [test_main_name]
- if test_args.test_import_path is None:
- # it seems that we could do this unconditionally, but this kind
- # of mangling doesn't really look good, so we leave it to the
- # pure GO_TEST module only
- test_args.module_path = test_args.module_path + '___test_main__'
- test_args.import_path = test_args.import_path + '___test_main__'
- classify_srcs(test_args.srcs, test_args)
- if test_lib_args:
- test_args.module_map[test_lib_args.import_path] = test_lib_args.output
- if xtest_lib_args:
- test_args.module_map[xtest_lib_args.import_path] = xtest_lib_args.output
-
- if args.vet:
- dump_vet_report_for_tests(test_args, test_lib_args, xtest_lib_args)
- test_args.vet = False
-
- do_link_exe(test_args)
-
-
-if __name__ == '__main__':
- args = pcf.get_args(sys.argv[1:])
-
- parser = argparse.ArgumentParser(prefix_chars='+')
- parser.add_argument('++mode', choices=['dll', 'exe', 'lib', 'test'], required=True)
- parser.add_argument('++srcs', nargs='*', required=True)
- parser.add_argument('++cgo-srcs', nargs='*')
- parser.add_argument('++test_srcs', nargs='*')
- parser.add_argument('++xtest_srcs', nargs='*')
- parser.add_argument('++cover_info', nargs='*')
- parser.add_argument('++output', nargs='?', default=None)
- parser.add_argument('++source-root', default=None)
- parser.add_argument('++build-root', required=True)
- parser.add_argument('++tools-root', default=None)
- parser.add_argument('++output-root', required=True)
- parser.add_argument('++toolchain-root', required=True)
- parser.add_argument('++host-os', choices=['linux', 'darwin', 'windows'], required=True)
- parser.add_argument('++host-arch', choices=['amd64', 'arm64'], required=True)
- parser.add_argument('++targ-os', choices=['linux', 'darwin', 'windows'], required=True)
- parser.add_argument('++targ-arch', choices=['amd64', 'x86', 'arm64'], required=True)
- parser.add_argument('++peers', nargs='*')
- parser.add_argument('++non-local-peers', nargs='*')
- parser.add_argument('++cgo-peers', nargs='*')
- parser.add_argument('++asmhdr', nargs='?', default=None)
- parser.add_argument('++test-import-path', nargs='?')
- parser.add_argument('++test-miner', nargs='?')
- parser.add_argument('++arc-project-prefix', nargs='?', default=arc_project_prefix)
- parser.add_argument('++std-lib-prefix', nargs='?', default=std_lib_prefix)
- parser.add_argument('++vendor-prefix', nargs='?', default=vendor_prefix)
- parser.add_argument('++extld', nargs='?', default=None)
- parser.add_argument('++extldflags', nargs='+', default=None)
- parser.add_argument('++goversion', required=True)
- parser.add_argument('++lang', nargs='?', default=None)
- parser.add_argument('++asm-flags', nargs='*')
- parser.add_argument('++compile-flags', nargs='*')
- parser.add_argument('++link-flags', nargs='*')
- parser.add_argument('++vcs', nargs='?', default=None)
- parser.add_argument('++vet', nargs='?', const=True, default=False)
- parser.add_argument('++vet-flags', nargs='*', default=None)
- parser.add_argument('++vet-info-ext', default=vet_info_ext)
- parser.add_argument('++vet-report-ext', default=vet_report_ext)
- parser.add_argument('++musl', action='store_true')
- parser.add_argument('++skip-tests', nargs='*', default=None)
- parser.add_argument('++ydx-file', default='')
- parser.add_argument('++debug-root-map', default=None)
- parser.add_argument('++embed', action='append', nargs='*')
- parser.add_argument('++embed_xtest', action='append', nargs='*')
- args = parser.parse_args(args)
-
- arc_project_prefix = args.arc_project_prefix
- std_lib_prefix = args.std_lib_prefix
- vendor_prefix = args.vendor_prefix
- vet_info_ext = args.vet_info_ext
- vet_report_ext = args.vet_report_ext
-
- preprocess_args(args)
-
- try:
- os.unlink(args.output)
- except OSError:
- pass
-
- # Only the 'lib', 'exe', 'dll' and 'test' build modes are supported
- # currently, and as a result we generate only one build node per module
- # (or program)
- dispatch = {
- 'exe': do_link_exe,
- 'dll': do_link_exe,
- 'lib': do_link_lib,
- 'test': do_link_test
- }
-
- exit_code = 1
- try:
- dispatch[args.mode](args)
- exit_code = 0
- except KeyError:
- sys.stderr.write('Unknown build mode [{}]...\n'.format(args.mode))
- except subprocess.CalledProcessError as e:
- sys.stderr.write('{} returned non-zero exit code {}.\n{}\n'.format(' '.join(e.cmd), e.returncode, e.output))
- exit_code = e.returncode
- except Exception as e:
- sys.stderr.write('Unhandled exception [{}]...\n'.format(str(e)))
- sys.exit(exit_code)
diff --git a/build/scripts/link_dyn_lib.py b/build/scripts/link_dyn_lib.py
deleted file mode 100644
index 918c23293c..0000000000
--- a/build/scripts/link_dyn_lib.py
+++ /dev/null
@@ -1,335 +0,0 @@
-import sys
-import os
-import subprocess
-import tempfile
-import collections
-import optparse
-import pipes
-
-from process_whole_archive_option import ProcessWholeArchiveOption
-
-
-def shlex_join(cmd):
- # equivalent to shlex.join() in python 3
- return ' '.join(
- pipes.quote(part)
- for part in cmd
- )
-
-
-def parse_export_file(p):
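- # Expected export file format, one entry per line (mirrors the tests at the
- # bottom of this file):
- # linux_version ver1.0
- # C++ geobase5::details::lookup_impl::*
- # C getFactoryMap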
- with open(p, 'r') as f:
- for l in f:
- l = l.strip()
-
- if l and '#' not in l:
- words = l.split()
- if len(words) == 2 and words[0] == 'linux_version':
- yield {'linux_version': words[1]}
- elif len(words) == 2:
- yield {'lang': words[0], 'sym': words[1]}
- elif len(words) == 1:
- yield {'lang': 'C', 'sym': words[0]}
- else:
- raise Exception('unsupported exports line: ' + l)
-
-
-def to_c(sym):
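- # e.g. to_c('geobase5::hardcoded_service') ->
- # ['_ZN8geobase517hardcoded_serviceE*', '_ZTIN8geobase517hardcoded_serviceE*', ...]
- # (one pattern per demangle prefix; see test_fix_cmd_darwin below)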
- symbols = collections.deque(sym.split('::'))
- c_prefixes = [ # demangle prefixes for c++ symbols
- '_ZN', # namespace
- '_ZTIN', # typeinfo for
- '_ZTSN', # typeinfo name for
- '_ZTTN', # VTT for
- '_ZTVN', # vtable for
- '_ZNK', # const methods
- ]
- c_sym = ''
- while symbols:
- s = symbols.popleft()
- if s == '*':
- c_sym += '*'
- break
- if '*' in s and len(s) > 1:
- raise Exception('Unsupported format, cannot guess length of symbol: ' + s)
- c_sym += str(len(s)) + s
- if symbols:
- raise Exception('Unsupported format: ' + sym)
- if c_sym[-1] != '*':
- c_sym += 'E*'
- return ['{prefix}{sym}'.format(prefix=prefix, sym=c_sym) for prefix in c_prefixes]
-
-
-def fix_darwin_param(ex):
- for item in ex:
- if item.get('linux_version'):
- continue
-
- if item['lang'] == 'C':
- yield '-Wl,-exported_symbol,_' + item['sym']
- elif item['lang'] == 'C++':
- for sym in to_c(item['sym']):
- yield '-Wl,-exported_symbol,_' + sym
- else:
- raise Exception('unsupported lang: ' + item['lang'])
-
-
-def fix_gnu_param(arch, ex):
- d = collections.defaultdict(list)
- version = None
- for item in ex:
- if item.get('linux_version'):
- if not version:
- version = item.get('linux_version')
- else:
- raise Exception('More than one linux_version defined')
- elif item['lang'] == 'C++':
- d['C'].extend(to_c(item['sym']))
- else:
- d[item['lang']].append(item['sym'])
-
- with tempfile.NamedTemporaryFile(mode='wt', delete=False) as f:
- if version:
- f.write('{} {{\nglobal:\n'.format(version))
- else:
- f.write('{\nglobal:\n')
-
- for k, v in d.items():
- f.write(' extern "' + k + '" {\n')
-
- for x in v:
- f.write(' ' + x + ';\n')
-
- f.write(' };\n')
-
- f.write('local: *;\n};\n')
-
- ret = ['-Wl,--version-script=' + f.name]
-
- if arch == 'ANDROID':
- ret += ['-Wl,--export-dynamic']
-
- return ret
-
-
-def fix_windows_param(ex):
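- # Writes a linker .def file along these lines (symbol illustrative):
- # EXPORTS
- # getFactoryMap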
- with tempfile.NamedTemporaryFile(mode='wt', delete=False) as def_file:
- exports = []
- for item in ex:
- if item.get('lang') == 'C':
- exports.append(item.get('sym'))
- def_file.write('EXPORTS\n')
- for export in exports:
- def_file.write(' {}\n'.format(export))
- return ['/DEF:{}'.format(def_file.name)]
-
-
-MUSL_LIBS = '-lc', '-lcrypt', '-ldl', '-lm', '-lpthread', '-lrt', '-lutil'
-
-CUDA_LIBRARIES = {
- '-lcublas_static': '-lcublas',
- '-lcublasLt_static': '-lcublasLt',
- '-lcudart_static': '-lcudart',
- '-lcudnn_static': '-lcudnn',
- '-lcufft_static_nocallback': '-lcufft',
- '-lcurand_static': '-lcurand',
- '-lcusolver_static': '-lcusolver',
- '-lcusparse_static': '-lcusparse',
- '-lmyelin_compiler_static': '-lmyelin',
- '-lmyelin_executor_static': '-lnvcaffe_parser',
- '-lmyelin_pattern_library_static': '',
- '-lmyelin_pattern_runtime_static': '',
- '-lnvinfer_static': '-lnvinfer',
- '-lnvinfer_plugin_static': '-lnvinfer_plugin',
- '-lnvonnxparser_static': '-lnvonnxparser',
- '-lnvparsers_static': '-lnvparsers'
-}
-
-
-def fix_cmd(arch, c):
- if arch == 'WINDOWS':
- prefix = '/DEF:'
- f = fix_windows_param
- else:
- prefix = '-Wl,--version-script='
- if arch in ('DARWIN', 'IOS', 'IOSSIM'):
- f = fix_darwin_param
- else:
- f = lambda x: fix_gnu_param(arch, x)
-
- def do_fix(p):
- if p.startswith(prefix) and p.endswith('.exports'):
- fname = p[len(prefix):]
-
- return list(f(list(parse_export_file(fname))))
-
- if p.endswith('.supp'):
- return []
-
- if p.endswith('.pkg.fake'):
- return []
-
- return [p]
-
- return sum((do_fix(x) for x in c), [])
-
-
-def fix_cmd_for_musl(cmd):
- flags = []
- for flag in cmd:
- if flag not in MUSL_LIBS:
- flags.append(flag)
- return flags
-
-
-def fix_cmd_for_dynamic_cuda(cmd):
- flags = []
- for flag in cmd:
- if flag in CUDA_LIBRARIES:
- flags.append(CUDA_LIBRARIES[flag])
- else:
- flags.append(flag)
- return flags
-
-
-def parse_args():
- parser = optparse.OptionParser()
- parser.disable_interspersed_args()
- parser.add_option('--arch')
- parser.add_option('--target')
- parser.add_option('--soname')
- parser.add_option('--fix-elf')
- parser.add_option('--linker-output')
- parser.add_option('--musl', action='store_true')
- parser.add_option('--dynamic-cuda', action='store_true')
- parser.add_option('--whole-archive-peers', action='append')
- parser.add_option('--whole-archive-libs', action='append')
- return parser.parse_args()
-
-
-if __name__ == '__main__':
- opts, args = parse_args()
-
- assert opts.arch
- assert opts.target
-
- cmd = fix_cmd(opts.arch, args)
-
- if opts.musl:
- cmd = fix_cmd_for_musl(cmd)
- if opts.dynamic_cuda:
- cmd = fix_cmd_for_dynamic_cuda(cmd)
-
- cmd = ProcessWholeArchiveOption(opts.arch, opts.whole_archive_peers, opts.whole_archive_libs).construct_cmd(cmd)
-
- if opts.linker_output:
- stdout = open(opts.linker_output, 'w')
- else:
- stdout = sys.stdout
-
- proc = subprocess.Popen(cmd, shell=False, stderr=sys.stderr, stdout=stdout)
- proc.communicate()
-
- if proc.returncode:
- sys.stderr.write('linker has failed with retcode: {}\n'.format(proc.returncode))
- sys.stderr.write('linker command: {}\n'.format(shlex_join(cmd)))
- sys.exit(proc.returncode)
-
- if opts.fix_elf:
- cmd = [opts.fix_elf, opts.target]
- proc = subprocess.Popen(cmd, shell=False, stderr=sys.stderr, stdout=sys.stdout)
- proc.communicate()
-
- if proc.returncode:
- sys.stderr.write('fix_elf has failed with retcode: {}\n'.format(proc.returncode))
- sys.stderr.write('fix_elf command: {}\n'.format(shlex_join(cmd)))
- sys.exit(proc.returncode)
-
- if opts.soname and opts.soname != opts.target:
- if os.path.exists(opts.soname):
- os.unlink(opts.soname)
- os.link(opts.target, opts.soname)
-
-
-# -----------------Test---------------- #
-def write_temp_file(content):
- import yatest.common as yc
- filename = yc.output_path('test.exports')
- with open(filename, 'w') as f:
- f.write(content)
- return filename
-
-
-def test_fix_cmd_darwin():
- export_file_content = """
-C++ geobase5::details::lookup_impl::*
-C++ geobase5::hardcoded_service
-"""
- filename = write_temp_file(export_file_content)
- args = ['-Wl,--version-script={}'.format(filename)]
- assert fix_cmd('DARWIN', args) == [
- '-Wl,-exported_symbol,__ZN8geobase57details11lookup_impl*',
- '-Wl,-exported_symbol,__ZTIN8geobase57details11lookup_impl*',
- '-Wl,-exported_symbol,__ZTSN8geobase57details11lookup_impl*',
- '-Wl,-exported_symbol,__ZTTN8geobase57details11lookup_impl*',
- '-Wl,-exported_symbol,__ZTVN8geobase57details11lookup_impl*',
- '-Wl,-exported_symbol,__ZNK8geobase57details11lookup_impl*',
- '-Wl,-exported_symbol,__ZN8geobase517hardcoded_serviceE*',
- '-Wl,-exported_symbol,__ZTIN8geobase517hardcoded_serviceE*',
- '-Wl,-exported_symbol,__ZTSN8geobase517hardcoded_serviceE*',
- '-Wl,-exported_symbol,__ZTTN8geobase517hardcoded_serviceE*',
- '-Wl,-exported_symbol,__ZTVN8geobase517hardcoded_serviceE*',
- '-Wl,-exported_symbol,__ZNK8geobase517hardcoded_serviceE*',
- ]
-
-
-def run_fix_gnu_param(export_file_content):
- filename = write_temp_file(export_file_content)
- result = fix_gnu_param('LINUX', list(parse_export_file(filename)))[0]
- version_script_path = result[len('-Wl,--version-script='):]
- with open(version_script_path) as f:
- content = f.read()
- return content
-
-
-def test_fix_gnu_param():
- export_file_content = """
-C++ geobase5::details::lookup_impl::*
-C getFactoryMap
-"""
- assert run_fix_gnu_param(export_file_content) == """{
-global:
- extern "C" {
- _ZN8geobase57details11lookup_impl*;
- _ZTIN8geobase57details11lookup_impl*;
- _ZTSN8geobase57details11lookup_impl*;
- _ZTTN8geobase57details11lookup_impl*;
- _ZTVN8geobase57details11lookup_impl*;
- _ZNK8geobase57details11lookup_impl*;
- getFactoryMap;
- };
-local: *;
-};
-"""
-
-
-def test_fix_gnu_param_with_linux_version():
- export_file_content = """
-C++ geobase5::details::lookup_impl::*
-linux_version ver1.0
-C getFactoryMap
-"""
- assert run_fix_gnu_param(export_file_content) == """ver1.0 {
-global:
- extern "C" {
- _ZN8geobase57details11lookup_impl*;
- _ZTIN8geobase57details11lookup_impl*;
- _ZTSN8geobase57details11lookup_impl*;
- _ZTTN8geobase57details11lookup_impl*;
- _ZTVN8geobase57details11lookup_impl*;
- _ZNK8geobase57details11lookup_impl*;
- getFactoryMap;
- };
-local: *;
-};
-"""
diff --git a/build/scripts/link_exe.py b/build/scripts/link_exe.py
deleted file mode 100644
index aa96818851..0000000000
--- a/build/scripts/link_exe.py
+++ /dev/null
@@ -1,130 +0,0 @@
-import sys
-import subprocess
-import optparse
-
-from process_whole_archive_option import ProcessWholeArchiveOption
-
-
-def get_leaks_suppressions(cmd):
- supp, newcmd = [], []
- for arg in cmd:
- if arg.endswith(".supp"):
- supp.append(arg)
- else:
- newcmd.append(arg)
- return supp, newcmd
-
-
-MUSL_LIBS = '-lc', '-lcrypt', '-ldl', '-lm', '-lpthread', '-lrt', '-lutil'
-
-
-CUDA_LIBRARIES = {
- '-lcublas_static': '-lcublas',
- '-lcublasLt_static': '-lcublasLt',
- '-lcudart_static': '-lcudart',
- '-lcudnn_static': '-lcudnn',
- '-lcufft_static_nocallback': '-lcufft',
- '-lcurand_static': '-lcurand',
- '-lcusolver_static': '-lcusolver',
- '-lcusparse_static': '-lcusparse',
- '-lmyelin_compiler_static': '-lmyelin',
- '-lmyelin_executor_static': '-lnvcaffe_parser',
- '-lmyelin_pattern_library_static': '',
- '-lmyelin_pattern_runtime_static': '',
- '-lnvinfer_static': '-lnvinfer',
- '-lnvinfer_plugin_static': '-lnvinfer_plugin',
- '-lnvonnxparser_static': '-lnvonnxparser',
- '-lnvparsers_static': '-lnvparsers'
-}
-
-
-def remove_excessive_flags(cmd):
- flags = []
- for flag in cmd:
- if not flag.endswith('.ios.interface') and not flag.endswith('.pkg.fake'):
- flags.append(flag)
- return flags
-
-
-def fix_cmd_for_musl(cmd):
- flags = []
- for flag in cmd:
- if flag not in MUSL_LIBS:
- flags.append(flag)
- return flags
-
-
-def fix_cmd_for_dynamic_cuda(cmd):
- flags = []
- for flag in cmd:
- if flag in CUDA_LIBRARIES:
- flags.append(CUDA_LIBRARIES[flag])
- else:
- flags.append(flag)
- return flags
-
-
-def gen_default_suppressions(inputs, output, source_root):
- import collections
- import os
-
- supp_map = collections.defaultdict(set)
- for filename in inputs:
- sanitizer = os.path.basename(filename).split('.', 1)[0]
- with open(os.path.join(source_root, filename)) as src:
- for line in src:
- line = line.strip()
- if not line or line.startswith('#'):
- continue
- supp_map[sanitizer].add(line)
-
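- # generates one hook per sanitizer, e.g. for a hypothetical lsan.supp
- # (names illustrative):
- # extern "C" const char *__lsan_default_suppressions() {
- # return "leak:foo\nleak:bar";
- # }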
- with open(output, "w") as dst:
- for supp_type, supps in supp_map.items():
- dst.write('extern "C" const char *__%s_default_suppressions() {\n' % supp_type)
- dst.write(' return "{}";\n'.format('\\n'.join(sorted(supps))))
- dst.write('}\n')
-
-
-def parse_args():
- parser = optparse.OptionParser()
- parser.disable_interspersed_args()
- parser.add_option('--musl', action='store_true')
- parser.add_option('--custom-step')
- parser.add_option('--python')
- parser.add_option('--source-root')
- parser.add_option('--dynamic-cuda', action='store_true')
- parser.add_option('--arch')
- parser.add_option('--linker-output')
- parser.add_option('--whole-archive-peers', action='append')
- parser.add_option('--whole-archive-libs', action='append')
- return parser.parse_args()
-
-
-if __name__ == '__main__':
- opts, args = parse_args()
-
- cmd = remove_excessive_flags(args)
- if opts.musl:
- cmd = fix_cmd_for_musl(cmd)
-
- if opts.dynamic_cuda:
- cmd = fix_cmd_for_dynamic_cuda(cmd)
- cmd = ProcessWholeArchiveOption(opts.arch, opts.whole_archive_peers, opts.whole_archive_libs).construct_cmd(cmd)
-
- if opts.custom_step:
- assert opts.python
- subprocess.check_call([opts.python] + [opts.custom_step] + args)
-
- supp, cmd = get_leaks_suppressions(cmd)
- if supp:
- src_file = "default_suppressions.cpp"
- gen_default_suppressions(supp, src_file, opts.source_root)
- cmd += [src_file]
-
- if opts.linker_output:
- stdout = open(opts.linker_output, 'w')
- else:
- stdout = sys.stdout
-
- rc = subprocess.call(cmd, shell=False, stderr=sys.stderr, stdout=stdout)
- sys.exit(rc)
diff --git a/build/scripts/link_lib.py b/build/scripts/link_lib.py
deleted file mode 100644
index 5ca50ef844..0000000000
--- a/build/scripts/link_lib.py
+++ /dev/null
@@ -1,85 +0,0 @@
-import sys
-import subprocess
-import tempfile
-import os
-
-
-class Opts(object):
- def __init__(self, args):
- self.archiver = args[0]
- self.arch_type = args[1]
- self.llvm_ar_format = args[2]
- self.build_root = args[3]
- self.plugin = args[4]
- self.output = args[5]
- auto_input = args[6:]
-
- if self.arch_type == 'GNU_AR':
- self.create_flags = ['rcs']
- self.modify_flags = ['-M']
- elif self.arch_type == 'LLVM_AR':
- self.create_flags = ['rcs', '--format=%s' % self.llvm_ar_format]
- self.modify_flags = ['-M']
- elif self.arch_type == 'LIBTOOL':
- self.create_flags = ['-static', '-o']
- self.modify_flags = []
-
- need_modify = self.arch_type != 'LIBTOOL' and any(item.endswith('.a') for item in auto_input)
- if need_modify:
- self.objs = [x for x in auto_input if x.endswith('.o')]
- self.libs = [x for x in auto_input if x.endswith('.a')]
- else:
- self.objs = auto_input
- self.libs = []
-
- self.plugin_flags = ['--plugin', self.plugin] if self.plugin != 'None' else []
-
-
-def get_opts(args):
- return Opts(args)
-
-
-if __name__ == "__main__":
- opts = get_opts(sys.argv[1:])
-
- # There is a bug in llvm-ar. Some files with size slightly greater than 2^32
- # still get the GNU format instead of GNU64 and cause link problems.
- # The workaround just lowers llvm-ar's GNU64 threshold to 2^31.
- if opts.arch_type == 'LLVM_AR':
- os.environ['SYM64_THRESHOLD'] = '31'
-
- def call():
- try:
- p = subprocess.Popen(cmd, stdin=stdin, cwd=opts.build_root)
- rc = p.wait()
- return rc
- except OSError as e:
- raise Exception('while running %s: %s' % (' '.join(cmd), e))
-
- try:
- os.unlink(opts.output)
- except OSError:
- pass
-
- if not opts.libs:
- cmd = [opts.archiver] + opts.create_flags + opts.plugin_flags + [opts.output] + opts.objs
- stdin = None
- exit_code = call()
- else:
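- # merge archives via an ar MRI script, e.g. (names illustrative):
- # CREATE libfoo.a
- # ADDLIB libbar.a
- # ADDMOD baz.o
- # SAVE
- # END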
- temp = tempfile.NamedTemporaryFile(dir=os.path.dirname(opts.output), delete=False)
-
- with open(temp.name, 'w') as tmp:
- tmp.write('CREATE {0}\n'.format(opts.output))
- for lib in opts.libs:
- tmp.write('ADDLIB {0}\n'.format(lib))
- for obj in opts.objs:
- tmp.write('ADDMOD {0}\n'.format(obj))
- tmp.write('SAVE\n')
- tmp.write('END\n')
- cmd = [opts.archiver] + opts.modify_flags + opts.plugin_flags
- stdin = open(temp.name)
- exit_code = call()
- os.remove(temp.name)
-
- if exit_code != 0:
- raise Exception('{0} returned non-zero exit code {1}. Stop.'.format(' '.join(cmd), exit_code))
diff --git a/build/scripts/llvm_opt_wrapper.py b/build/scripts/llvm_opt_wrapper.py
deleted file mode 100644
index 38ca3004af..0000000000
--- a/build/scripts/llvm_opt_wrapper.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import subprocess
-import sys
-
-
-def fix(s):
- # we use '#' instead of ',' because ymake always splits args by comma
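- # e.g. an option like '-internalize-public-api-list=main#init' (illustrative)
- # becomes '-internalize-public-api-list=main,init'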
- if 'internalize' in s:
- return s.replace('#', ',')
-
- return s
-
-
-if __name__ == '__main__':
- path = sys.argv[1]
- args = [fix(s) for s in [path] + sys.argv[2:]]
-
- rc = subprocess.call(args, shell=False, stderr=sys.stderr, stdout=sys.stdout)
- sys.exit(rc)
diff --git a/build/scripts/merge_files.py b/build/scripts/merge_files.py
deleted file mode 100644
index d42d6a2139..0000000000
--- a/build/scripts/merge_files.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import sys
-
-
-if __name__ == "__main__":
- with open(sys.argv[1], "w") as f:
- for appended in sys.argv[2:]:
- with open(appended) as a:
- f.write(a.read())
diff --git a/build/scripts/preprocess.py b/build/scripts/preprocess.py
deleted file mode 100644
index 4657bef732..0000000000
--- a/build/scripts/preprocess.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import sys
-import os
-
-
-def load_file(p):
- with open(p, 'r') as f:
- return f.read()
-
-
-def step(base, data, hh):
- def flt():
- for l in data.split('\n'):
- if l in hh:
- pp = os.path.join(base, hh[l])
-
- yield '\n\n' + load_file(pp) + '\n\n'
-
- os.unlink(pp)
- else:
- yield l
-
- return '\n'.join(flt())
-
-
-def subst_headers(path, headers):
- hh = dict()
-
- for h in headers:
- hh['# include "' + h + '"'] = h
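- # i.e. a literal source line of the form '# include "stack.hh"' is
- # replaced by that header's contents (and the header file removed)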
-
- data = load_file(path)
- prev = data
-
- while True:
- ret = step(os.path.dirname(path), prev, hh)
-
- if ret == prev:
- break
-
- prev = ret
-
- if data != prev:
- with open(path, 'w') as f:
- f.write(prev)
-
-
-if __name__ == '__main__':
- subst_headers(sys.argv[1], ['stack.hh', 'position.hh', 'location.hh'])
diff --git a/build/scripts/py_compile.py b/build/scripts/py_compile.py
deleted file mode 100755
index 936dbe8816..0000000000
--- a/build/scripts/py_compile.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-from __future__ import print_function, absolute_import, division
-
-import marshal
-import sys
-
-
-def main():
- srcpathx, in_fname, out_fname = sys.argv[1:]
- srcpath = srcpathx[:-1]
-
- with open(in_fname, 'r') as in_file:
- source = in_file.read()
-
- code = compile(source, srcpath, 'exec', dont_inherit=True)
-
- with open(out_fname, 'wb') as out_file:
- marshal.dump(code, out_file)
-
-
-if __name__ == "__main__":
- main()
diff --git a/build/scripts/rodata2asm.py b/build/scripts/rodata2asm.py
deleted file mode 100644
index 555639499f..0000000000
--- a/build/scripts/rodata2asm.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import os
-import argparse
-
-
-def main():
- parser = argparse.ArgumentParser(description='Convert rodata into asm source with embedded file content')
- parser.add_argument('symbol', help='symbol name exported from the generated file')
- parser.add_argument('rodata', help='input .rodata file path')
- parser.add_argument('asm', type=argparse.FileType('w', encoding='UTF-8'), help='destination .asm file path')
- parser.add_argument('--elf', action='store_true')
-
- args = parser.parse_args()
-
- file_size = os.path.getsize(args.rodata)
-
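- # emits NASM source along these lines (symbol and path illustrative):
- # global data
- # global dataSize
- # SECTION .rodata ALIGN=16
- # data:
- # incbin "payload.rodata"
- # align 4, db 0
- # dataSize:
- # dd 1234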
- args.asm.write('global ' + args.symbol + '\n')
- args.asm.write('global ' + args.symbol + 'Size' + '\n')
- args.asm.write('SECTION .rodata ALIGN=16\n')
- args.asm.write(args.symbol + ':\nincbin "' + args.rodata + '"\n')
- args.asm.write('align 4, db 0\n')
- args.asm.write(args.symbol + 'Size:\ndd ' + str(file_size) + '\n')
-
- if args.elf:
- args.asm.write('size ' + args.symbol + ' ' + str(file_size) + '\n')
- args.asm.write('size ' + args.symbol + 'Size 4\n')
-
- args.asm.close()
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/scripts/run_llvm_dsymutil.py b/build/scripts/run_llvm_dsymutil.py
deleted file mode 100644
index 4f43362ad9..0000000000
--- a/build/scripts/run_llvm_dsymutil.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import os
-import sys
-import subprocess
-
-
-if __name__ == '__main__':
- with open(os.devnull, 'w') as fnull:
- p = subprocess.Popen(sys.argv[1:], shell=False, stderr=fnull, stdout=sys.stdout)
-
- p.communicate()
- sys.exit(p.returncode)
diff --git a/build/scripts/stdout2stderr.py b/build/scripts/stdout2stderr.py
deleted file mode 100644
index d7861fdda3..0000000000
--- a/build/scripts/stdout2stderr.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import subprocess
-import sys
-
-if __name__ == '__main__':
- assert len(sys.argv) > 1
- sys.exit(subprocess.Popen(sys.argv[1:], stdout=sys.stderr).wait())
diff --git a/build/scripts/tar_sources.py b/build/scripts/tar_sources.py
deleted file mode 100644
index 54e2839a69..0000000000
--- a/build/scripts/tar_sources.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import argparse
-import os
-import tarfile
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('--exts', nargs='*', default=None)
- parser.add_argument('--flat', action='store_true')
- parser.add_argument('--input', required=True)
- parser.add_argument('--output', required=True)
- parser.add_argument('--prefix', default=None)
-
- return parser.parse_args()
-
-
-def main():
- args = parse_args()
-
- py_srcs = []
- for root, _, files in os.walk(args.input):
- for f in files:
- if not args.exts or f.endswith(tuple(args.exts)):
- py_srcs.append(os.path.join(root, f))
-
- compression_mode = ''
- if args.output.endswith(('.tar.gz', '.tgz')):
- compression_mode = 'gz'
- elif args.output.endswith('.bzip2'):
- compression_mode = 'bz2'
-
- with tarfile.open(args.output, 'w:{}'.format(compression_mode)) as out:
- for f in py_srcs:
- arcname = os.path.basename(f) if args.flat else os.path.relpath(f, args.input)
- if args.prefix:
- arcname = os.path.join(args.prefix, arcname)
- out.add(f, arcname=arcname)
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/scripts/tared_protoc.py b/build/scripts/tared_protoc.py
deleted file mode 100644
index 7643e1dbfe..0000000000
--- a/build/scripts/tared_protoc.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import os
-import optparse
-import tarfile
-import contextlib
-import subprocess as sp
-
-
-def parse_args():
- parser = optparse.OptionParser()
- parser.disable_interspersed_args()
- parser.add_option('--tar-output')
- parser.add_option('--protoc-out-dir')
- return parser.parse_args()
-
-
-def main():
- opts, args = parse_args()
- assert opts.tar_output
- assert opts.protoc_out_dir
-
- if not os.path.exists(opts.protoc_out_dir):
- os.makedirs(opts.protoc_out_dir)
-
- sp.check_call(args)
-
- with contextlib.closing(tarfile.open(opts.tar_output, 'w')) as tf:
- tf.add(opts.protoc_out_dir, arcname='')
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/scripts/touch.py b/build/scripts/touch.py
deleted file mode 100755
index e01ba7f86b..0000000000
--- a/build/scripts/touch.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python
-
-import optparse
-import os
-import sys
-import time
-
-
-def main(argv):
- parser = optparse.OptionParser(add_help_option=False)
- parser.disable_interspersed_args()
-
- parser.add_option('-?', '--help', dest='help',
- action='store_true', default=None, help='print help')
- parser.add_option('-t', dest='t', action='store', default=None)
-
- opts, argv_rest = parser.parse_args(argv)
- if getattr(opts, 'help', False):
- parser.print_help()
- return 0
-
- tspec = opts.t
- if tspec is None:
- times = None
- else:
- head, sep, tail = tspec.partition('.')
- if len(head) < 8:
- raise Exception("time spec must follow format [[CC]YY]MMDDhhmm[.SS]: " + tspec + '; ' + head)
- tfmt = ''
- if len(head) == 12:
- tfmt += '%Y'
- elif len(head) == 10:
- tfmt += '%y'
- tfmt += '%m%d%H%M'
- if len(tail) == 2:
- tfmt += '.%S'
- mtime = time.mktime(time.strptime(tspec, tfmt))
- times = (mtime, mtime)
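- # e.g. '-t 202208191500.44' (value illustrative) parses with '%Y%m%d%H%M.%S'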
-
- for file in argv_rest:
- try:
- os.utime(file, times)
- except OSError:
- open(file, 'w').close()
- if times is not None:
- os.utime(file, times)
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/build/scripts/xargs.py b/build/scripts/xargs.py
deleted file mode 100644
index 5d68929ecc..0000000000
--- a/build/scripts/xargs.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import sys
-import os
-import subprocess
-
-if __name__ == '__main__':
- pos = sys.argv.index('--')
- fname = sys.argv[pos + 1]
- cmd = sys.argv[pos + 2:]
-
- with open(fname, 'r') as f:
- args = [x.strip() for x in f]
-
- os.remove(fname)
-
- p = subprocess.Popen(cmd + args, shell=False, stderr=sys.stderr, stdout=sys.stdout)
- p.communicate()
-
- sys.exit(p.returncode)
diff --git a/build/scripts/yield_line.py b/build/scripts/yield_line.py
deleted file mode 100644
index 9c1c539146..0000000000
--- a/build/scripts/yield_line.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import sys
-
-if __name__ == '__main__':
- pos = sys.argv.index('--')
-
- with open(sys.argv[pos + 1], 'a') as f:
- f.write(' '.join(sys.argv[pos + 2:]) + '\n')