aboutsummaryrefslogtreecommitdiffstats
path: root/build/scripts
diff options
context:
space:
mode:
authoralexv-smirnov <alex@ydb.tech>2023-06-13 11:05:01 +0300
committeralexv-smirnov <alex@ydb.tech>2023-06-13 11:05:01 +0300
commitbf0f13dd39ee3e65092ba3572bb5b1fcd125dcd0 (patch)
tree1d1df72c0541a59a81439842f46d95396d3e7189 /build/scripts
parent8bfdfa9a9bd19bddbc58d888e180fbd1218681be (diff)
downloadydb-bf0f13dd39ee3e65092ba3572bb5b1fcd125dcd0.tar.gz
add ymake export to ydb
Diffstat (limited to 'build/scripts')
-rw-r--r--build/scripts/__pycache__/process_command_files.cpython-310.pycbin0 -> 1215 bytes
-rw-r--r--build/scripts/__pycache__/process_whole_archive_option.cpython-310.pycbin0 -> 5217 bytes
-rw-r--r--build/scripts/_check_compiler.cpp1
-rw-r--r--build/scripts/_fake_src.cpp2
-rw-r--r--build/scripts/append_file.py9
-rw-r--r--build/scripts/autotar_gendirs.py70
-rwxr-xr-xbuild/scripts/build_catboost.py71
-rw-r--r--build/scripts/build_dll_and_java.py47
-rw-r--r--build/scripts/build_java_codenav_index.py49
-rw-r--r--build/scripts/build_java_with_error_prone.py36
-rw-r--r--build/scripts/build_java_with_error_prone2.py87
-rwxr-xr-xbuild/scripts/build_mn.py330
-rwxr-xr-xbuild/scripts/build_pln_header.py35
-rw-r--r--build/scripts/c_templates/README.md3
-rw-r--r--build/scripts/c_templates/ya.make7
-rwxr-xr-xbuild/scripts/cat.py15
-rw-r--r--build/scripts/cgo1_wrapper.py45
-rw-r--r--build/scripts/check_config_h.py89
-rw-r--r--build/scripts/clang_tidy.py185
-rw-r--r--build/scripts/clang_tidy_arch.py33
-rw-r--r--build/scripts/clang_wrapper.py53
-rw-r--r--build/scripts/collect_java_srcs.py51
-rw-r--r--build/scripts/compile_cuda.py168
-rw-r--r--build/scripts/compile_java.py122
-rw-r--r--build/scripts/compile_jsrc.py24
-rw-r--r--build/scripts/compile_pysrc.py101
-rwxr-xr-xbuild/scripts/configure_file.py59
-rw-r--r--build/scripts/container.py30
-rw-r--r--build/scripts/copy_docs_files.py102
-rw-r--r--build/scripts/copy_docs_files_to_dir.py164
-rw-r--r--build/scripts/copy_files_to_dir.py59
-rw-r--r--build/scripts/copy_to_dir.py75
-rw-r--r--build/scripts/coverage-info.py282
-rw-r--r--build/scripts/cpp_flatc_wrapper.py30
-rw-r--r--build/scripts/create_jcoverage_report.py112
-rw-r--r--build/scripts/custom_link_green_mysql.py97
-rw-r--r--build/scripts/decimal_md5.py79
-rw-r--r--build/scripts/error.py77
-rw-r--r--build/scripts/extract_asrc.py23
-rw-r--r--build/scripts/extract_docs.py43
-rw-r--r--build/scripts/extract_jacoco_report.py29
-rw-r--r--build/scripts/f2c.py58
-rw-r--r--build/scripts/fail_module_cmd.py7
-rwxr-xr-xbuild/scripts/fetch_from.py396
-rw-r--r--build/scripts/fetch_from_archive.py36
-rw-r--r--build/scripts/fetch_from_external.py60
-rw-r--r--build/scripts/fetch_from_mds.py50
-rw-r--r--build/scripts/fetch_from_npm.py109
-rwxr-xr-xbuild/scripts/fetch_from_sandbox.py272
-rw-r--r--build/scripts/fetch_resource.py43
-rw-r--r--build/scripts/filter_zip.py71
-rw-r--r--build/scripts/find_and_tar.py22
-rw-r--r--build/scripts/find_time_trace.py17
-rw-r--r--build/scripts/fix_java_command_file_cp.py34
-rw-r--r--build/scripts/fix_msvc_output.py43
-rw-r--r--build/scripts/fs_tools.py117
-rw-r--r--build/scripts/gen_aar_gradle_script.py284
-rw-r--r--build/scripts/gen_java_codenav_entry.py57
-rw-r--r--build/scripts/gen_java_codenav_protobuf.py22
-rw-r--r--build/scripts/gen_mx_table.py75
-rw-r--r--build/scripts/gen_py3_reg.py34
-rw-r--r--build/scripts/gen_py_protos.py67
-rw-r--r--build/scripts/gen_py_reg.py32
-rw-r--r--build/scripts/gen_swiftc_output_map.py15
-rw-r--r--build/scripts/gen_tasklet_reg.py51
-rw-r--r--build/scripts/gen_test_apk_gradle_script.py193
-rw-r--r--build/scripts/gen_yql_python_udf.py55
-rw-r--r--build/scripts/generate_mf.py113
-rw-r--r--build/scripts/generate_pom.py336
-rw-r--r--build/scripts/go_fake_include/go_asm.h0
-rw-r--r--build/scripts/go_proto_wrapper.py81
-rw-r--r--build/scripts/go_tool.py891
-rw-r--r--build/scripts/ios_wrapper.py180
-rw-r--r--build/scripts/java_pack_to_file.py43
-rw-r--r--build/scripts/jni_swig.py46
-rw-r--r--build/scripts/kt_copy.py17
-rw-r--r--build/scripts/link_asrc.py84
-rw-r--r--build/scripts/link_dyn_lib.py353
-rw-r--r--build/scripts/link_exe.py186
-rw-r--r--build/scripts/link_fat_obj.py97
-rw-r--r--build/scripts/link_jsrc.py27
-rw-r--r--build/scripts/link_lib.py101
-rw-r--r--build/scripts/list.py4
-rw-r--r--build/scripts/llvm_opt_wrapper.py18
-rw-r--r--build/scripts/make_container.py94
-rw-r--r--build/scripts/make_container_layer.py24
-rw-r--r--build/scripts/make_java_classpath_file.py26
-rw-r--r--build/scripts/make_java_srclists.py128
-rw-r--r--build/scripts/make_manifest_from_bf.py28
-rwxr-xr-xbuild/scripts/mangle_typeinfo_names.py317
-rw-r--r--build/scripts/merge_coverage_data.py32
-rw-r--r--build/scripts/merge_files.py8
-rwxr-xr-xbuild/scripts/mkdir.py12
-rw-r--r--build/scripts/mkdocs_builder_wrapper.py36
-rwxr-xr-xbuild/scripts/mkver.py12
-rw-r--r--build/scripts/move.py15
-rw-r--r--build/scripts/pack_ios.py48
-rw-r--r--build/scripts/pack_jcoverage_resources.py24
-rw-r--r--build/scripts/perl_wrapper.py24
-rw-r--r--build/scripts/postprocess_go_fbs.py72
-rw-r--r--build/scripts/preprocess.py48
-rw-r--r--build/scripts/process_command_files.pycbin0 -> 1845 bytes
-rw-r--r--build/scripts/process_whole_archive_option.py183
-rw-r--r--build/scripts/process_whole_archive_option.pycbin0 -> 6669 bytes
-rwxr-xr-xbuild/scripts/py_compile.py24
-rw-r--r--build/scripts/python_yndexer.py53
-rw-r--r--build/scripts/resolve_java_srcs.py106
-rw-r--r--build/scripts/retry.py29
-rw-r--r--build/scripts/rodata2asm.py31
-rw-r--r--build/scripts/run_ios_simulator.py79
-rw-r--r--build/scripts/run_javac.py122
-rw-r--r--build/scripts/run_junit.py65
-rw-r--r--build/scripts/run_llvm_dsymutil.py11
-rw-r--r--build/scripts/run_msvc_wine.py586
-rw-r--r--build/scripts/run_sonar.py121
-rw-r--r--build/scripts/setup_java_tmpdir.py40
-rw-r--r--build/scripts/sky.py43
-rw-r--r--build/scripts/stderr2stdout.py6
-rw-r--r--build/scripts/stdout2stderr.py6
-rwxr-xr-xbuild/scripts/symlink.py29
-rw-r--r--build/scripts/tar_directory.py45
-rw-r--r--build/scripts/tar_sources.py41
-rw-r--r--build/scripts/tared_protoc.py31
-rwxr-xr-xbuild/scripts/touch.py50
-rw-r--r--build/scripts/unpacking_jtest_runner.py148
-rw-r--r--build/scripts/with_coverage.py40
-rw-r--r--build/scripts/with_crash_on_timeout.py22
-rw-r--r--build/scripts/with_kapt_args.py35
-rw-r--r--build/scripts/with_pathsep_resolve.py23
-rw-r--r--build/scripts/wrap_groovyc.py23
-rw-r--r--build/scripts/wrapcc.py45
-rw-r--r--build/scripts/wrapper.py11
-rw-r--r--build/scripts/write_file_size.py15
-rw-r--r--build/scripts/writer.py40
-rw-r--r--build/scripts/xargs.py18
-rw-r--r--build/scripts/ya.make110
-rw-r--r--build/scripts/yield_line.py8
-rw-r--r--build/scripts/yndexer.py79
138 files changed, 10957 insertions, 0 deletions
diff --git a/build/scripts/__pycache__/process_command_files.cpython-310.pyc b/build/scripts/__pycache__/process_command_files.cpython-310.pyc
new file mode 100644
index 0000000000..bc379d64a6
--- /dev/null
+++ b/build/scripts/__pycache__/process_command_files.cpython-310.pyc
Binary files differ
diff --git a/build/scripts/__pycache__/process_whole_archive_option.cpython-310.pyc b/build/scripts/__pycache__/process_whole_archive_option.cpython-310.pyc
new file mode 100644
index 0000000000..fd67bee5b9
--- /dev/null
+++ b/build/scripts/__pycache__/process_whole_archive_option.cpython-310.pyc
Binary files differ
diff --git a/build/scripts/_check_compiler.cpp b/build/scripts/_check_compiler.cpp
new file mode 100644
index 0000000000..53c5fdf179
--- /dev/null
+++ b/build/scripts/_check_compiler.cpp
@@ -0,0 +1 @@
+#include <stdio.h>
diff --git a/build/scripts/_fake_src.cpp b/build/scripts/_fake_src.cpp
new file mode 100644
index 0000000000..139597f9cb
--- /dev/null
+++ b/build/scripts/_fake_src.cpp
@@ -0,0 +1,2 @@
+
+
diff --git a/build/scripts/append_file.py b/build/scripts/append_file.py
new file mode 100644
index 0000000000..1413cec352
--- /dev/null
+++ b/build/scripts/append_file.py
@@ -0,0 +1,9 @@
import sys


def append_lines(file_path, texts):
    """Append every string in *texts* to *file_path*, one per line.

    Mode "a" creates the file when it is missing and never truncates
    existing content, so repeated invocations accumulate lines.
    """
    with open(file_path, "a") as f:
        for text in texts:
            f.write('{}\n'.format(text))


if __name__ == "__main__":
    # argv[1] is the target file; each remaining argument becomes one line.
    append_lines(sys.argv[1], sys.argv[2:])
diff --git a/build/scripts/autotar_gendirs.py b/build/scripts/autotar_gendirs.py
new file mode 100644
index 0000000000..a1228108aa
--- /dev/null
+++ b/build/scripts/autotar_gendirs.py
@@ -0,0 +1,70 @@
+from __future__ import print_function
+
+import os
+import sys
+import argparse
+import tarfile
+import subprocess
+
+
def is_exe(fpath):
    """True when *fpath* is a regular file with the executable bit set."""
    if not os.path.isfile(fpath):
        return False
    return os.access(fpath, os.X_OK)
+
+
def pack_dir(dir_path, dest_path):
    # Create a tar archive of *dir_path* at *dest_path*, archived under its
    # basename. Prefers the system `tar` binary; falls back to the tarfile
    # module when neither well-known binary is executable.
    dir_path = os.path.abspath(dir_path)
    for tar_exe in ('/usr/bin/tar', '/bin/tar'):
        if is_exe(tar_exe):
            subprocess.check_call([tar_exe, '-cf', dest_path, '-C', os.path.dirname(dir_path), os.path.basename(dir_path)])
            break
    else:
        # for/else: this branch runs only when the loop finished without
        # `break`, i.e. no usable tar binary was found.
        with tarfile.open(dest_path, 'w') as out:
            out.add(dir_path, arcname=os.path.basename(dir_path))
+
+
def unpack_dir(tared_dir, dest_path):
    # Extract the archive *tared_dir* into *dest_path*, creating the
    # destination when needed. Mirrors pack_dir(): system tar first,
    # tarfile-module fallback via for/else.
    tared_dir = os.path.abspath(tared_dir)
    if not os.path.exists(dest_path):
        os.makedirs(dest_path)
    for tar_exe in ('/usr/bin/tar', '/bin/tar'):
        if is_exe(tar_exe):
            subprocess.check_call([tar_exe, '-xf', tared_dir, '-C', dest_path])
            break
    else:
        # NOTE(review): extractall() on untrusted archives can write outside
        # dest_path; input here comes from the build root, so it is trusted.
        with tarfile.open(tared_dir, 'r') as tar_file:
            tar_file.extractall(dest_path)
+
+
# Must only be used to pack directories in build root
# Must silently accept empty list of dirs and do nothing in such case (workaround for ymake.core.conf limitations)
def main(args):
    """Pack or unpack build-root directories.

    --pack: dirs[i] is archived into outs[i] (counts must match).
    --unpack: each dir must end with --ext and is extracted next to itself.
    Returns 0 on success, 1 on a usage/argument error (message on stderr).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--pack', action='store_true', default=False)
    parser.add_argument('--unpack', action='store_true', default=False)
    parser.add_argument('--ext')
    parser.add_argument('--outs', nargs='*', default=[])
    parser.add_argument('dirs', nargs='*')
    args = parser.parse_args(args)

    if args.pack:
        if len(args.dirs) != len(args.outs):
            # Fixed typo in error message ("oder" -> "order").
            print("Number and order of dirs to pack must match to the number and order of outs", file=sys.stderr)
            return 1
        for dir, dest in zip(args.dirs, args.outs):
            pack_dir(dir, dest)
    elif args.unpack:
        for tared_dir in args.dirs:
            # Refuse inputs produced by some other rule (wrong extension).
            if not tared_dir.endswith(args.ext):
                print("Requested to unpack '{}' which does not have required extension '{}'".format(tared_dir, args.ext), file=sys.stderr)
                return 1
            dest = os.path.dirname(tared_dir)
            unpack_dir(tared_dir, dest)
    else:
        print("Neither --pack nor --unpack specified. Don't know what to do.", file=sys.stderr)
        return 1

    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
diff --git a/build/scripts/build_catboost.py b/build/scripts/build_catboost.py
new file mode 100755
index 0000000000..78334fc5f7
--- /dev/null
+++ b/build/scripts/build_catboost.py
@@ -0,0 +1,71 @@
+import sys
+import os
+import shutil
+import re
+import subprocess
+
def get_value(val):
    """Return the text after the first '=' in *val*; '' when no '=' is present."""
    _, sep, rest = val.partition('=')
    return rest if sep else ''
+
+
class BuildCbBase(object):
    """Generates a .cpp wrapper plus a .rodata blob embedding a catboost model.

    NOTE(review): relies on self.archiver being set before run() is called
    (BuildCb below assigns it from argv) -- confirm for any other caller.
    """

    def run(self, cbmodel, cbname, cb_cpp_path):

        # Symbol names under which the archiver exposes the model bytes.
        data_prefix = "CB_External_"
        data = data_prefix + cbname
        datasize = data + "Size"

        # The emitted global constructs the calcer from the embedded bytes.
        cbtype = "const NCatboostCalcer::TCatboostCalcer"
        cbload = "(ReadModel({0}, {1}, EModelType::CatboostBinary))".format(data, datasize)

        # Write to a .tmp file first, then move into place (atomic update).
        cb_cpp_tmp_path = cb_cpp_path + ".tmp"
        cb_cpp_tmp = open(cb_cpp_tmp_path, 'w')

        cb_cpp_tmp.write("#include <kernel/catboost/catboost_calcer.h>\n")

        # .rodata sits next to the generated .cpp; the archiver fills it in.
        ro_data_path = os.path.dirname(cb_cpp_path) + "/" + data_prefix + cbname + ".rodata"
        cb_cpp_tmp.write("namespace{\n")
        cb_cpp_tmp.write("    extern \"C\" {\n")
        cb_cpp_tmp.write("        extern const unsigned char {1}{0}[];\n".format(cbname, data_prefix))
        cb_cpp_tmp.write("        extern const ui32 {1}{0}Size;\n".format(cbname, data_prefix))
        cb_cpp_tmp.write("    }\n")
        cb_cpp_tmp.write("}\n")
        # NOTE(review): archiver stderr is piped but never read, and the exit
        # code from wait() is ignored -- failures are silent; verify intended.
        archiverCall = subprocess.Popen([self.archiver, "-q", "-p", "-o", ro_data_path, cbmodel], stdout=None, stderr=subprocess.PIPE)
        archiverCall.wait()
        cb_cpp_tmp.write("extern {0} {1};\n".format(cbtype, cbname))
        cb_cpp_tmp.write("{0} {1}{2};".format(cbtype, cbname, cbload))
        cb_cpp_tmp.close()
        shutil.move(cb_cpp_tmp_path, cb_cpp_path)
+
class BuildCb(BuildCbBase):
    """CLI adapter: unpacks positional argv into BuildCbBase.run() arguments."""

    def run(self, argv):
        # Python 2 script (print statement syntax below).
        if len(argv) < 5:
            print >>sys.stderr, "BuildCb.Run(<ARCADIA_ROOT> <archiver> <mninfo> <mnname> <cppOutput> [params...])"
            sys.exit(1)

        self.SrcRoot = argv[0]
        self.archiver = argv[1]  # consumed by BuildCbBase.run()
        cbmodel = argv[2]
        cbname = argv[3]
        cb_cpp_path = argv[4]

        super(BuildCb, self).run(cbmodel, cbname, cb_cpp_path)
+
+
def build_cb_f(argv):
    # Entry point dispatched by name from the __main__ block below.
    build_cb = BuildCb()
    build_cb.run(argv)


if __name__ == '__main__':
    # argv[1] selects a function defined in this module; the rest are its args.
    if len(sys.argv) < 2:
        print >>sys.stderr, "Usage: build_cb.py <funcName> <args...>"
        sys.exit(1)

    if (sys.argv[2:]):
        globals()[sys.argv[1]](sys.argv[2:])
    else:
        globals()[sys.argv[1]]()
diff --git a/build/scripts/build_dll_and_java.py b/build/scripts/build_dll_and_java.py
new file mode 100644
index 0000000000..b9d8aff4df
--- /dev/null
+++ b/build/scripts/build_dll_and_java.py
@@ -0,0 +1,47 @@
+import os
+import subprocess
+import sys
+
+
def just_do_it(argv):
    """Run the java build then the dll build for a mixed dll+java module.

    argv is a flat list where argv[0] is a delimiter token splitting it into
    seven groups: dll_cmd, java_cmd, inputs, dll_out, java_out, jsrs_out,
    roots (= [build_root, source_root]). Object inputs are stripped from the
    java command; java/jsrc inputs are stripped from the dll command, with
    paths normalized against the build/source roots first.
    """
    delim = argv[0]
    args = []
    # The first item equals delim, so the first append creates group 0.
    for item in argv:
        if item == delim:
            args.append([])
        else:
            args[-1].append(item)
    dll_cmd, java_cmd, inputs, dll_out, java_out, jsrs_out, roots = args
    dll_out, java_out, jsrs_out, build_root, source_root = dll_out[0], java_out[0], jsrs_out[0], roots[0], roots[1]
    for inp in inputs:
        origin_inp = inp
        # Rebase absolute paths: under build_root -> build-relative,
        # otherwise -> source-relative.
        if os.path.isabs(inp):
            if os.path.relpath(inp, build_root).startswith('..'):
                inp = os.path.relpath(inp, source_root)
            else:
                inp = os.path.relpath(inp, build_root)
        ext = os.path.splitext(inp)[1]
        if ext in ('.o', '.obj'):
            # Object files belong to the dll link only: drop from java_cmd,
            # trying the spelling (absolute / slash-normalized) it was
            # recorded under.
            if os.path.join(build_root, inp) in java_cmd:
                inp = os.path.join(build_root, inp)
            if sys.platform == 'win32':
                inp = inp.replace('\\', '/')
                if inp not in java_cmd:
                    inp = build_root + '/' + inp
            java_cmd.remove(inp)
        if ext in ('.java', '.jsrc'):
            # Java sources belong to the java build only: drop from dll_cmd.
            if origin_inp in dll_cmd:
                inp = origin_inp
            elif os.path.join(build_root, inp) in dll_cmd:
                inp = os.path.join(build_root, inp)
            if sys.platform == 'win32':
                inp = inp.replace('\\', '/')
            dll_cmd.remove(inp)
    # Point the java command at its own output instead of the dll output.
    java_cmd.insert(java_cmd.index(dll_out), java_out)
    java_cmd.remove(dll_out)
    subprocess.check_call(java_cmd)
    subprocess.check_call(dll_cmd)


if __name__ == '__main__':
    just_do_it(sys.argv[1:])
diff --git a/build/scripts/build_java_codenav_index.py b/build/scripts/build_java_codenav_index.py
new file mode 100644
index 0000000000..d7ac4f3213
--- /dev/null
+++ b/build/scripts/build_java_codenav_index.py
@@ -0,0 +1,49 @@
+import sys
+import re
+import os
+import subprocess
+
# Names of the symlinks created inside the build root so the kythe indexer
# sees stable, root-relative paths for both the source and build trees.
FAKE_ARCADIA_ROOT = 'fake_arcadia_root'
FAKE_BUILD_ROOT = 'fake_build_root'
+
+
def modify_sources_file(origin, target, source_roots_map):
    """Rewrite the whitespace-separated source list in *origin* into *target*,
    replacing every path prefix found in *source_roots_map* with its fake root.
    Paths under no known root pass through unchanged; output is space-joined.
    """

    def _remap(path):
        for real_root, fake_root in source_roots_map.items():
            if path.startswith(real_root):
                return os.path.join(fake_root, os.path.relpath(path, real_root))
        return path

    with open(origin) as src_file:
        tokens = []
        for line in src_file:
            tokens.extend(tok for tok in re.split('\\s+', line) if tok)
    with open(target, 'w') as dst_file:
        dst_file.write(' '.join(_remap(tok) for tok in tokens))
+
+
def just_do_it(argv):
    """Run javac under kythe env vars to produce a code-navigation index.

    Creates fake-root symlinks inside the build root so indexed paths are
    stable, runs the compiler, and always removes the symlinks afterwards.
    """
    corpus_name, build_root, arcadia_root, sources_file, javac_tail_cmd = argv[0], argv[1], argv[2], argv[3], argv[4:]
    fake_arcadia_root = os.path.join(build_root, FAKE_ARCADIA_ROOT)
    fake_build_root = os.path.join(build_root, FAKE_BUILD_ROOT)
    fake_source_roots = {
        arcadia_root: fake_arcadia_root,
        build_root: fake_build_root,
    }
    # Rewritten list is written next to the original with a '_' prefix.
    modify_sources_file(sources_file, os.path.join(os.path.dirname(sources_file), '_' + os.path.basename(sources_file)), fake_source_roots)
    kindex_data_root = '{}/kindex'.format(os.path.join(build_root, os.path.dirname(corpus_name)))
    if not os.path.exists(kindex_data_root):
        os.makedirs(kindex_data_root)
    # Kythe extractor is configured purely through environment variables.
    env = os.environ.copy()
    env['KYTHE_ROOT_DIRECTORY'] = build_root
    env['KYTHE_OUTPUT_DIRECTORY'] = kindex_data_root
    env['KYTHE_CORPUS'] = os.path.relpath(corpus_name, build_root)
    os.symlink(arcadia_root, fake_arcadia_root)
    os.symlink(build_root, fake_build_root)
    try:
        subprocess.check_call(javac_tail_cmd, env=env)
    finally:
        # Always clean up the symlinks, even when javac fails.
        os.unlink(fake_arcadia_root)
        os.unlink(fake_build_root)

if __name__ == '__main__':
    just_do_it(sys.argv[1:])
diff --git a/build/scripts/build_java_with_error_prone.py b/build/scripts/build_java_with_error_prone.py
new file mode 100644
index 0000000000..910443552e
--- /dev/null
+++ b/build/scripts/build_java_with_error_prone.py
@@ -0,0 +1,36 @@
+import sys
+import os
+
# error-prone checks demoted from error to warning for this build.
ERROR_PRONE_FLAGS = [
    '-Xep:FunctionalInterfaceMethodChanged:WARN',
    '-Xep:ReturnValueIgnored:WARN',
]

# --add-exports flags needed on JDK 10+ so error-prone can reach javac
# internals hidden by the module system.
JAVA10_EXPORTS = [
    '--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED'
]
+
+
def just_do_it(argv):
    """Replace this process with javac/java running the error-prone checker.

    argv = [java_or_javac, error_prone_tool, javac args...]. The javac branch
    uses the JDK10+ plugin mechanism; the fallback runs the standalone
    ErrorProneCompiler via bootclasspath (pre-9 JDKs).
    """
    java, error_prone_tool, javac_cmd = argv[0], argv[1], argv[2:]
    if java.endswith('javac') or java.endswith('javac.exe'):
        # Caller-supplied -Xep overrides are merged into the defaults and
        # removed from the plain javac arguments. NOTE(review): this mutates
        # the module-level ERROR_PRONE_FLAGS list; harmless here only
        # because os.execv never returns.
        for f in javac_cmd:
            if f.startswith('-Xep'):
                ERROR_PRONE_FLAGS.append(f)
        for f in ERROR_PRONE_FLAGS:
            if f in javac_cmd:
                javac_cmd.remove(f)
        # All -Xep flags must travel inside the single -Xplugin argument.
        os.execv(java, [java] + JAVA10_EXPORTS + ['-processorpath', error_prone_tool, '-XDcompilePolicy=byfile'] + [(' '.join(['-Xplugin:ErrorProne'] + ERROR_PRONE_FLAGS))] + javac_cmd)
    else:
        os.execv(java, [java, '-Xbootclasspath/p:' + error_prone_tool, 'com.google.errorprone.ErrorProneCompiler'] + ERROR_PRONE_FLAGS + javac_cmd)


if __name__ == '__main__':
    just_do_it(sys.argv[1:])
diff --git a/build/scripts/build_java_with_error_prone2.py b/build/scripts/build_java_with_error_prone2.py
new file mode 100644
index 0000000000..ddf1ccbfc1
--- /dev/null
+++ b/build/scripts/build_java_with_error_prone2.py
@@ -0,0 +1,87 @@
+import sys
+import os
+import re
+import subprocess
+import platform
+
+
# error-prone checks demoted from error to warning for this build.
ERROR_PRONE_FLAGS = [
    '-Xep:FunctionalInterfaceMethodChanged:WARN',
    '-Xep:ReturnValueIgnored:WARN',
]

# --add-exports flags needed on JDK 10+ so error-prone can reach javac
# internals hidden by the module system.
JAVA10_EXPORTS = [
    '--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED',
    '--add-exports=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED',
]
+
+
def get_java_version(exe):
    """Return the major Java version reported by *exe* ('8', '11', ...),
    or None when no version banner can be parsed.

    Handles both the legacy 'java version "1.8.0"' and the modern
    'openjdk version "11.0.2"' banner formats, on stdout or stderr.
    """
    # universal_newlines=True makes communicate() return str on Python 3;
    # without it out/err are bytes and bytes.split("\n") below raises
    # TypeError. (Equivalent to text=True, but works on Python 2 as well.)
    p = subprocess.Popen([exe, '-version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
    out, err = p.communicate()
    for line in (out or '').strip().split("\n") + (err or '').strip().split("\n"):
        m = re.match(r'java version "(.+)"', line)
        if m:
            parts = m.groups()[0].split(".")
            # "1.8.0_131" -> "8"; "9.0.1" -> "9"
            return parts[1] if parts[0] == "1" else parts[0]
        m = re.match(r'openjdk version "(\d+).*"', line)
        if m:
            parts = m.groups()[0].split(".")
            return parts[0]
    return None
+
+
def get_classpath(cmd):
    """Return the argument following the first '-classpath' flag in *cmd*,
    or None when the flag is absent or is the last token."""
    try:
        flag_pos = cmd.index('-classpath')
    except ValueError:
        return None
    value_pos = flag_pos + 1
    return cmd[value_pos] if value_pos < len(cmd) else None
+
+
def parse_args(argv):
    """Pick the first three non-flag tokens out of *argv* and return them
    plus the remainder of argv after the third one, as a 4-element list:
    [java, javac, error_prone_tool, trailing_args]."""
    positional = []
    for idx, token in enumerate(argv):
        if token.startswith('-'):
            continue
        positional.append(token)
        if len(positional) == 3:
            break
    return positional + [argv[idx + 1:]]
+
+
def just_do_it(argv):
    """Build the error-prone compiler command for the detected JDK and run it.

    JDK 10+: javac with the ErrorProne -Xplugin; older JDKs: standalone
    ErrorProneCompiler on the bootclasspath. On Windows the command runs as
    a child process; elsewhere the current process is replaced via execv.
    """
    java, javac, error_prone_tool, javac_cmd = parse_args(argv)
    ver = get_java_version(java)
    if not ver:
        raise Exception("Can't determine java version")
    if int(ver) >= 10:
        # Merge caller-supplied -Xep overrides into the defaults and strip
        # them from the plain javac arguments. NOTE(review): mutates the
        # module-level ERROR_PRONE_FLAGS list.
        for f in javac_cmd:
            if f.startswith('-Xep'):
                ERROR_PRONE_FLAGS.append(f)
        for f in ERROR_PRONE_FLAGS:
            if f in javac_cmd:
                javac_cmd.remove(f)
        if '-processor' in javac_cmd:
            # Annotation processors must be visible on the processorpath too.
            classpath = get_classpath(javac_cmd)
            if classpath:
                error_prone_tool = error_prone_tool + os.pathsep + classpath
        cmd = [javac] + JAVA10_EXPORTS + ['-processorpath', error_prone_tool, '-XDcompilePolicy=byfile'] + [(' '.join(['-Xplugin:ErrorProne'] + ERROR_PRONE_FLAGS))] + javac_cmd
    else:
        cmd = [java, '-Xbootclasspath/p:' + error_prone_tool, 'com.google.errorprone.ErrorProneCompiler'] + ERROR_PRONE_FLAGS + javac_cmd
    if platform.system() == 'Windows':
        # No execv semantics on Windows: run as a child and forward its code.
        sys.exit(subprocess.Popen(cmd).wait())
    else:
        os.execv(cmd[0], cmd)


if __name__ == '__main__':
    just_do_it(sys.argv[1:])
diff --git a/build/scripts/build_mn.py b/build/scripts/build_mn.py
new file mode 100755
index 0000000000..5bb03c247c
--- /dev/null
+++ b/build/scripts/build_mn.py
@@ -0,0 +1,330 @@
+#!/usr/bin/env python
+# Ymake MatrixNet support
+
+import sys
+import os
+import shutil
+import re
+import subprocess
+
+
def get_value(val):
    """Extract the value part of a 'key=value' token ('' when there is no '=')."""
    if '=' not in val:
        return ''
    return val.split('=', 1)[1]
+
+
class BuildMnBase(object):
    """Embeds a MatrixNet model: emits a .cpp wrapper plus a .rodata blob.

    NOTE(review): relies on self.archiver (and, for check=True,
    self.fml_unused_tool / self.SrcRoot) being set by a subclass before
    Run() is invoked -- see BuildMn/BuildMns below.
    """

    def Run(self, mninfo, mnname, mnrankingSuffix, mncppPath, check=False, ptr=False, multi=False):
        self.mninfo = mninfo
        self.mnname = mnname
        self.mnrankingSuffix = mnrankingSuffix
        self.mncppPath = mncppPath
        self.check = check
        self.ptr = ptr
        self.multi = multi
        # Symbol names under which the archiver exposes the model bytes.
        dataprefix = "MN_External_"
        mninfoName = os.path.basename(self.mninfo)
        data = dataprefix + mnname
        datasize = data + "Size"

        # Pick the generated C++ type/constructor: multi-category vs SSE
        # model, by-value vs smart-pointer wrapper.
        if self.multi:
            if self.ptr:
                mntype = "const NMatrixnet::TMnMultiCategPtr"
                mnload = "(new NMatrixnet::TMnMultiCateg( {1}, {2}, \"{0}\"))".format(mninfoName, data, datasize)
            else:
                mntype = "const NMatrixnet::TMnMultiCateg"
                mnload = "({1}, {2}, \"{0}\")".format(mninfoName, data, datasize)
        else:
            if self.ptr:
                mntype = "const NMatrixnet::TMnSsePtr"
                mnload = "(new NMatrixnet::TMnSseInfo({1}, {2}, \"{0}\"))".format(mninfoName, data, datasize)
            else:
                mntype = "const NMatrixnet::TMnSseInfo"
                mnload = "({1}, {2}, \"{0}\")".format(mninfoName, data, datasize)

        if self.check:
            self.CheckMn()

        # Write to a .tmp file first, then move into place (atomic update).
        mncpptmpPath = self.mncppPath + ".tmp"
        mncpptmp = open(mncpptmpPath, 'w')

        if self.multi:
            mncpptmp.write("#include <kernel/matrixnet/mn_multi_categ.h>\n")
        else:
            mncpptmp.write("#include <kernel/matrixnet/mn_sse.h>\n")

        # .rodata sits next to the generated .cpp; the archiver fills it in.
        rodatapath = os.path.dirname(self.mncppPath) + "/" + dataprefix + self.mnname + ".rodata"
        mncpptmp.write("namespace{\n")
        mncpptmp.write("    extern \"C\" {\n")
        mncpptmp.write("        extern const unsigned char {1}{0}[];\n".format(self.mnname, dataprefix))
        mncpptmp.write("        extern const ui32 {1}{0}Size;\n".format(self.mnname, dataprefix))
        mncpptmp.write("    }\n")
        mncpptmp.write("}\n")
        # NOTE(review): archiver stderr is piped but never read and the exit
        # status of wait() is ignored -- failures are silent; verify intended.
        archiverCall = subprocess.Popen([self.archiver, "-q", "-p", "-o", rodatapath, self.mninfo], stdout=None, stderr=subprocess.PIPE)
        archiverCall.wait()
        mncpptmp.write("extern {0} {1};\n".format(mntype, self.mnname))
        mncpptmp.write("{0} {1}{2};".format(mntype, self.mnname, mnload))
        mncpptmp.close()
        shutil.move(mncpptmpPath, self.mncppPath)

    def CheckMn(self):
        # Python 2 print statement; warns (but still proceeds) when the
        # checking tool path was never supplied.
        if not self.fml_unused_tool:
            print >>sys.stderr, "fml_unused_tool undefined!"
        failed_msg = "fml_unused_tool failed: {0} -A {1} -e -r {2}".format(self.fml_unused_tool, self.SrcRoot, self.mninfo)
        assert not subprocess.call([self.fml_unused_tool, "-A", self.SrcRoot, "-e", "-r", self.mninfo]), failed_msg
+
+
class BuildMn(BuildMnBase):
    """CLI adapter: unpacks positional argv into BuildMnBase.Run() arguments."""

    def Run(self, argv):
        # Python 2 script (print statement syntax below).
        if len(argv) < 6:
            print >>sys.stderr, "BuildMn.Run(<ARCADIA_ROOT> <archiver> <mninfo> <mnname> <mnrankingSuffix> <cppOutput> [params...])"
            sys.exit(1)

        self.SrcRoot = argv[0]
        self.archiver = argv[1]  # consumed by BuildMnBase.Run()

        mninfo = argv[2]
        mnname = argv[3]
        mnrankingSuffix = argv[4]
        mncppPath = argv[5]
        check = False
        ptr = False
        multi = False
        self.fml_unused_tool = ''
        # Trailing params are flag-like tokens; unknown ones are reported
        # on stdout but do not abort the build.
        for param in argv[6:]:
            if param == "CHECK":
                check = True
            elif param == "PTR":
                ptr = True
            elif param == "MULTI":
                multi = True
            elif param.startswith('fml_tool='):
                self.fml_unused_tool = get_value(param)
            else:
                print >>sys.stdout, "Unknown param: {0}".format(param)
        super(BuildMn, self).Run(mninfo, mnname, mnrankingSuffix, mncppPath, check=check, ptr=ptr, multi=multi)
+
+
class BuildMns(BuildMnBase):
    """Batch mode: embeds many models and generates a header/source pair
    with lookup maps (name -> model pointer) over all embedded models.

    The Init* methods parse the several argv layouts used by ymake; the
    BuildMns* methods emit the header, the per-model .cpp files and the
    registry .cpp.
    """

    def InitBase(self, listname, mnrankingSuffix):
        # Precompute the C++ declarations shared by header and source.
        self.autogen = '// DO NOT EDIT THIS FILE DIRECTLY, AUTOGENERATED!\n'
        self.mnrankingSuffix = mnrankingSuffix
        self.mnlistname = listname + mnrankingSuffix
        self.mnlistelem = "const NMatrixnet::TMnSsePtr*"
        mnlisttype = "TMap< TString, {0} >".format(self.mnlistelem)
        self.mnlist = "const {0} {1}".format(mnlisttype, self.mnlistname)

        self.mnmultilistname = "{0}{1}Multi".format(listname, self.mnrankingSuffix)
        self.mnmultilistelem = "const NMatrixnet::TMnMultiCategPtr*"
        mnmultilisttype = "TMap< TString, {0} >".format(self.mnmultilistelem)
        self.mnmultilist = "const {0} {1}".format(mnmultilisttype, self.mnmultilistname)

    def InitForAll(self, argv):
        # Full argv layout: roots, archiver, list name, suffix, outputs and
        # model files, with optional CHECK / fml_tool= flags mixed in.
        if len(argv) < 8:
            print >>sys.stderr, "BuildMns.InitForAll(<ARCADIA_ROOT> <BINDIR> <archiver> <listname> <mnranking_suffix> <hdrfile> <srcfile> <mninfos> [fml_tool=<fml_unused_tool> CHECK])"
            sys.exit(1)

        bmns_args = []
        self.check = False
        self.fml_unused_tool = ''
        for arg in argv:
            if arg == "CHECK":
                self.check = True
            elif arg.startswith('fml_tool='):
                self.fml_unused_tool = get_value(arg)
            else:
                bmns_args.append(arg)

        self.SrcRoot = bmns_args[0]
        self.BINDIR = bmns_args[1]
        self.archiver = bmns_args[2]
        self.listname = bmns_args[3]
        self.mnrankingSuffix = get_value(bmns_args[4])
        self.hdrfile = bmns_args[5]
        self.srcfile = bmns_args[6]
        self.mninfos = bmns_args[7:]

        self.InitBase(self.listname, self.mnrankingSuffix)

    def InitForHeader(self, argv):
        if len(argv) < 4:
            print >>sys.stderr, "BuildMns.InitForHeader(<listname> <rankingSuffix> <hdrfile> <mninfos...>)"
            sys.exit(1)

        self.listname = argv[0]
        self.mnrankingSuffix = get_value(argv[1])
        self.hdrfile = argv[2]
        self.mninfos = argv[3:]

        self.InitBase(self.listname, self.mnrankingSuffix)

    def InitForCpp(self, argv):
        if len(argv) < 5:
            print >>sys.stderr, "BuildMns.InitForCpp(<listname> <rankingSuffix> <hdrfile> <srcfile> <mninfos...>)"
            sys.exit(1)

        self.listname = argv[0]
        self.mnrankingSuffix = get_value(argv[1])
        self.hdrfile = argv[2]
        self.srcfile = argv[3]
        self.mninfos = argv[4:]

        self.InitBase(self.listname, self.mnrankingSuffix)

    def InitForFiles(self, argv):
        if len(argv) < 7:
            print >>sys.stderr, "BuildMns.InitForFiles(<ARCADIA_ROOT> <BINDIR> <archiver> <fml_unused_tool> <listname> <rankingSuffix> <mninfos...> [CHECK])"
            sys.exit(1)

        bmns_args = []
        self.check = False
        self.fml_unused_tool = ''
        for arg in argv:
            if arg == "CHECK":
                self.check = True
            elif arg.startswith('fml_tool='):
                self.fml_unused_tool = get_value(arg)
            else:
                bmns_args.append(arg)

        self.SrcRoot = bmns_args[0]
        self.BINDIR = bmns_args[1]
        self.archiver = bmns_args[2]
        self.listname = bmns_args[3]
        self.mnrankingSuffix = get_value(bmns_args[4])
        self.mninfos = bmns_args[5:]

    def BuildMnsHeader(self):
        # Deduplicate and sort so the generated header is deterministic.
        if self.mninfos:
            self.mninfos = sorted(set(self.mninfos))

        tmpHdrPath = self.hdrfile + ".tmp"
        tmpHdrFile = open(tmpHdrPath, 'w')

        tmpHdrFile.write(self.autogen)
        tmpHdrFile.write("#include <kernel/matrixnet/mn_sse.h>\n")
        tmpHdrFile.write("#include <kernel/matrixnet/mn_multi_categ.h>\n\n")
        tmpHdrFile.write("extern {0};\n".format(self.mnlist))
        tmpHdrFile.write("extern {0};\n".format(self.mnmultilist))

        for item in self.mninfos:
            mnfilename = os.path.basename(item)
            mnfilename, ext = os.path.splitext(mnfilename)

            # Sanitize the file name into a valid C++ identifier.
            mnname = re.sub("[^-a-zA-Z0-9_]", "_", mnfilename)

            # .info -> single SSE model, .mnmc -> multi-category model.
            if ext == ".info":
                mnname = "staticMn{0}{1}Ptr".format(self.mnrankingSuffix, mnname)
                tmpHdrFile.write("extern const NMatrixnet::TMnSsePtr {0};\n".format(mnname))
            elif ext == ".mnmc":
                mnname = "staticMnMulti{0}{1}Ptr".format(self.mnrankingSuffix, mnname)
                tmpHdrFile.write("extern const NMatrixnet::TMnMultiCategPtr {0};\n".format(mnname))

        tmpHdrFile.close()
        shutil.move(tmpHdrPath, self.hdrfile)

    def BuildMnFiles(self):
        # One generated .cpp (plus .rodata) per model, via BuildMnBase.Run().
        for item in self.mninfos:
            mnfilename = os.path.basename(item)
            mnfilename, ext = os.path.splitext(mnfilename)

            mnname = re.sub("[^-a-zA-Z0-9_]", "_", mnfilename)

            if ext == ".info":
                mnname = "staticMn{0}{1}Ptr".format(self.mnrankingSuffix, mnname)
                super(BuildMns, self).Run(item, mnname, self.mnrankingSuffix, self.BINDIR + "/mn.{0}.cpp".format(mnname), check=self.check, ptr=True, multi=False)
            elif ext == ".mnmc":
                mnname = "staticMnMulti{0}{1}Ptr".format(self.mnrankingSuffix, mnname)
                # BUILD_MN_PTR_MULTI
                super(BuildMns, self).Run(item, mnname, self.mnrankingSuffix, self.BINDIR + "/mnmulti.{0}.cpp".format(mnname), check=False, ptr=True, multi=True)

    def BuildMnsCpp(self):
        # Deduplicate and sort so the generated source is deterministic.
        if self.mninfos:
            self.mninfos = sorted(set(self.mninfos))

        tmpSrcPath = self.srcfile + ".tmp"
        tmpSrcFile = open(tmpSrcPath, 'w')
        hdrrel = os.path.basename(self.hdrfile)

        mnnames = []
        mnmultinames = []
        for item in self.mninfos:
            mnfilename = os.path.basename(item)
            mnfilename, ext = os.path.splitext(mnfilename)

            if ext == ".info":
                mnnames.append(mnfilename)
            elif ext == ".mnmc":
                mnmultinames.append(mnfilename)

        tmpSrcFile.write(self.autogen)
        tmpSrcFile.write("#include \"{0}\"\n\n".format(hdrrel))

        # Emit the name->pointer registry for plain SSE models.
        if mnnames:
            mndata = self.mnlistname + "_data"
            tmpSrcFile.write("static const std::pair< TString, {0} > {1}[] = {{\n".format(self.mnlistelem, mndata))
            for item in mnnames:
                mnname = re.sub("[^-a-zA-Z0-9_]", "_", item)
                tmpSrcFile.write("    std::make_pair(TString(\"{0}\"), &staticMn{1}{2}Ptr),\n".format(item, self.mnrankingSuffix, mnname))
            tmpSrcFile.write("};\n")
            tmpSrcFile.write("{0}({1},{1} + sizeof({1}) / sizeof({1}[0]));\n\n".format(self.mnlist, mndata))
        else:
            tmpSrcFile.write("{0};\n\n".format(self.mnlist))

        # Emit the registry for multi-category models.
        if mnmultinames:
            mnmultidata = self.mnmultilistname + "_data"
            tmpSrcFile.write("static const std::pair< TString, {0} > {1}[] = {{\n".format(self.mnmultilistelem, mnmultidata))
            for item in mnmultinames:
                mnname = re.sub("[^-a-zA-Z0-9_]", "_", item)
                tmpSrcFile.write("    std::make_pair(TString(\"{0}\"), &staticMnMulti{1}{2}Ptr),\n".format(item, self.mnrankingSuffix, mnname))
            tmpSrcFile.write("};\n")
            tmpSrcFile.write("{0}({1},{1} + sizeof({1}) / sizeof({1}[0]));\n".format(self.mnmultilist, mnmultidata))
        else:
            tmpSrcFile.write("{0};\n".format(self.mnmultilist))

        tmpSrcFile.close()
        shutil.move(tmpSrcPath, self.srcfile)
+
+
# Entry points dispatched by name from the __main__ block below; each wires
# one of the argv layouts (InitFor*) to the matching generation steps.
def BuildMnsAllF(argv):
    bldMns = BuildMns()
    bldMns.InitForAll(argv)
    bldMns.BuildMnsCpp()
    bldMns.BuildMnsHeader()
    bldMns.BuildMnFiles()


def BuildMnsCppF(argv):
    bldMns = BuildMns()
    bldMns.InitForCpp(argv)
    bldMns.BuildMnsCpp()


def BuildMnsHeaderF(argv):
    bldMns = BuildMns()
    bldMns.InitForHeader(argv)
    bldMns.BuildMnsHeader()


def BuildMnsFilesF(argv):
    bldMns = BuildMns()
    bldMns.InitForFiles(argv)
    bldMns.BuildMnFiles()


def BuildMnF(argv):
    bldMn = BuildMn()
    bldMn.Run(argv)


if __name__ == '__main__':
    # argv[1] selects a function defined in this module; the rest are its args.
    if len(sys.argv) < 2:
        print >>sys.stderr, "Usage: build_mn.py <funcName> <args...>"
        sys.exit(1)

    if (sys.argv[2:]):
        globals()[sys.argv[1]](sys.argv[2:])
    else:
        globals()[sys.argv[1]]()
diff --git a/build/scripts/build_pln_header.py b/build/scripts/build_pln_header.py
new file mode 100755
index 0000000000..c73693f444
--- /dev/null
+++ b/build/scripts/build_pln_header.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+import sys
+import os
+
+
+def BuildPlnHeader():
+ if len(sys.argv) < 2:
+ print >>sys.stderr, "Usage: build_pln_header.py <absolute/path/to/OutFile>"
+ sys.exit(1)
+
+ print >>sys.stdout, "Build Pln Header..."
+ outPath = sys.argv[1]
+ tmpPath = outPath + '.tmp'
+ tmpFile = open(tmpPath, 'w')
+
+ tmpFile.write('#include <library/cpp/sse/sse.h>\n')
+ tmpFile.write('#include <kernel/relevfml/relev_fml.h>\n')
+ for path in sys.argv[2:]:
+ name = os.path.basename(path).split(".")[0] # name without extensions
+ tmpFile.write('\nextern SRelevanceFormula fml{0};\n'.format(name))
+ tmpFile.write('float {0}(const float* f);\n'.format(name))
+ tmpFile.write('void {0}SSE(const float* const* factors, float* result);\n'.format(name))
+ tmpFile.close()
+ try:
+ os.remove(outPath)
+ except:
+ pass
+ try:
+ os.rename(tmpPath, outPath)
+ except:
+ print >>sys.stdout, 'Error: Failed to rename ' + tmpPath + ' to ' + outPath
+
+if __name__ == '__main__':
+ BuildPlnHeader()
diff --git a/build/scripts/c_templates/README.md b/build/scripts/c_templates/README.md
new file mode 100644
index 0000000000..96265c7fec
--- /dev/null
+++ b/build/scripts/c_templates/README.md
@@ -0,0 +1,3 @@
+### Usage
+
+Не используйте эту библиотеку напрямую. Следует пользоваться `library/cpp/svnversion/svnversion.h`.
diff --git a/build/scripts/c_templates/ya.make b/build/scripts/c_templates/ya.make
new file mode 100644
index 0000000000..b395e11021
--- /dev/null
+++ b/build/scripts/c_templates/ya.make
@@ -0,0 +1,7 @@
+OWNER(g:ymake)
+
+LIBRARY(dummy-vcs)
+NO_PLATFORM()
+
+SRCS(svn_interface.c)
+END()
diff --git a/build/scripts/cat.py b/build/scripts/cat.py
new file mode 100755
index 0000000000..0c3f73d96f
--- /dev/null
+++ b/build/scripts/cat.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+import sys
+from shutil import copyfileobj as copy
+import os.path
+
+if __name__ == '__main__':
+    # Minimal 'cat': concatenate the named files (or stdin for "-" / no
+    # args) onto stdout; missing files are reported but do not abort.
+    # NOTE(review): binary file handles are piped into sys.stdout — fine on
+    # Python 2; Python 3 would need sys.stdout.buffer. Confirm interpreter.
+    for filename in sys.argv[1:] or ["-"]:
+        if filename == "-":
+            copy(sys.stdin, sys.stdout)
+        else:
+            if os.path.exists(filename):
+                with open(filename, 'rb') as file:
+                    copy(file, sys.stdout)
+            else:
+                sys.stderr.write('cat.py: {0}: No such file or directory\n'.format(filename))
diff --git a/build/scripts/cgo1_wrapper.py b/build/scripts/cgo1_wrapper.py
new file mode 100644
index 0000000000..986082f7e9
--- /dev/null
+++ b/build/scripts/cgo1_wrapper.py
@@ -0,0 +1,45 @@
+import argparse
+import shutil
+import subprocess
+import sys
+
+
+CGO1_SUFFIX='.cgo1.go'
+
+
+def call(cmd, cwd, env=None):
+    """Run *cmd* in *cwd* with stdio forwarded; return its exit code."""
+    # sys.stderr.write('{}\n'.format(' '.join(cmd)))
+    return subprocess.call(cmd, stdin=None, stderr=sys.stderr, stdout=sys.stdout, cwd=cwd, env=env)
+
+
+def process_file(source_root, source_prefix, build_root, build_prefix, src_path, comment_prefix):
+    """Replace absolute source/build roots with placeholder prefixes.
+
+    Only lines starting with *comment_prefix* ('//' or '#line') are
+    rewritten, so paths inside actual code stay intact.  The file is
+    rewritten via a '<src>.tmp' sibling and moved back in place.
+    """
+    dst_path = '{}.tmp'.format(src_path)
+    with open(src_path, 'r') as src_file, open(dst_path, 'w') as dst_file:
+        for line in src_file:
+            if line.startswith(comment_prefix):
+                dst_file.write(line.replace(source_root, source_prefix).replace(build_root, build_prefix))
+            else:
+                dst_file.write(line)
+    shutil.move(dst_path, src_path)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--build-prefix', default='__ARCADIA_BUILD_ROOT_PREFIX__')
+    parser.add_argument('--build-root', required=True)
+    parser.add_argument('--cgo1-files', nargs='+', required=True)
+    parser.add_argument('--cgo2-files', nargs='+', required=True)
+    parser.add_argument('--source-prefix', default='__ARCADIA_SOURCE_ROOT_PREFIX__')
+    parser.add_argument('--source-root', required=True)
+    parser.add_argument('cgo1_cmd', nargs='*')
+    args = parser.parse_args()
+
+    # Run the real cgo command first; propagate its failure code unchanged.
+    exit_code = call(args.cgo1_cmd, args.source_root)
+    if exit_code != 0:
+        sys.exit(exit_code)
+
+    # Scrub absolute roots from generated files (presumably for reproducible
+    # outputs): '//' comments in cgo1 Go files, '#line' directives in cgo2.
+    for src_path in args.cgo1_files:
+        process_file(args.source_root, args.source_prefix, args.build_root, args.build_prefix, src_path, '//')
+
+    for src_path in args.cgo2_files:
+        process_file(args.source_root, args.source_prefix, args.build_root, args.build_prefix, src_path, '#line')
diff --git a/build/scripts/check_config_h.py b/build/scripts/check_config_h.py
new file mode 100644
index 0000000000..07bc12e230
--- /dev/null
+++ b/build/scripts/check_config_h.py
@@ -0,0 +1,89 @@
+import sys
+
+# C++ snippet that static_asserts every SIZEOF_* macro (if the checked
+# config header defines it) against the compiler's actual type sizes.
+data = """
+#if defined(SIZEOF_LONG)
+static_assert(sizeof(long) == SIZEOF_LONG, "fixme 1");
+#endif
+
+#if defined(SIZEOF_PTHREAD_T)
+#include <pthread.h>
+
+static_assert(sizeof(pthread_t) == SIZEOF_PTHREAD_T, "fixme 2");
+#endif
+
+#if defined(SIZEOF_SIZE_T)
+#include <stddef.h>
+
+static_assert(sizeof(size_t) == SIZEOF_SIZE_T, "fixme 3");
+#endif
+
+#if defined(SIZEOF_TIME_T)
+#include <time.h>
+
+static_assert(sizeof(time_t) == SIZEOF_TIME_T, "fixme 4");
+#endif
+
+#if defined(SIZEOF_UINTPTR_T)
+#include <stdint.h>
+
+static_assert(sizeof(uintptr_t) == SIZEOF_UINTPTR_T, "fixme 5");
+#endif
+
+#if defined(SIZEOF_VOID_P)
+static_assert(sizeof(void*) == SIZEOF_VOID_P, "fixme 6");
+#endif
+
+#if defined(SIZEOF_FPOS_T)
+#include <stdio.h>
+
+static_assert(sizeof(fpos_t) == SIZEOF_FPOS_T, "fixme 7");
+#endif
+
+#if defined(SIZEOF_DOUBLE)
+static_assert(sizeof(double) == SIZEOF_DOUBLE, "fixme 8");
+#endif
+
+#if defined(SIZEOF_LONG_DOUBLE)
+static_assert(sizeof(long double) == SIZEOF_LONG_DOUBLE, "fixme 9");
+#endif
+
+#if defined(SIZEOF_FLOAT)
+static_assert(sizeof(float) == SIZEOF_FLOAT, "fixme 10");
+#endif
+
+#if defined(SIZEOF_INT)
+static_assert(sizeof(int) == SIZEOF_INT, "fixme 11");
+#endif
+
+#if defined(SIZEOF_LONG_LONG)
+static_assert(sizeof(long long) == SIZEOF_LONG_LONG, "fixme 12");
+#endif
+
+#if defined(SIZEOF_OFF_T)
+#include <stdio.h>
+
+static_assert(sizeof(off_t) == SIZEOF_OFF_T, "fixme 13");
+#endif
+
+#if defined(SIZEOF_PID_T)
+#include <unistd.h>
+
+static_assert(sizeof(pid_t) == SIZEOF_PID_T, "fixme 14");
+#endif
+
+#if defined(SIZEOF_SHORT)
+static_assert(sizeof(short) == SIZEOF_SHORT, "fixme 15");
+#endif
+
+#if defined(SIZEOF_WCHAR_T)
+static_assert(sizeof(wchar_t) == SIZEOF_WCHAR_T, "fixme 16");
+#endif
+
+#if defined(SIZEOF__BOOL)
+//TODO
+#endif
+"""
+if __name__ == '__main__':
+    # argv[1] = header name to include with <...>, argv[2] = output .cpp path.
+    with open(sys.argv[2], 'w') as f:
+        f.write('#include <' + sys.argv[1] + '>\n\n')
+        f.write(data)
diff --git a/build/scripts/clang_tidy.py b/build/scripts/clang_tidy.py
new file mode 100644
index 0000000000..c0c23b490a
--- /dev/null
+++ b/build/scripts/clang_tidy.py
@@ -0,0 +1,185 @@
+import argparse
+import json
+import os
+import re
+import shutil
+import sys
+
+import subprocess
+
+import yaml
+
+
+def setup_script(args):
+    """Make tidy_config_validation importable and import it module-globally."""
+    global tidy_config_validation
+    sys.path.append(os.path.dirname(args.config_validation_script))
+    import tidy_config_validation
+
+
+def parse_args():
+    """Parse known options; the unknown remainder is the compile command."""
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--testing-src", required=True)
+    parser.add_argument("--clang-tidy-bin", required=True)
+    parser.add_argument("--config-validation-script", required=True)
+    parser.add_argument("--ymake-python", required=True)
+    parser.add_argument("--tidy-json", required=True)
+    parser.add_argument("--source-root", required=True)
+    parser.add_argument("--build-root", required=True)
+    parser.add_argument("--default-config-file", required=True)
+    parser.add_argument("--project-config-file", required=True)
+    parser.add_argument("--export-fixes", required=True)
+    parser.add_argument("--checks", required=False, default="")
+    parser.add_argument("--header-filter", required=False, default=None)
+    return parser.parse_known_args()
+
+
+def generate_compilation_database(clang_cmd, source_root, filename, path):
+    """Write a one-entry compile_commands.json under *path*; return its path."""
+    compile_database = [
+        {
+            "file": filename,
+            "command": subprocess.list2cmdline(clang_cmd),
+            "directory": source_root,
+        }
+    ]
+    compilation_database_json = os.path.join(path, "compile_commands.json")
+    with open(compilation_database_json, "w") as afile:
+        json.dump(compile_database, afile)
+    return compilation_database_json
+
+
+def load_profile(path):
+    """Load the single clang-tidy profile JSON from *path*, or an error dict."""
+    if os.path.exists(path):
+        files = os.listdir(path)
+        if len(files) == 1:
+            with open(os.path.join(path, files[0])) as afile:
+                return json.load(afile)["profile"]
+        elif len(files) > 1:
+            return {
+                "error": "found several profile files: {}".format(files),
+            }
+    return {
+        "error": "profile file is missing",
+    }
+
+
+def load_fixes(path):
+    """Return the exported-fixes file content, or '' if it was not produced."""
+    if os.path.exists(path):
+        with open(path, 'r') as afile:
+            return afile.read()
+    else:
+        return ""
+
+
+def is_generated(testing_src, build_root):
+    """True if the source lives under the build root, i.e. it is generated."""
+    return testing_src.startswith(build_root)
+
+
+def generate_outputs(output_json):
+    """Create (truncate) the declared .o and .json outputs as empty files."""
+    output_obj = os.path.splitext(output_json)[0] + ".o"
+    open(output_obj, "w").close()
+    open(output_json, "w").close()
+
+
+def filter_configs(result_config, filtered_config):
+    """Load a tidy YAML config, filter it via tidy_config_validation, dump it."""
+    with open(result_config, 'r') as afile:
+        input_config = yaml.safe_load(afile)
+    result_config = tidy_config_validation.filter_config(input_config)
+    with open(filtered_config, 'w') as afile:
+        yaml.safe_dump(result_config, afile)
+
+
+def filter_cmd(cmd):
+    """Yield only the arguments that follow the '/wrapcc.py' wrapper element."""
+    skip = True
+
+    for x in cmd:
+        if not skip:
+            yield x
+
+        if '/wrapcc.py' in x:
+            skip = False
+
+
+def main():
+    """Run clang-tidy on one source file and write a JSON report.
+
+    Generated sources (under the build root) are skipped; the project tidy
+    config, when different from the default, is filtered and merged into it
+    before invoking clang-tidy via a one-entry compilation database.
+    """
+    args, clang_cmd = parse_args()
+    if '/wrapcc.py' in str(clang_cmd):
+        clang_cmd = list(filter_cmd(clang_cmd))
+    setup_script(args)
+    clang_tidy_bin = args.clang_tidy_bin
+    output_json = args.tidy_json
+    generate_outputs(output_json)
+    if is_generated(args.testing_src, args.build_root):
+        return
+    if args.header_filter is None:
+        # .pb.h files will be excluded because they are not in source_root
+        header_filter = r"^" + re.escape(os.path.dirname(args.testing_src)) + r".*"
+    else:
+        header_filter = r"^(" + args.header_filter + r").*"
+
+    def ensure_clean_dir(path):
+        # Recreate <build_root>/<path> empty and return its absolute path.
+        path = os.path.join(args.build_root, path)
+        if os.path.exists(path):
+            shutil.rmtree(path)
+        os.makedirs(path)
+        return path
+
+    profile_tmpdir = ensure_clean_dir("profile_tmpdir")
+    db_tmpdir = ensure_clean_dir("db_tmpdir")
+    # NOTE(review): relative to the current working directory, unlike the
+    # dirs above — confirm that is intended.
+    fixes_file = "fixes.txt"
+    config_dir = ensure_clean_dir("config_dir")
+    result_config_file = args.default_config_file
+    if args.project_config_file != args.default_config_file:
+        result_config = os.path.join(config_dir, "result_tidy_config.yaml")
+        filtered_config = os.path.join(config_dir, "filtered_tidy_config.yaml")
+        filter_configs(args.project_config_file, filtered_config)
+        result_config_file = tidy_config_validation.merge_tidy_configs(
+            base_config_path=args.default_config_file,
+            additional_config_path=filtered_config,
+            result_config_path=result_config,
+        )
+    compile_command_path = generate_compilation_database(clang_cmd, args.source_root, args.testing_src, db_tmpdir)
+
+    cmd = [
+        clang_tidy_bin,
+        args.testing_src,
+        "-p",
+        compile_command_path,
+        "--warnings-as-errors",
+        "*",
+        "--config-file",
+        result_config_file,
+        "--header-filter",
+        header_filter,
+        "--use-color",
+        "--enable-check-profile",
+        "--store-check-profile={}".format(profile_tmpdir),
+    ]
+    if args.export_fixes == "yes":
+        cmd += ["--export-fixes", fixes_file]
+
+    if args.checks:
+        cmd += ["--checks", args.checks]
+
+    print("cmd: {}".format(' '.join(cmd)))
+    res = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    out, err = res.communicate()
+    # NOTE(review): on Python 3 communicate() returns bytes while the str
+    # replacement below and json.dump into a "wb" file expect text —
+    # presumably this runs under Python 2; confirm target interpreter.
+    out = out.replace(args.source_root, "$(SOURCE_ROOT)")
+    profile = load_profile(profile_tmpdir)
+    testing_src = os.path.relpath(args.testing_src, args.source_root)
+    tidy_fixes = load_fixes(fixes_file)
+
+    with open(output_json, "wb") as afile:
+        json.dump(
+            {
+                "file": testing_src,
+                "exit_code": res.returncode,
+                "profile": profile,
+                "stderr": err,
+                "stdout": out,
+                "fixes": tidy_fixes,
+            },
+            afile,
+        )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/build/scripts/clang_tidy_arch.py b/build/scripts/clang_tidy_arch.py
new file mode 100644
index 0000000000..7caf623a3d
--- /dev/null
+++ b/build/scripts/clang_tidy_arch.py
@@ -0,0 +1,33 @@
+import os
+import argparse
+import json
+
+
+def parse_args():
+    """Parse known options; unknown args are the input report paths."""
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--output-file")
+    parser.add_argument("--build-root")
+    parser.add_argument("--source-root")
+    return parser.parse_known_args()
+
+
+def main():
+    """Merge per-file *.tidyjson reports into one JSON keyed by source file."""
+    args, unknown_args = parse_args()
+    inputs = unknown_args
+    result_json = {}
+    for inp in inputs:
+        # Missing or empty report files are silently skipped.
+        if os.path.exists(inp) and inp.endswith("tidyjson"):
+            with open(inp, 'r') as afile:
+                file_content = afile.read().strip()
+                if not file_content:
+                    continue
+                errors = json.loads(file_content)
+                testing_src = errors["file"]
+                result_json[testing_src] = errors
+
+    with open(args.output_file, 'w') as afile:
+        json.dump(result_json, afile, indent=4)  # TODO remove indent
+
+
+if __name__ == "__main__":
+    main()
diff --git a/build/scripts/clang_wrapper.py b/build/scripts/clang_wrapper.py
new file mode 100644
index 0000000000..af3869f789
--- /dev/null
+++ b/build/scripts/clang_wrapper.py
@@ -0,0 +1,53 @@
+import subprocess
+import sys
+
+
+def fix(s):
+    """Translate one compiler argument for clang-cl; return None to drop it."""
+    # disable dbg DEVTOOLS-2744
+    if s == '-g':
+        return None
+    if s == '/Z7' or s == '/Od' or s == '/Ob0' or s == '/D_DEBUG':
+        return None
+
+    # disable sanitizers for generated code
+    if s.startswith('-fsanitize') or s == '-Dmemory_sanitizer_enabled' or s.startswith('-fsanitize-blacklist'):
+        return None
+
+    # strip gcc toolchain flags (appear when crosscompiling)
+    if s.startswith('-fabi-version'):
+        return None
+
+    # remove arguments unknown to clang-cl
+    if s == '-fcase-insensitive-paths': # or s == '-fno-lto': # DEVTOOLSSUPPORT-3966
+        return None
+
+    # Paths under .ya/tools/v3/.../msvc/include are divided with '\'
+    return s.replace('\\', '/')
+
+
+def fix_path(p):
+ try:
+ i = p.rfind('/bin/clang')
+ p = p[:i] + '/bin/clang-cl'
+ except ValueError:
+ pass
+ return p
+
+
+if __name__ == '__main__':
+ is_on_win = sys.argv[1] == 'yes'
+ path = sys.argv[2]
+ args = filter(None, [fix(s) for s in sys.argv[3:]])
+ if is_on_win:
+ path = fix_path(path)
+ try:
+ i = args.index('-emit-llvm')
+ args[i:i+1] = ['-Xclang', '-emit-llvm']
+ except ValueError:
+ pass
+ args.append('-fms-compatibility-version=19')
+
+ cmd = [path] + args
+
+ rc = subprocess.call(cmd, shell=False, stderr=sys.stderr, stdout=sys.stdout)
+ sys.exit(rc)
diff --git a/build/scripts/collect_java_srcs.py b/build/scripts/collect_java_srcs.py
new file mode 100644
index 0000000000..170002520a
--- /dev/null
+++ b/build/scripts/collect_java_srcs.py
@@ -0,0 +1,51 @@
+import os
+import sys
+import contextlib
+import tarfile
+import zipfile
+
+
+if __name__ == '__main__':
+ build_root = sys.argv[1]
+ root = os.path.normpath(sys.argv[2])
+ dest = os.path.normpath(sys.argv[3])
+ srcs = sys.argv[4:]
+
+ for src in srcs:
+ src = os.path.normpath(src)
+ if src.endswith('.java') or src.endswith('.kt'):
+ src_rel_path = os.path.relpath(src, root)
+
+ if os.path.join(root, src_rel_path) == src:
+ # Inside root
+ dst = os.path.join(dest, src_rel_path)
+
+ else:
+ # Outside root
+ print>>sys.stderr, 'External src file "{}" is outside of srcdir {}, ignore'.format(
+ os.path.relpath(src, build_root),
+ os.path.relpath(root, build_root),
+ )
+ continue
+
+ if os.path.exists(dst):
+ print>>sys.stderr, 'Duplicate external src file {}, choice is undefined'.format(
+ os.path.relpath(dst, root)
+ )
+
+ else:
+ destdir = os.path.dirname(dst)
+ if destdir and not os.path.exists(destdir):
+ os.makedirs(destdir)
+ os.rename(src, dst)
+
+ elif src.endswith('.jsr'):
+ with contextlib.closing(tarfile.open(src, 'r')) as tf:
+ tf.extractall(dst)
+
+ elif src.endswith('-sources.jar'):
+ with zipfile.ZipFile(src) as zf:
+ zf.extractall(dst)
+
+ else:
+ print>>sys.stderr, 'Unrecognized file type', os.path.relpath(src, build_root)
diff --git a/build/scripts/compile_cuda.py b/build/scripts/compile_cuda.py
new file mode 100644
index 0000000000..f8e1fa2b6d
--- /dev/null
+++ b/build/scripts/compile_cuda.py
@@ -0,0 +1,168 @@
+import sys
+import subprocess
+import os
+import collections
+import re
+import tempfile
+
+
+def is_clang(command):
+    """True if the nvcc command points --compiler-bindir at a clang binary."""
+    for word in command:
+        if '--compiler-bindir' in word and 'clang' in word:
+            return True
+
+    return False
+
+
+def main():
+    """Filter ya-make C++ flags down to what nvcc accepts and invoke nvcc.
+
+    argv layout: [--mtime <preload lib>] <nvcc command...> --cflags <flags...>.
+    NOTE(review): uses Python 2 'print >>' syntax, so this script is
+    Python 2 only as written.
+    """
+    try:
+        sys.argv.remove('--y_skip_nocxxinc')
+        skip_nocxxinc = True
+    except ValueError:
+        skip_nocxxinc = False
+
+    spl = sys.argv.index('--cflags')
+    cmd = 1
+    mtime0 = None
+    if sys.argv[1] == '--mtime':
+        mtime0 = sys.argv[2]
+        cmd = 3
+    command = sys.argv[cmd: spl]
+    cflags = sys.argv[spl + 1:]
+
+    dump_args = False
+    if '--y_dump_args' in command:
+        command.remove('--y_dump_args')
+        dump_args = True
+
+    executable = command[0]
+    if not os.path.exists(executable):
+        print >> sys.stderr, '{} not found'.format(executable)
+        sys.exit(1)
+
+    if is_clang(command):
+        # nvcc concatenates the sources for clang, and clang reports unused
+        # things from .h files as if they they were defined in a .cpp file.
+        cflags += ['-Wno-unused-function', '-Wno-unused-parameter']
+
+    if not is_clang(command) and '-fopenmp=libomp' in cflags:
+        cflags.append('-fopenmp')
+        cflags.remove('-fopenmp=libomp')
+
+    # Flags that nvcc (or the tools it forwards to) cannot handle.
+    skip_list = [
+        '-gline-tables-only',
+        # clang coverage
+        '-fprofile-instr-generate',
+        '-fcoverage-mapping',
+        '/Zc:inline', # disable unreferenced functions (kernel registrators) remove
+        '-Wno-c++17-extensions',
+        '-flto',
+        '-faligned-allocation',
+        '-fsized-deallocation',
+        # While it might be reasonable to compile host part of .cu sources with these optimizations enabled,
+        # nvcc passes these options down towards cicc which lacks x86_64 extensions support.
+        '-msse2',
+        '-msse3',
+        '-mssse3',
+        '-msse4.1',
+        '-msse4.2',
+    ]
+
+    if skip_nocxxinc:
+        skip_list.append('-nostdinc++')
+
+    for flag in skip_list:
+        if flag in cflags:
+            cflags.remove(flag)
+
+    skip_prefix_list = [
+        '-fsanitize=',
+        '-fsanitize-coverage=',
+        '-fsanitize-blacklist=',
+        '--system-header-prefix',
+    ]
+    new_cflags = []
+    for flag in cflags:
+        if all(not flag.startswith(skip_prefix) for skip_prefix in skip_prefix_list):
+            if flag.startswith('-fopenmp-version='):
+                new_cflags.append('-fopenmp-version=45') # Clang 11 only supports OpenMP 4.5, but the default is 5.0, so we need to forcefully redefine it.
+            else:
+                new_cflags.append(flag)
+    cflags = new_cflags
+
+    if not is_clang(command):
+        def good(arg):
+            if arg.startswith('--target='):
+                return False
+            return True
+        cflags = filter(good, cflags)
+
+    # Split remaining flags into preprocessor args (passed directly) and
+    # host-compiler args (forwarded via --compiler-options).
+    cpp_args = []
+    compiler_args = []
+
+    # NVCC requires particular MSVC versions which may differ from the version
+    # used to compile regular C++ code. We have a separate MSVC in Arcadia for
+    # the CUDA builds and pass it's root in $Y_VC_Root.
+    # The separate MSVC for CUDA may absent in Yandex Open Source builds.
+    vc_root = os.environ.get('Y_VC_Root')
+
+    cflags_queue = collections.deque(cflags)
+    while cflags_queue:
+
+        arg = cflags_queue.popleft()
+        if arg == '-mllvm':
+            compiler_args.append(arg)
+            compiler_args.append(cflags_queue.popleft())
+            continue
+        if arg[:2].upper() in ('-I', '/I', '-B'):
+            value = arg[2:]
+            if not value:
+                value = cflags_queue.popleft()
+            if arg[1] == 'I':
+                cpp_args.append('-I{}'.format(value))
+            elif arg[1] == 'B': # todo: delete "B" flag check when cuda stop to use gcc
+                pass
+            continue
+
+        match = re.match(r'[-/]D(.*)', arg)
+        if match:
+            define = match.group(1)
+            # We have C++ flags configured for the regular C++ build.
+            # There is Y_MSVC_INCLUDE define with a path to the VC header files.
+            # We need to change the path accordingly when using a separate MSVC for CUDA.
+            if vc_root and define.startswith('Y_MSVC_INCLUDE'):
+                define = os.path.expandvars('Y_MSVC_INCLUDE={}/include'.format(vc_root))
+            cpp_args.append('-D' + define.replace('\\', '/'))
+            continue
+
+        compiler_args.append(arg)
+
+    command += cpp_args
+    if compiler_args:
+        command += ['--compiler-options', ','.join(compiler_args)]
+
+    # --keep is necessary to prevent nvcc from embedding nvcc pid in generated
+    # symbols. It makes nvcc use the original file name as the prefix in the
+    # generated files (otherwise it also prepends tmpxft_{pid}_00000000-5), and
+    # cicc derives the module name from its {input}.cpp1.ii file name.
+    command += ['--keep', '--keep-dir', tempfile.mkdtemp(prefix='compile_cuda.py.')]
+    # nvcc generates symbols like __fatbinwrap_{len}_{basename}_{hash} where
+    # {basename} is {input}.cpp1.ii with non-C chars translated to _, {len} is
+    # {basename} length, and {hash} is the hash of first exported symbol in
+    # {input}.cpp1.ii if there is one, otherwise it is based on its modification
+    # time (converted to string in the local timezone) and the current working
+    # directory. To stabilize the names of these symbols we need to fix mtime,
+    # timezone, and cwd.
+    if mtime0:
+        os.environ['LD_PRELOAD'] = mtime0
+    os.environ['TZ'] = 'UTC0' # POSIX fixed offset format.
+    os.environ['TZDIR'] = '/var/empty' # Against counterfeit /usr/share/zoneinfo/$TZ.
+
+    if dump_args:
+        sys.stdout.write('\n'.join(command))
+    else:
+        sys.exit(subprocess.Popen(command, stdout=sys.stderr, stderr=sys.stderr, cwd='/').wait())
+
+
+if __name__ == '__main__':
+    main()
diff --git a/build/scripts/compile_java.py b/build/scripts/compile_java.py
new file mode 100644
index 0000000000..7b6455ebf0
--- /dev/null
+++ b/build/scripts/compile_java.py
@@ -0,0 +1,122 @@
+import argparse
+import contextlib
+from distutils import dir_util
+import os
+import shutil
+import subprocess as sp
+import tarfile
+import zipfile
+import sys
+
+
+def parse_args(args):
+    """Parse wrapper options; return (namespace, positional source list)."""
+    parser = argparse.ArgumentParser(description='Wrapper to invoke Java compilation from ya make build')
+    parser.add_argument('--javac-bin', help='path to javac')
+    parser.add_argument('--jar-bin', help='path to jar tool')
+    parser.add_argument('--java-bin', help='path to java binary')
+    parser.add_argument('--kotlin-compiler', help='path to kotlin compiler jar file')
+    parser.add_argument('--vcs-mf', help='path to VCS info manifest snippet')
+    parser.add_argument('--package-prefix', help='package prefix for resource files')
+    parser.add_argument('--jar-output', help='jar file with compiled classes destination path')
+    parser.add_argument('--srcs-jar-output', help='jar file with sources destination path')
+    parser.add_argument('srcs', nargs="*")
+    args = parser.parse_args(args)
+    return args, args.srcs
+
+
+def mkdir_p(directory):
+    """Create *directory* (with parents) if it does not already exist."""
+    if not os.path.exists(directory):
+        os.makedirs(directory)
+
+
+def split_cmd_by_delim(cmd, delim='DELIM'):
+    """Split a flat argv list into sublists at each literal *delim* element."""
+    result = [[]]
+    for arg in cmd:
+        if arg == delim:
+            result.append([])
+        else:
+            result[-1].append(arg)
+    return result
+
+
+def main():
+    """Compile .java/.kt sources (plus unpacked .jsrc bundles) into jars.
+
+    argv is four DELIM-separated sections: wrapper args, javac options,
+    classpath peers, kotlin compiler options.
+    """
+    cmd_parts = split_cmd_by_delim(sys.argv[1:])
+    assert len(cmd_parts) == 4
+    args, javac_opts, peers, ktc_opts = cmd_parts
+    opts, jsrcs = parse_args(args)
+
+    # .jsrc peers are source bundles, not classpath entries.
+    jsrcs += list(filter(lambda x: x.endswith('.jsrc'), peers))
+    peers = list(filter(lambda x: not x.endswith('.jsrc'), peers))
+
+    sources_dir = 'src'
+    mkdir_p(sources_dir)
+    for s in jsrcs:
+        if s.endswith('.jsrc'):
+            with contextlib.closing(tarfile.open(s, 'r')) as tf:
+                tf.extractall(sources_dir)
+
+    srcs = []
+    for r, _, files in os.walk(sources_dir):
+        for f in files:
+            srcs.append(os.path.join(r, f))
+    srcs += jsrcs
+    ktsrcs = list(filter(lambda x: x.endswith('.kt'), srcs))
+    srcs = list(filter(lambda x: x.endswith('.java'), srcs))
+
+    classes_dir = 'cls'
+    mkdir_p(classes_dir)
+    classpath = os.pathsep.join(peers)
+
+    if srcs:
+        # NOTE(review): space-separated @-file breaks for paths containing
+        # spaces — presumably build paths never do; confirm.
+        temp_sources_file = 'temp.sources.list'
+        with open(temp_sources_file, 'w') as ts:
+            ts.write(' '.join(srcs))
+
+    if ktsrcs:
+        temp_kt_sources_file = 'temp.kt.sources.list'
+        with open(temp_kt_sources_file, 'w') as ts:
+            ts.write(' '.join(ktsrcs + srcs))
+        kt_classes_dir = 'kt_cls'
+        mkdir_p(kt_classes_dir)
+        sp.check_call([opts.java_bin, '-jar', opts.kotlin_compiler, '-classpath', classpath, '-d', kt_classes_dir] + ktc_opts + ['@' + temp_kt_sources_file])
+        classpath = os.pathsep.join([kt_classes_dir, classpath])
+
+    if srcs:
+        sp.check_call([opts.javac_bin, '-nowarn', '-g', '-classpath', classpath, '-encoding', 'UTF-8', '-d', classes_dir] + javac_opts + ['@' + temp_sources_file])
+
+    for s in jsrcs:
+        if s.endswith('-sources.jar'):
+            with zipfile.ZipFile(s) as zf:
+                zf.extractall(sources_dir)
+
+        elif s.endswith('.jar'):
+            with zipfile.ZipFile(s) as zf:
+                zf.extractall(classes_dir)
+
+    if ktsrcs:
+        dir_util.copy_tree(kt_classes_dir, classes_dir)
+
+    # 'cfm' embeds the VCS manifest snippet; 'cfM' writes no manifest.
+    if opts.vcs_mf:
+        sp.check_call([opts.jar_bin, 'cfm', opts.jar_output, opts.vcs_mf, os.curdir], cwd=classes_dir)
+    else:
+        sp.check_call([opts.jar_bin, 'cfM', opts.jar_output, os.curdir], cwd=classes_dir)
+
+    if opts.srcs_jar_output:
+        for s in jsrcs:
+            if s.endswith('.java'):
+                if opts.package_prefix:
+                    d = os.path.join(sources_dir, *(opts.package_prefix.split('.') + [os.path.basename(s)]))
+
+                else:
+                    d = os.path.join(sources_dir, os.path.basename(s))
+
+                shutil.copyfile(s, d)
+
+        if opts.vcs_mf:
+            sp.check_call([opts.jar_bin, 'cfm', opts.srcs_jar_output, opts.vcs_mf, os.curdir], cwd=sources_dir)
+        else:
+            sp.check_call([opts.jar_bin, 'cfM', opts.srcs_jar_output, os.curdir], cwd=sources_dir)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/build/scripts/compile_jsrc.py b/build/scripts/compile_jsrc.py
new file mode 100644
index 0000000000..8760e5eee9
--- /dev/null
+++ b/build/scripts/compile_jsrc.py
@@ -0,0 +1,24 @@
+import argparse
+import os
+import tarfile
+
+
+def parse_args():
+    """CLI: --input files, --output tar path, --prefix for relative names."""
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--input', nargs='*', required=True)
+    parser.add_argument('--output', required=True)
+    parser.add_argument('--prefix', required=True)
+
+    return parser.parse_args()
+
+
+def main():
+    """Pack the input files into a tar, storing paths relative to --prefix."""
+    args = parse_args()
+
+    with tarfile.open(args.output, 'w') as out:
+        for f in args.input:
+            out.add(f, arcname=os.path.relpath(f, args.prefix))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/build/scripts/compile_pysrc.py b/build/scripts/compile_pysrc.py
new file mode 100644
index 0000000000..e3637e18e2
--- /dev/null
+++ b/build/scripts/compile_pysrc.py
@@ -0,0 +1,101 @@
+import argparse
+import os
+import shutil
+import subprocess
+import tarfile
+
+
+LIMIT = 6000
+
+
+def parse_args():
+    """CLI: common --input/--output/--rescompiler plus a py2/py3 subcommand."""
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--input', required=True)
+    parser.add_argument('--output', required=True)
+    parser.add_argument('--rescompiler', required=True)
+    subparsers = parser.add_subparsers(dest='mode')
+
+    parser_py2 = subparsers.add_parser('py2')
+    parser_py2.add_argument('--py_compile', required=True)
+    parser_py2.add_argument('--python', required=True)
+
+    parser_py3 = subparsers.add_parser('py3')
+    parser_py3.add_argument('--pycc', required=True)
+
+    return parser.parse_args()
+
+
+def call(cmd, cwd=None, env=None):
+    """Run *cmd*, returning combined stdout+stderr; raises on non-zero exit."""
+    return subprocess.check_output(cmd, stdin=None, stderr=subprocess.STDOUT, cwd=cwd, env=env)
+
+
+def iterate_py2_resource_params(py_files):
+    """Yield (path, resource_key) pairs for py2-style module resources."""
+    for py in py_files:
+        mod = py[:-3].replace('/', '.')
+        key = '/py_modules/{}'.format(mod)
+        yield py, key
+        yield '-', 'resfs/src/{}={}'.format(key, py)
+        yield '{}.yapyc'.format(py), '/py_code/{}'.format(mod)
+
+
+def iterate_py3_resource_params(py_files):
+    """Yield (path, resource_key) pairs for py3 resfs file resources."""
+    for py in py_files:
+        # Both the source file and its compiled .yapyc3 sibling are packed.
+        for ext in ('', '.yapyc3'):
+            path = '{}{}'.format(py, ext)
+            dest = 'py/{}'.format(path)
+            key = 'resfs/file/{}'.format(dest)
+            src = 'resfs/src/{}={}'.format(key, os.path.basename(path))
+            yield '-', src
+            yield path, key
+
+
+def main():
+    """Byte-compile a tar of .py sources and emit one resource blob.
+
+    Resource entries are passed to rescompiler in batches whose combined
+    path+key length stays under LIMIT; the partial outputs are then
+    concatenated into args.output.
+    """
+    args = parse_args()
+
+    names = []
+    with tarfile.open(args.input, 'r') as tar:
+        names = tar.getnames()
+        tar.extractall()
+
+    if args.mode == 'py3':
+        pycc_cmd = [args.pycc]
+        pycc_ext = '.yapyc3'
+        iterate_resource_params = iterate_py3_resource_params
+    else:
+        pycc_cmd = [args.python, args.py_compile]
+        pycc_ext = '.yapyc'
+        iterate_resource_params = iterate_py2_resource_params
+
+    py_files = sorted(names)
+
+    for py in py_files:
+        cmd = pycc_cmd + ['{}-'.format(os.path.basename(py)), py, '{}{}'.format(py, pycc_ext)]
+        call(cmd)
+
+    outputs = []
+    cmd = [args.rescompiler, '{}.0'.format(args.output)]
+    size = 0
+    for path, key in iterate_resource_params(py_files):
+        addendum = len(path) + len(key)
+        # Flush the current batch when it would exceed LIMIT (and is non-empty).
+        if size + addendum > LIMIT and len(cmd) > 2:
+            call(cmd)
+            outputs.append(cmd[1])
+            cmd[1] = '{}.{}'.format(args.output, len(outputs))
+            cmd = cmd[0:2]
+            size = 0
+        cmd.extend([path, key])
+        size += addendum
+    if len(outputs) == 0:
+        # Single batch: write straight to the final output.
+        cmd[1] = args.output
+        call(cmd)
+    else:
+        call(cmd)
+        outputs.append(cmd[1])
+        with open(args.output, 'w') as fout:
+            for fname in outputs:
+                with open(fname, 'r') as fin:
+                    shutil.copyfileobj(fin, fout)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/build/scripts/configure_file.py b/build/scripts/configure_file.py
new file mode 100755
index 0000000000..193ad7ec9e
--- /dev/null
+++ b/build/scripts/configure_file.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python2.7
+
+import sys
+import os.path
+import re
+
+cmakeDef01 = "#cmakedefine01"
+cmakeDef = "#cmakedefine"
+
+
+def replaceLine(l, varDict, define):
+    """Expand one line of a cmake-style template.
+
+    Handles '#cmakedefine' (replaced with *define*), '#cmakedefine01 VAR'
+    (emits '<define> VAR 1' when varDict['VAR'] == 'yes', else '... 0'),
+    and @VAR@ substitutions taken from varDict (missing vars become '').
+    """
+    words = l.split()
+    if words:
+        if words[0] == cmakeDef:
+            sPos = l.find(cmakeDef)
+            ePos = sPos + len(cmakeDef)
+            l = l[:sPos] + define + l[ePos:] + '\n'
+        if words[0] == cmakeDef01:
+            var = words[1]
+            cmakeValue = varDict.get(var)
+            if cmakeValue == 'yes':
+                val = '1'
+            else:
+                val = '0'
+            sPos = l.find(cmakeDef01)
+            ePos = l.find(var) + len(var)
+            l = l[:sPos] + define + ' ' + var + ' ' + val + l[ePos + 1:] + '\n'
+
+    finder = re.compile(".*?(@[a-zA-Z0-9_]+@).*")
+    while True:
+        re_result = finder.match(l)
+        if not re_result:
+            return l
+        key = re_result.group(1)[1:-1]
+        l = l[:re_result.start(1)] + varDict.get(key, '') + l[re_result.end(1):]
+
+
+def main(inputPath, outputPath, varDict):
+    """Render the template at inputPath into outputPath using varDict."""
+    # .asm outputs use NASM-style '%define' instead of the C '#define'.
+    define = '#define' if os.path.splitext(outputPath)[1] != '.asm' else '%define'
+    with open(outputPath, 'w') as output:
+        with open(inputPath, 'r') as input:
+            for l in input:
+                output.write(replaceLine(l, varDict, define))
+
+
+def usage():
+    """Print CLI usage and exit with status 1."""
+    print("usage: configure_file.py inputPath outputPath key1=value1 ...")
+    exit(1)
+
+
+if __name__ == "__main__":
+    # Trailing arguments are key=value substitutions for the template.
+    if len(sys.argv) < 3:
+        usage()
+    varDict = {}
+    for x in sys.argv[3:]:
+        key, value = str(x).split('=', 1)
+        varDict[key] = value
+
+    main(sys.argv[1], sys.argv[2], varDict)
diff --git a/build/scripts/container.py b/build/scripts/container.py
new file mode 100644
index 0000000000..27e6f921f3
--- /dev/null
+++ b/build/scripts/container.py
@@ -0,0 +1,30 @@
+import subprocess
+import os
+import shutil
+
+
+class ContainerError(Exception):
+    """Base error type for container layer operations (currently unraised)."""
+    pass
+
+
+def join_layers(input_paths, output_path, squashfs_path):
+
+ if len(input_paths) == 1:
+ shutil.copy2(input_paths[0], output_path)
+
+ else:
+ # We cannot use appending here as it doesn't allow replacing files
+ for input_path in input_paths:
+ unpack_cmd = [ os.path.join(squashfs_path, 'unsquashfs') ]
+ unpack_cmd.extend([ '-f', input_path ])
+ subprocess.run(unpack_cmd)
+
+ pack_cmd = [ os.path.join(squashfs_path, 'mksquashfs') ]
+ pack_cmd.append(os.path.join(os.curdir, 'squashfs-root'))
+ pack_cmd.append(output_path)
+ pack_cmd.append('-all-root')
+ subprocess.run(pack_cmd)
+
+ shutil.rmtree(os.path.join(os.curdir, 'squashfs-root'))
+
+ return 0
diff --git a/build/scripts/copy_docs_files.py b/build/scripts/copy_docs_files.py
new file mode 100644
index 0000000000..c444dd509d
--- /dev/null
+++ b/build/scripts/copy_docs_files.py
@@ -0,0 +1,102 @@
+import argparse
+import codecs
+import errno
+import os
+import process_command_files as pcf
+import shutil
+import sys
+
+
def parse_args():
    """Parse the command line (with @-file expansion via process_command_files).

    NOTE(review): '--src-dir' passes required=None (falsy), so argparse treats
    it as optional even though main() unconditionally normalizes it — this
    looks like it was meant to be required=True; confirm against callers.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--build-root', required=True)
    parser.add_argument('--dst-dir', required=True)
    parser.add_argument('--existing', choices=('skip', 'overwrite'), default='overwrite')
    parser.add_argument('--source-root', required=True)
    parser.add_argument('--src-dir', required=None)
    parser.add_argument('files', nargs='*')
    return parser.parse_args(pcf.get_args(sys.argv[1:]))
+
+
def makedirs(dirname):
    """Create dirname and any missing parents; a pre-existing directory is fine."""
    try:
        os.makedirs(dirname)
    except OSError as e:
        # Only tolerate "already exists" when the path really is a directory.
        if e.errno != errno.EEXIST or not os.path.isdir(dirname):
            raise
+
+
def copy_file(src, dst, overwrite=False, orig_path=None, generated=False):
    """Copy src to dst, injecting YAML front matter into Markdown files.

    For .md sources (when orig_path or generated is set), a 'vcsPath: ...'
    or 'generated: true' line is inserted into the existing '---' front-matter
    block, or a new minimal block is prepended when none exists.  An existing
    dst is kept untouched unless overwrite is True.
    """
    if os.path.exists(dst) and not overwrite:
        return

    makedirs(os.path.dirname(dst))

    with open(src, 'rb') as fsrc, open(dst, 'wb') as fdst:
        if (orig_path or generated) and src.endswith('.md'):
            out = b''
            buf = fsrc.readline()
            bom_length = len(codecs.BOM_UTF8)
            # Preserve a UTF-8 BOM, if any, ahead of the injected metadata.
            if buf[:bom_length] == codecs.BOM_UTF8:
                out += codecs.BOM_UTF8
                buf = buf[bom_length:]
            info = 'generated: true\n' if generated else 'vcsPath: {}\n'.format(orig_path)
            # First line is exactly '---': the file already has front matter.
            if buf.startswith(b'---') and b'\n' in buf[3:] and buf[3:].rstrip(b'\r\n') == b'':
                content = b''
                found = False
                # Scan for the closing '---' line of the front-matter block.
                while True:
                    line = fsrc.readline()
                    if len(line) == 0:
                        break
                    content += line
                    if line.startswith(b'---') and line[3:].rstrip(b'\r\n') == b'':
                        found = True
                        break
                out += buf
                if found:
                    # Inject right after the opening '---'.
                    out += info.encode('utf-8')
                out += content
            else:
                # No front matter: prepend a minimal block before the first line.
                out += '---\n{}---\n'.format(info).encode('utf-8')
                out += buf
            fdst.write(out)
        # Stream the remainder (or, for non-Markdown files, the whole file).
        shutil.copyfileobj(fsrc, fdst)
+
+
def main():
    """Copy docs files from --src-dir into --dst-dir under the build root.

    Files coming from the source tree get a 'vcsPath' front-matter entry via
    copy_file; files coming from the build tree do not.

    Fix: the original asserted src_dir.startswith(source_root) right before
    the if/elif below, which made the build-root branch unreachable dead code;
    the branch's own assert already reports invalid paths.
    """
    args = parse_args()

    source_root = os.path.normpath(args.source_root) + os.path.sep
    build_root = os.path.normpath(args.build_root) + os.path.sep

    dst_dir = os.path.normpath(args.dst_dir)
    assert dst_dir.startswith(build_root)
    makedirs(dst_dir)

    src_dir = os.path.normpath(args.src_dir) + os.path.sep

    if src_dir.startswith(source_root):
        root = source_root
        is_from_source_root = True
    elif src_dir.startswith(build_root):
        root = build_root
        is_from_source_root = False
    else:
        assert False, 'src_dir [{}] should start with [{}] or [{}]'.format(src_dir, source_root, build_root)

    is_overwrite_existing = args.existing == 'overwrite'

    for f in [os.path.normpath(f) for f in args.files]:
        src_file = os.path.join(src_dir, f)
        dst_file = os.path.join(dst_dir, f)
        if src_file == dst_file:
            continue
        # Only source-tree files carry a vcsPath back-reference.
        rel_path = src_file[len(root):] if is_from_source_root else None
        copy_file(src_file, dst_file, overwrite=is_overwrite_existing, orig_path=rel_path)
+
+
# Script entry point.
if __name__ == '__main__':
    main()
diff --git a/build/scripts/copy_docs_files_to_dir.py b/build/scripts/copy_docs_files_to_dir.py
new file mode 100644
index 0000000000..bfd3c65698
--- /dev/null
+++ b/build/scripts/copy_docs_files_to_dir.py
@@ -0,0 +1,164 @@
+import argparse
+import codecs
+import errno
+import os
+import process_command_files as pcf
+import shutil
+import sys
+
+
def parse_args():
    """Parse the command line (with @-file expansion via process_command_files).

    --docs-dir takes (dir, namespace) pairs and may repeat; --src-dir takes a
    directory, a namespace and then files, and may repeat; --bin-dir takes a
    directory, a namespace and then files.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--bin-dir', nargs='*')
    parser.add_argument('--build-root', required=True)
    parser.add_argument('--dest-dir', required=True)
    parser.add_argument('--docs-dir', action='append', nargs=2, dest='docs_dirs', default=None)
    parser.add_argument('--existing', choices=('skip', 'overwrite'), default='overwrite')
    parser.add_argument('--source-root', required=True)
    parser.add_argument('--src-dir', action='append', nargs='*', dest='src_dirs', default=None)
    parser.add_argument('files', nargs='*')
    return parser.parse_args(pcf.get_args(sys.argv[1:]))
+
+
def makedirs(dirname):
    """mkdir -p equivalent: create dirname, tolerating an existing directory."""
    try:
        os.makedirs(dirname)
    except OSError as e:
        if not (e.errno == errno.EEXIST and os.path.isdir(dirname)):
            raise
+
+
def copy_file(src, dst, overwrite=False, orig_path=None):
    """Copy src to dst, injecting a 'vcsPath' front-matter entry into .md files.

    When orig_path is given and src is Markdown, the entry is inserted into
    the existing '---' front-matter block, or a new minimal block is
    prepended.  An existing dst is kept untouched unless overwrite is True.
    """
    if os.path.exists(dst) and not overwrite:
        return

    makedirs(os.path.dirname(dst))

    with open(src, 'rb') as fsrc, open(dst, 'wb') as fdst:
        if orig_path and src.endswith('.md'):
            out = b''
            buf = fsrc.readline()
            bom_length = len(codecs.BOM_UTF8)
            # Preserve a UTF-8 BOM, if any, ahead of the injected metadata.
            if buf[:bom_length] == codecs.BOM_UTF8:
                out += codecs.BOM_UTF8
                buf = buf[bom_length:]
            info = 'vcsPath: {}\n'.format(orig_path)
            # First line is exactly '---': the file already has front matter.
            if buf.startswith(b'---') and b'\n' in buf[3:] and buf[3:].rstrip(b'\r\n') == b'':
                content = b''
                found = False
                # Scan for the closing '---' line of the front-matter block.
                while True:
                    line = fsrc.readline()
                    if len(line) == 0:
                        break
                    content += line
                    if line.startswith(b'---') and line[3:].rstrip(b'\r\n') == b'':
                        found = True
                        break
                out += buf
                if found:
                    # Inject right after the opening '---'.
                    out += info.encode('utf-8')
                out += content
            else:
                # No front matter: prepend a minimal block before the first line.
                out += '---\n{}---\n'.format(info).encode('utf-8')
                out += buf
            fdst.write(out)
        # Stream the remainder (or, for non-Markdown files, the whole file).
        shutil.copyfileobj(fsrc, fdst)
+
+
def main():
    """Assemble a docs tree in --dest-dir from several kinds of inputs.

    Inputs, in order: whole docs directories (--docs-dir), explicit file lists
    rooted in source/build dirs (--src-dir), generated files from a bin dir
    (--bin-dir), and free-standing files.  Source-tree files get their repo
    path injected as 'vcsPath' front matter by copy_file.
    """
    args = parse_args()

    dest_dir = os.path.normpath(args.dest_dir)
    makedirs(dest_dir)

    source_root = os.path.normpath(args.source_root) + os.path.sep
    build_root = os.path.normpath(args.build_root) + os.path.sep

    is_overwrite_existing = args.existing == 'overwrite'

    if args.docs_dirs:
        for item in args.docs_dirs:
            assert len(item) == 2
            docs_dir, nm = item[0], item[1]
            assert not os.path.isabs(docs_dir)
            # A non-trivial namespace puts the files into a subdirectory.
            if nm and nm != '.':
                assert not os.path.isabs(nm)
                dst = os.path.join(dest_dir, nm)
            else:
                dst = dest_dir

            abs_docs_dir = os.path.join(args.source_root, docs_dir)

            for root, _, files in os.walk(abs_docs_dir):
                for f in files:
                    # Symlinks are skipped (avoids duplicates and escapes).
                    if os.path.islink(os.path.join(root, f)):
                        continue
                    file_src = os.path.join(root, f)
                    assert file_src.startswith(source_root)
                    file_dst = os.path.join(dst, os.path.relpath(root, abs_docs_dir), f)
                    copy_file(file_src, file_dst, overwrite=is_overwrite_existing, orig_path=file_src[len(source_root):])

    if args.src_dirs:
        for item in args.src_dirs:
            # item = [src_dir, namespace, file1, file2, ...]
            assert len(item) > 1
            src_dir, nm = os.path.normpath(item[0]), item[1]
            assert os.path.isabs(src_dir)
            if nm and nm != '.':
                assert not os.path.isabs(nm)
                dst = os.path.join(dest_dir, nm)
            else:
                dst = dest_dir

            if src_dir.startswith(source_root):
                root = source_root
                is_from_source_root = True
            else:
                assert src_dir.startswith(build_root)
                root = build_root
                is_from_source_root = False

            for f in item[2:]:
                file_src = os.path.normpath(f)
                assert file_src.startswith(root)
                # Only source-tree files carry a vcsPath back-reference.
                rel_path = file_src[len(root):] if is_from_source_root else None
                file_dst = os.path.join(dst, file_src[len(src_dir):])
                copy_file(file_src, file_dst, overwrite=is_overwrite_existing, orig_path=rel_path)

    if args.bin_dir:
        # args.bin_dir = [bin_dir, namespace, file1, file2, ...]
        assert len(args.bin_dir) > 1
        bin_dir, bin_dir_namespace = os.path.normpath(args.bin_dir[0]) + os.path.sep, args.bin_dir[1]
        assert bin_dir.startswith(build_root)
        if bin_dir_namespace and bin_dir_namespace != '.':
            assert not os.path.isabs(bin_dir_namespace)
            dst = os.path.join(dest_dir, bin_dir_namespace)
        else:
            dst = dest_dir

        for file_src in args.bin_dir[2:]:
            assert os.path.isfile(file_src)
            assert file_src.startswith(bin_dir)
            file_dst = os.path.join(dst, file_src[len(bin_dir):])
            copy_file(file_src, file_dst, overwrite=is_overwrite_existing, orig_path=None)

    for src in args.files:
        file_src = os.path.normpath(src)
        assert os.path.isfile(file_src), 'File [{}] does not exist...'.format(file_src)
        rel_path = file_src
        orig_path = None
        if file_src.startswith(source_root):
            rel_path = file_src[len(source_root):]
            orig_path = rel_path
        elif file_src.startswith(build_root):
            rel_path = file_src[len(build_root):]
        else:
            raise Exception('Unexpected file path [{}].'.format(file_src))
        assert not os.path.isabs(rel_path)
        file_dst = os.path.join(args.dest_dir, rel_path)
        if file_dst != file_src:
            copy_file(file_src, file_dst, is_overwrite_existing, orig_path)
+
+
# Script entry point.
if __name__ == '__main__':
    main()
diff --git a/build/scripts/copy_files_to_dir.py b/build/scripts/copy_files_to_dir.py
new file mode 100644
index 0000000000..ead57ba16e
--- /dev/null
+++ b/build/scripts/copy_files_to_dir.py
@@ -0,0 +1,59 @@
+import argparse
+import errno
+import os
+import process_command_files as pcf
+import shutil
+import sys
+
+
def parse_args():
    """Parse the command line (with @-file expansion via process_command_files)."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--dest-dir', required=True)
    parser.add_argument('--existing', choices=('skip', 'overwrite'), default='overwrite')
    parser.add_argument('--flat', action='store_true')
    parser.add_argument('--skip-prefix', dest='skip_prefixes', action='append', default=[])
    parser.add_argument('files', nargs='*')
    return parser.parse_args(pcf.get_args(sys.argv[1:]))
+
+
def makedirs(dirname):
    """Recursively create dirname; an already-existing directory is not an error."""
    try:
        os.makedirs(dirname)
    except OSError as err:
        already_there = err.errno == errno.EEXIST and os.path.isdir(dirname)
        if not already_there:
            raise
+
+
def main():
    """Copy the given files into --dest-dir, optionally flattening paths or
    stripping a --skip-prefix from them.

    Fixes over the original:
      * dest_dir was built with os.pathsep (':' on POSIX) instead of
        os.path.sep, so makedirs() created a bogus 'dir:' directory;
      * 'break' when an existing file is met with --existing=skip aborted
        the whole copy loop instead of skipping just that file.
    """
    args = parse_args()

    dest_dir = os.path.normpath(args.dest_dir) + os.path.sep
    makedirs(dest_dir)

    prefixes = ['{}{}'.format(os.path.normpath(p), os.path.sep) for p in args.skip_prefixes]

    for src in args.files:
        src = os.path.normpath(src)
        assert os.path.isfile(src)
        if args.flat:
            rel_dst = os.path.basename(src)
        else:
            rel_dst = src
            # Strip the first matching prefix only.
            for prefix in prefixes:
                if src.startswith(prefix):
                    rel_dst = src[len(prefix):]
                    break
        assert not os.path.isabs(rel_dst)
        dst = os.path.join(args.dest_dir, rel_dst)
        if os.path.isfile(dst) and args.existing == 'skip':
            continue

        makedirs(os.path.dirname(dst))

        shutil.copyfile(src, dst)
+
+
# Script entry point.
if __name__ == '__main__':
    main()
diff --git a/build/scripts/copy_to_dir.py b/build/scripts/copy_to_dir.py
new file mode 100644
index 0000000000..9baeb5ffac
--- /dev/null
+++ b/build/scripts/copy_to_dir.py
@@ -0,0 +1,75 @@
+import errno
+import sys
+import os
+import shutil
+import optparse
+import tarfile
+
+
def parse_args():
    """Parse command-line options; returns (options, positional_args)."""
    parser = optparse.OptionParser()
    for name in ('--build-root', '--dest-dir', '--dest-arch'):
        parser.add_option(name)
    return parser.parse_args()
+
+
def ensure_dir_exists(path):
    """Create path (and parents) if missing; existing directories are accepted."""
    try:
        os.makedirs(path)
    except OSError as err:
        if err.errno != errno.EEXIST or not os.path.isdir(path):
            raise
+
+
def hardlink_or_copy(src, dst):
    """Hardlink src to dst, copying instead on Windows or across devices.

    An already-existing dst is silently accepted.
    """
    if os.name == 'nt':
        shutil.copy(src, dst)
        return
    try:
        os.link(src, dst)
    except OSError as e:
        if e.errno == errno.EEXIST:
            return
        if e.errno == errno.EXDEV:
            sys.stderr.write("Can't make cross-device hardlink - fallback to copy: {} -> {}\n".format(src, dst))
            shutil.copy(src, dst)
        else:
            raise
+
+
def main():
    """Copy/hardlink build artifacts into --dest-dir, optionally archiving them.

    Paths under --build-root are made relative to it.  Files ending in
    '.pkg.fake' are excluded from the archive but still copied.  Returns
    None, so the sys.exit(main()) wrapper exits with status 0 on success.
    """
    opts, args = parse_args()
    assert opts.build_root
    assert opts.dest_dir

    dest_arch = None
    if opts.dest_arch:
        if opts.dest_arch.endswith('.tar'):
            dest_arch = tarfile.open(opts.dest_arch, 'w', dereference=True)
        elif opts.dest_arch.endswith('.tar.gz') or opts.dest_arch.endswith('.tgz'):
            dest_arch = tarfile.open(opts.dest_arch, 'w:gz', dereference=True)
        else:
            # TODO: move check to graph generation stage
            raise Exception('Unsopported archive type for {}. Use one of: tar, tar.gz, tgz.'.format(os.path.basename(opts.dest_arch)))

    for arg in args:
        dst = arg
        # Strip the build root (plus its trailing separator) from the path.
        if dst.startswith(opts.build_root):
            dst = dst[len(opts.build_root) + 1:]

        if dest_arch and not arg.endswith('.pkg.fake'):
            dest_arch.add(arg, arcname=dst)

        dst = os.path.join(opts.dest_dir, dst)
        ensure_dir_exists(os.path.dirname(dst))
        hardlink_or_copy(arg, dst)

    if dest_arch:
        dest_arch.close()
+
+
# main() returns None, so sys.exit(main()) exits with status 0 on success.
if __name__ == '__main__':
    sys.exit(main())
diff --git a/build/scripts/coverage-info.py b/build/scripts/coverage-info.py
new file mode 100644
index 0000000000..94491d9256
--- /dev/null
+++ b/build/scripts/coverage-info.py
@@ -0,0 +1,282 @@
+import argparse
+import os
+import sys
+import tarfile
+import collections
+import subprocess
+import re
+
+
+GCDA_EXT = '.gcda'
+GCNO_EXT = '.gcno'
+
+
def suffixes(path):
    """Yield progressively longer path suffixes, shortest first.

    >>> list(suffixes('/a/b/c'))
    ['c', 'b/c', '/a/b/c']
    >>> list(suffixes('/a/b/c/'))
    ['c', 'b/c', '/a/b/c']
    >>> list(suffixes('/a'))
    ['/a']
    >>> list(suffixes('/a/'))
    ['/a']
    >>> list(suffixes('/'))
    []
    """
    path = os.path.normpath(path)

    current = path
    while True:
        parent = os.path.dirname(current)
        if parent == current:
            return
        # Drop the ancestor prefix (plus separator) from the full path.
        yield path.replace(parent + os.path.sep, '')
        current = parent
+
+
def recast(in_file, out_file, probe_path, update_stat):
    """Rewrite an lcov tracefile, remapping 'SF:' source paths via probe_path.

    Sections whose source file cannot be mapped are dropped entirely.
    update_stat is fed every record line of kept sections.  Returns True when
    at least one payload line (anything besides TN:/SF:) was written.
    """
    PREFIX = 'SF:'

    probed_path = None

    any_payload = False

    with open(in_file, 'r') as input, open(out_file, 'w') as output:
        # 'active' tracks whether the current SF: section was mapped; record
        # lines of unmapped sections are skipped.
        active = True
        for line in input:
            line = line.rstrip('\n')
            if line.startswith('TN:'):
                output.write(line + '\n')
            elif line.startswith(PREFIX):
                path = line[len(PREFIX):]
                probed_path = probe_path(path)
                if probed_path:
                    output.write(PREFIX + probed_path + '\n')
                active = bool(probed_path)
            else:
                if active:
                    update_stat(probed_path, line)
                    output.write(line + '\n')
                    any_payload = True

    return any_payload
+
+
def print_stat(da, fnda, teamcity_stat_output):
    """Print line/function coverage totals to stderr and, when
    teamcity_stat_output is a path, write TeamCity service messages there.

    NOTE: Python 2 script (print-to-file statement syntax below).
    """
    lines_hit = sum(map(bool, da.values()))
    lines_total = len(da.values())
    lines_coverage = 100.0 * lines_hit / lines_total if lines_total else 0

    func_hit = sum(map(bool, fnda.values()))
    func_total = len(fnda.values())
    func_coverage = 100.0 * func_hit / func_total if func_total else 0

    print >>sys.stderr, '[[imp]]Lines[[rst]] {: >16} {: >16} {: >16.1f}%'.format(lines_hit, lines_total, lines_coverage)
    print >>sys.stderr, '[[imp]]Functions[[rst]] {: >16} {: >16} {: >16.1f}%'.format(func_hit, func_total, func_coverage)

    if teamcity_stat_output:
        with open(teamcity_stat_output, 'w') as tc_file:
            tc_file.write("##teamcity[blockOpened name='Code Coverage Summary']\n")
            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsLTotal\' value='{}']\n".format(lines_total))
            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsLCovered\' value='{}']\n".format(lines_hit))
            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsMTotal\' value='{}']\n".format(func_total))
            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsMCovered\' value='{}']\n".format(func_hit))
            tc_file.write("##teamcity[blockClosed name='Code Coverage Summary']\n")
+
+
def chunks(l, n):
    """Split sequence l into consecutive slices of length n (last may be shorter).

    >>> list(chunks(range(10), 3))
    [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
    >>> list(chunks(range(10), 5))
    [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]
    """
    # range() works on both Python 2 and 3; xrange is Python-2-only and made
    # this helper a NameError under Python 3.
    for i in range(0, len(l), n):
        yield l[i:i + n]
+
+
def combine_info_files(lcov, files, out_file):
    """Merge lcov tracefiles into out_file, invoking lcov in chunks of 50.

    out_file itself is folded into each chunk after the first so results
    accumulate across chunks.  (Python 2 script: print-to-file statement.)
    """
    chunk_size = 50
    files = list(set(files))

    for chunk in chunks(files, chunk_size):
        combine_cmd = [lcov]
        if os.path.exists(out_file):
            # Carry the accumulated result into this chunk's merge.
            chunk.append(out_file)
        for trace in chunk:
            assert os.path.exists(trace), "Trace file does not exist: {} (cwd={})".format(trace, os.getcwd())
            combine_cmd += ["-a", os.path.abspath(trace)]
        print >>sys.stderr, '## lcov', ' '.join(combine_cmd[1:])
        out_file_tmp = "combined.tmp"
        with open(out_file_tmp, "w") as stdout:
            subprocess.check_call(combine_cmd, stdout=stdout)
        if os.path.exists(out_file):
            os.remove(out_file)
        os.rename(out_file_tmp, out_file)
+
+
def probe_path_global(path, source_root, prefix_filter, exclude_files):
    """Map a coverage-reported path onto an existing file under source_root.

    Tries the longest known suffix first; unit-test sources (*_ut.cpp) and
    paths rejected by prefix_filter/exclude_files are never mapped.
    Returns the absolute path, or None when nothing matches.
    """
    if path.endswith('_ut.cpp'):
        return None

    for suffix in reversed(list(suffixes(path))):
        if prefix_filter and not suffix.startswith(prefix_filter):
            continue
        if exclude_files and exclude_files.match(suffix):
            continue
        candidate = source_root + os.sep + suffix
        if os.path.isfile(candidate):
            return candidate

    return None
+
+
def update_stat_global(src_file, line, fnda, da):
    """Accumulate FNDA/DA records from a tracefile line into the counters.

    fnda maps src_file+function -> visit count; da maps src_file+line ->
    visit count.  The sentinel '=====' (unreachable line) counts as zero.
    """
    if line.startswith("FNDA:"):
        visits, name = line[len("FNDA:"):].split(',')
        fnda[src_file + name] += int(visits)

    if line.startswith("DA"):
        lineno, visits = line[len("DA:"):].split(',')
        da[src_file + lineno] += 0 if visits == '=====' else int(visits)
+
+
def gen_info_global(cmd, cov_info, probe_path, update_stat, lcov_args):
    """Run a geninfo command, remap its output via recast(), and register the
    resulting tracefile in lcov_args when it contains any payload.
    (Python 2 script: print-to-file statement.)"""
    print >>sys.stderr, '## geninfo', ' '.join(cmd)
    subprocess.check_call(cmd)
    if recast(cov_info + '.tmp', cov_info, probe_path, update_stat):
        lcov_args.append(cov_info)
+
+
def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files):
    """Extract .gcno files from gcno_archive and seed coverage bookkeeping.

    Fills fname2gcno (source -> gcno path) and, for non-empty gcno files,
    schedules an initial geninfo run ('-i') whose tracefile name is recorded
    in fname2info.
    """
    with tarfile.open(gcno_archive) as gcno_tf:
        for gcno_item in gcno_tf:
            if gcno_item.isfile() and gcno_item.name.endswith(GCNO_EXT):
                gcno_tf.extract(gcno_item)

                gcno_name = gcno_item.name
                source_fname = gcno_name[:-len(GCNO_EXT)]
                if prefix_filter and not source_fname.startswith(prefix_filter):
                    sys.stderr.write("Skipping {} (doesn't match prefix '{}')\n".format(source_fname, prefix_filter))
                    continue
                if exclude_files and exclude_files.search(source_fname):
                    sys.stderr.write("Skipping {} (matched exclude pattern '{}')\n".format(source_fname, exclude_files.pattern))
                    continue

                fname2gcno[source_fname] = gcno_name

                # Empty .gcno files carry no data; skip them.
                if os.path.getsize(gcno_name) > 0:
                    coverage_info = source_fname + '.' + str(len(fname2info[source_fname])) + '.info'
                    fname2info[source_fname].append(coverage_info)
                    geninfo_cmd = [
                        geninfo_executable,
                        '--gcov-tool', gcov_tool,
                        '-i', gcno_name,
                        '-o', coverage_info + '.tmp'
                    ]
                    gen_info(geninfo_cmd, coverage_info)
+
+
def process_all_coverage_files(gcda_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info):
    """Extract .gcda files and schedule geninfo runs for matching sources.

    Each .gcda is renamed (to the shortest suffix known in fname2gcno) so it
    lands next to the matching .gcno before geninfo is invoked.
    """
    with tarfile.open(gcda_archive) as gcda_tf:
        for gcda_item in gcda_tf:
            if gcda_item.isfile() and gcda_item.name.endswith(GCDA_EXT):
                gcda_name = gcda_item.name
                source_fname = gcda_name[:-len(GCDA_EXT)]
                for suff in suffixes(source_fname):
                    if suff in fname2gcno:
                        gcda_new_name = suff + GCDA_EXT
                        # Rename inside the tar entry so extraction places the
                        # file at the expected relative path.
                        gcda_item.name = gcda_new_name
                        gcda_tf.extract(gcda_item)
                        if os.path.getsize(gcda_new_name) > 0:
                            coverage_info = suff + '.' + str(len(fname2info[suff])) + '.info'
                            fname2info[suff].append(coverage_info)
                            geninfo_cmd = [
                                geninfo_executable,
                                '--gcov-tool', gcov_tool,
                                gcda_new_name,
                                '-o', coverage_info + '.tmp'
                            ]
                            gen_info(geninfo_cmd, coverage_info)
+
+
def gen_cobertura(tool, output, combined_info):
    """Convert an lcov tracefile into Cobertura XML using the given tool.

    Raises on a non-zero exit code, including the tool's stdout/stderr in
    the message.
    """
    cmd = [
        tool,
        combined_info,
        '-b', '#hamster#',
        '-o', output
    ]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if proc.returncode:
        raise Exception('lcov_cobertura failed with exit code {}\nstdout: {}\nstderr: {}'.format(proc.returncode, out, err))
+
+
def main(source_root, output, gcno_archive, gcda_archive, gcov_tool, prefix_filter, exclude_regexp, teamcity_stat_output, coverage_report_path, gcov_report, lcov_cobertura):
    """Build an lcov HTML coverage report (plus optional TeamCity stats and
    Cobertura XML) from gcno/gcda archives, packing the result into 'output'.
    (Python 2 script: print-to-file statement below.)"""
    exclude_files = re.compile(exclude_regexp) if exclude_regexp else None

    fname2gcno = {}
    fname2info = collections.defaultdict(list)
    lcov_args = []
    geninfo_executable = os.path.join(source_root, 'devtools', 'lcov', 'geninfo')

    # Closures binding this run's configuration for the helpers above.
    def probe_path(path):
        return probe_path_global(path, source_root, prefix_filter, exclude_files)

    fnda = collections.defaultdict(int)
    da = collections.defaultdict(int)

    def update_stat(src_file, line):
        update_stat_global(src_file, line, da, fnda)

    def gen_info(cmd, cov_info):
        gen_info_global(cmd, cov_info, probe_path, update_stat, lcov_args)

    init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files)
    process_all_coverage_files(gcda_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info)

    if coverage_report_path:
        output_dir = coverage_report_path
    else:
        output_dir = output + '.dir'

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    teamcity_stat_file = None
    if teamcity_stat_output:
        teamcity_stat_file = os.path.join(output_dir, 'teamcity.out')
    print_stat(da, fnda, teamcity_stat_file)

    if lcov_args:
        output_trace = "combined.info"
        combine_info_files(os.path.join(source_root, 'devtools', 'lcov', 'lcov'), lcov_args, output_trace)
        cmd = [os.path.join(source_root, 'devtools', 'lcov', 'genhtml'), '-p', source_root, '--ignore-errors', 'source', '-o', output_dir, output_trace]
        print >>sys.stderr, '## genhtml', ' '.join(cmd)
        subprocess.check_call(cmd)
        if lcov_cobertura:
            gen_cobertura(lcov_cobertura, gcov_report, output_trace)

    with tarfile.open(output, 'w') as tar:
        tar.add(output_dir, arcname='.')
+
+
# Script entry point: parse options and delegate to main(**options).
if __name__ == '__main__':
    parser = argparse.ArgumentParser()

    parser.add_argument('--source-root', action='store')
    parser.add_argument('--output', action='store')
    parser.add_argument('--gcno-archive', action='store')
    parser.add_argument('--gcda-archive', action='store')
    parser.add_argument('--gcov-tool', action='store')
    parser.add_argument('--prefix-filter', action='store')
    parser.add_argument('--exclude-regexp', action='store')
    # NOTE(review): store_const/True makes this a boolean flag even though the
    # name suggests a path; main() only uses it as a truthy switch — confirm.
    parser.add_argument('--teamcity-stat-output', action='store_const', const=True)
    parser.add_argument('--coverage-report-path', action='store')
    parser.add_argument('--gcov-report', action='store')
    parser.add_argument('--lcov-cobertura', action='store')

    args = parser.parse_args()
    main(**vars(args))
diff --git a/build/scripts/cpp_flatc_wrapper.py b/build/scripts/cpp_flatc_wrapper.py
new file mode 100644
index 0000000000..9f74b65570
--- /dev/null
+++ b/build/scripts/cpp_flatc_wrapper.py
@@ -0,0 +1,30 @@
+import os
+import subprocess
+import sys
+
+
def main():
    """Wrap flatc: redirect '-o' to the header's directory, then emit a stub
    .cpp that includes the generated .fbs(.64).h header.

    On flatc failure the tool's output is forwarded to stderr and its exit
    code is propagated.
    """
    cmd = sys.argv[1:]
    header = None
    if '-o' in cmd:
        pos = cmd.index('-o')
        if pos + 1 < len(cmd):
            # flatc wants an output *directory*; remember the header path.
            header = cmd[pos + 1]
            cmd[pos + 1] = os.path.dirname(header)
    proc = subprocess.run(cmd, capture_output=True, text=True)
    if proc.returncode:
        if proc.stdout:
            sys.stderr.write('stdout:\n{}\n'.format(proc.stdout))
        if proc.stderr:
            sys.stderr.write('stderr:\n{}\n'.format(proc.stderr))
        sys.exit(proc.returncode)
    if header and header.endswith(('.fbs.h', '.fbs64.h')):
        cpp_file = '{}.cpp'.format(header[:-2])
        with open(cpp_file, 'w') as f:
            f.write('#include "{}"\n'.format(os.path.basename(header)))
    sys.exit(0)
+
+
# Script entry point.
if __name__ == '__main__':
    main()
diff --git a/build/scripts/create_jcoverage_report.py b/build/scripts/create_jcoverage_report.py
new file mode 100644
index 0000000000..45083ff4f7
--- /dev/null
+++ b/build/scripts/create_jcoverage_report.py
@@ -0,0 +1,112 @@
+import argparse
+import tarfile
+import zipfile
+import os
+import sys
+import time
+import subprocess
+
+
def mkdir_p(path):
    """Best-effort recursive mkdir: any OSError is ignored (mkdir -p style)."""
    try:
        os.makedirs(path)
    except OSError:
        # Deliberate broad swallow: existing directories (and races) are fine.
        pass
+
+
class Timer(object):
    """Wall-clock stopwatch that logs elapsed whole seconds between steps."""

    def __init__(self):
        # Timestamp of construction / the previous step.
        self.start = time.time()

    def step(self, msg):
        """Write msg and seconds elapsed since the last step to stderr, then reset."""
        sys.stderr.write("{} ({}s)\n".format(msg, int(time.time() - self.start)))
        self.start = time.time()
+
+
def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_format, tar_output, agent_disposition, runners_paths):
    """Build a JaCoCo coverage report from an exec-data archive and jar list.

    Extracts the coverage exec files and the sources/classes from the listed
    jars, then runs the devtools jacoco agent to produce a report, optionally
    packed into a tar.  (Python 2 script: print statement and byte-string
    filename encode below.)
    """
    timer = Timer()
    reports_dir = 'jacoco_reports_dir'
    mkdir_p(reports_dir)
    with tarfile.open(source) as tf:
        tf.extractall(reports_dir)
    timer.step("Coverage data extracted")
    reports = [os.path.join(reports_dir, fname) for fname in os.listdir(reports_dir)]

    with open(jars_list) as f:
        jars = f.read().strip().split()
    # Test-runner jars must not pollute the coverage scope.
    if jars and runners_paths:
        for r in runners_paths:
            try:
                jars.remove(r)
            except ValueError:
                pass

    src_dir = 'sources_dir'
    cls_dir = 'classes_dir'

    mkdir_p(src_dir)
    mkdir_p(cls_dir)

    for jar in jars:
        # The agent jar travels in the dependency list; pick it up here.
        if jar.endswith('devtools-jacoco-agent.jar'):
            agent_disposition = jar

        # Skip java contrib - it's irrelevant coverage
        if jar.startswith('contrib/java'):
            continue

        with zipfile.ZipFile(jar) as jf:
            for entry in jf.infolist():
                if entry.filename.endswith('.java'):
                    dest = src_dir

                elif entry.filename.endswith('.class'):
                    dest = cls_dir

                else:
                    continue

                # Python 2: force a byte-string name before extraction.
                entry.filename = entry.filename.encode('utf-8')
                jf.extract(entry, dest)
    timer.step("Jar files extracted")

    if not agent_disposition:
        print>>sys.stderr, 'Can\'t find jacoco agent. Will not generate html report for java coverage.'

    if tar_output:
        report_dir = 'java.report.temp'
    else:
        report_dir = output
    mkdir_p(report_dir)

    if agent_disposition:
        agent_cmd = [java, '-jar', agent_disposition, src_dir, cls_dir, prefix_filter or '.', exclude_filter or '__no_exclude__', report_dir, output_format]
        agent_cmd += reports
        subprocess.check_call(agent_cmd)
        timer.step("Jacoco finished")

    if tar_output:
        with tarfile.open(output, 'w') as outf:
            outf.add(report_dir, arcname='.')
+
+
# Script entry point: parse options and delegate to main(**options).
if __name__ == '__main__':
    # Swap the plain C locale for a UTF-8 one (presumably so non-ASCII file
    # names decode correctly — confirm against the jacoco agent's needs).
    if 'LC_ALL' in os.environ:
        if os.environ['LC_ALL'] == 'C':
            os.environ['LC_ALL'] = 'en_GB.UTF-8'

    parser = argparse.ArgumentParser()

    parser.add_argument('--source', action='store')
    parser.add_argument('--output', action='store')
    parser.add_argument('--java', action='store')
    parser.add_argument('--prefix-filter', action='store')
    parser.add_argument('--exclude-filter', action='store')
    parser.add_argument('--jars-list', action='store')
    parser.add_argument('--output-format', action='store', default="html")
    parser.add_argument('--raw-output', dest='tar_output', action='store_false', default=True)
    parser.add_argument('--agent-disposition', action='store')
    parser.add_argument('--runner-path', dest='runners_paths', action='append', default=[])
    args = parser.parse_args()
    main(**vars(args))
diff --git a/build/scripts/custom_link_green_mysql.py b/build/scripts/custom_link_green_mysql.py
new file mode 100644
index 0000000000..13bb9e4ac7
--- /dev/null
+++ b/build/scripts/custom_link_green_mysql.py
@@ -0,0 +1,97 @@
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
# Socket-related libc symbols that get redefined to their green_* wrappers so
# the green (coroutine) runtime can intercept them.
SYMBOLS_TO_PATCH = (
    'connect',
    'poll',
    'recv',
    'recvfrom',
    'send',
    'sendto',
)
+
class Error(Exception):
    """Raised for any failure while patching the mysql client libraries."""
    pass
+
+
def find_compiler(args):
    """Return the first argument whose basename is clang or clang++.

    Raises Error when the command line contains no known compiler.
    """
    for candidate in args:
        if os.path.basename(candidate) in ('clang', 'clang++'):
            return candidate
    raise Error('No known compiler found in the command line')
+
+
def find_libraries(project, args):
    """Yield the args that live under the given project directory prefix."""
    prefix = project if project.endswith('/') else project + '/'

    for arg in args:
        if arg.startswith(prefix):
            yield arg
+
+
def rename_symbol(symbol):
    """Return the wrapped ("green") name for a libc symbol."""
    return 'green_{}'.format(symbol)
+
+
def patch_object(object_path, objcopy):
    """Rename every symbol in SYMBOLS_TO_PATCH to its green_* wrapper inside
    the given object file, using llvm-objcopy's --redefine-sym."""
    cmd = [objcopy]
    for symbol in SYMBOLS_TO_PATCH:
        cmd += ['--redefine-sym', '{}={}'.format(symbol, rename_symbol(symbol))]
    cmd.append(object_path)
    subprocess.check_call(cmd)
+
+
def patch_library(library_path, ar, objcopy):
    """Rewrite every object inside a static library, renaming patched symbols.

    The archive is exploded into a temp dir next to the library, each member
    is run through patch_object, and a freshly packed archive replaces the
    original via os.rename.
    """
    # Created beside the library so the final os.rename stays on one filesystem.
    tmpdir = tempfile.mkdtemp(dir=os.path.dirname(library_path))
    try:
        subprocess.check_call((ar, 'x', library_path), cwd=tmpdir)
        names = os.listdir(tmpdir)
        for name in names:
            patch_object(os.path.join(tmpdir, name), objcopy=objcopy)

        new_library_path = os.path.join(tmpdir, 'library.a')
        subprocess.check_call([ar, 'rcs', new_library_path] + names, cwd=tmpdir)

        os.rename(new_library_path, library_path)

    finally:
        shutil.rmtree(tmpdir)
+
+
def main():
    """Patch libmysql_r static libraries on the link command line so their
    socket calls go through green_* wrappers.

    Any failure is reported to stderr and swallowed (Python 2 script:
    print-to-file statement below).
    """
    try:
        args = sys.argv[1:]
        compiler = find_compiler(args)
        compiler_dir = os.path.dirname(compiler)

        def get_tool(name):
            # llvm binutils are expected to live next to the clang binary.
            path = os.path.join(compiler_dir, name)
            if not os.path.exists(path):
                raise Error('No {} found alongside the compiler'.format(name))
            return path

        ar = get_tool('llvm-ar')
        objcopy = get_tool('llvm-objcopy')

        libraries = tuple(find_libraries('contrib/libs/libmysql_r', args))
        for library in libraries:
            library_path = os.path.abspath(library)
            if not os.path.exists(library_path):
                raise Error('No {} file exists'.format(library))

            patch_library(library_path, ar=ar, objcopy=objcopy)

    except Exception as error:
        name = os.path.basename(sys.argv[0])
        command = ' '.join(args)
        message = '{name} failed: {error}\nCommand line: {command}'
        print >> sys.stderr, message.format(**locals())
+
# Script entry point.
if __name__ == '__main__':
    main()
diff --git a/build/scripts/decimal_md5.py b/build/scripts/decimal_md5.py
new file mode 100644
index 0000000000..e70ca80a09
--- /dev/null
+++ b/build/scripts/decimal_md5.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import hashlib
+import struct
+import sys
+import os
+import argparse
+
+
def print_code(checksum, func_name):
    """Emit a C function definition returning the checksum string.
    (Python 2 script: print statement below.)"""
    if len(func_name) == 0:  # safe fallback for old ya.make files
        func_name = "DecimalMD5"
    print 'const char* ' + func_name + '() {return "' + checksum + '";}'
+
+
def ensure_paths_exist(paths):
    """Exit with code 1, listing offenders on stderr, if any path is missing.
    (Python 2 script: print-to-file statements below.)"""
    bad_paths = sorted(
        path for path in paths
        if not os.path.exists(path)
    )
    if bad_paths:
        print >> sys.stderr, "decimal_md5 inputs do not exist:"
        for path in bad_paths:
            print >> sys.stderr, path
        sys.exit(1)
+
+
+def _update_digest_with_file_contents(digest, path, block_size=65535):
+ with open(path) as f:
+ while True:
+ block = f.read(block_size)
+ if not block:
+ break
+ digest.update(block)
+
+
def main():
    """Print a C function returning a decimal checksum of the given targets.

    With --fixed-output the provided integer (masked to --lower-bits) is used
    directly; otherwise the md5 over all target file contents is reduced to
    the requested number of lower bits.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--fixed-output", help="don not calculate md5, use this value instead")
    parser.add_argument("--lower-bits", help="use specified count of lower bits", type=int, default=32)
    parser.add_argument("--source-root", help="arcadia source root")
    parser.add_argument("--func-name", help="custom function name to be defined", default="DecimalMD5")
    parser.add_argument("targets", nargs='*', default=['.'])

    args = parser.parse_args()

    abs_paths = [
        os.path.join(args.source_root, target)
        for target in args.targets
    ]
    ensure_paths_exist(abs_paths)

    if args.fixed_output:
        try:
            bitmask = (1 << args.lower_bits) - 1
            # Zero-pad to the decimal width of the mask for stable output width.
            fmt = '{:0%dd}' % len(str(bitmask))
            checksum = fmt.format(int(args.fixed_output) & bitmask)
        except ValueError:
            raise ValueError("decimal_md5: bad value passed via --fixed-output: %s" % args.fixed_output)
        print_code(str(checksum), func_name=args.func_name)
        return

    md5 = hashlib.md5()
    for path in abs_paths:
        _update_digest_with_file_contents(md5, path)

    # Fold the 128-bit digest into a single integer, then keep the low bits.
    md5_parts = struct.unpack('IIII', md5.digest())
    md5_int = sum(part << (32 * n) for n, part in enumerate(md5_parts))
    bitmask = (1 << args.lower_bits) - 1
    fmt = '{:0%dd}' % len(str(bitmask))

    checksum_str = fmt.format(md5_int & bitmask)
    print_code(checksum_str, func_name=args.func_name)
+
+
+if __name__ == "__main__":
+ main()
+
diff --git a/build/scripts/error.py b/build/scripts/error.py
new file mode 100644
index 0000000000..f7d8ecb2cc
--- /dev/null
+++ b/build/scripts/error.py
@@ -0,0 +1,77 @@
+# Sync content of this file with devtools/ya/core/error/__init__.py
+
# Error-message substrings treated as transient/retryable failures by
# is_temporary_error() below.
TEMPORARY_ERROR_MESSAGES = [
    'Connection reset by peer',
    'Connection timed out',
    'Function not implemented',
    'I/O operation on closed file',
    'Internal Server Error',
    'Network connection closed unexpectedly',
    'Network is unreachable',
    'No route to host',
    'No space left on device',
    'Not enough space',
    'Temporary failure in name resolution',
    'The read operation timed out',
    'timeout: timed out',
]
+
+
+# Node exit codes
class ExitCodes(object):
    """Well-known build-node exit codes."""
    TEST_FAILED = 10
    COMPILATION_FAILED = 11
    INFRASTRUCTURE_ERROR = 12
    NOT_RETRIABLE_ERROR = 13
    YT_STORE_FETCH_ERROR = 14
+
+
def merge_exit_codes(exit_codes):
    """Collapse exit codes into one: 0 for an empty list, otherwise the
    maximum, with negative codes (signals) treated as 1."""
    if not exit_codes:
        return 0
    return max(1 if code < 0 else code for code in exit_codes)
+
+
def is_temporary_error(exc):
    """Heuristically decide whether exc is a transient failure worth retrying.

    Checks, in order: an explicit 'temporary' attribute, retryable errno
    values, socket timeouts / DNS errors, HTTP 429, incomplete HTTP reads,
    and finally known error-message substrings.

    NOTE(review): imports the Python-2-only urllib2/httplib modules; under
    Python 3 reaching those imports would raise ImportError — confirm the
    intended interpreter before reusing.
    """
    import logging
    logger = logging.getLogger(__name__)

    if getattr(exc, 'temporary', False):
        logger.debug("Exception has temporary attribute: %s", exc)
        return True

    import errno
    err = getattr(exc, 'errno', None)

    if err == errno.ECONNREFUSED or err == errno.ENETUNREACH:
        logger.debug("Exception has errno attribute: %s (errno=%s)", exc, err)
        return True

    import socket

    if isinstance(exc, socket.timeout) or isinstance(getattr(exc, 'reason', None), socket.timeout):
        logger.debug("Socket timeout exception: %s", exc)
        return True

    if isinstance(exc, socket.gaierror):
        logger.debug("Getaddrinfo exception: %s", exc)
        return True

    import urllib2

    if isinstance(exc, urllib2.HTTPError) and exc.code in (429, ):
        logger.debug("urllib2.HTTPError: %s", exc)
        return True

    import httplib

    if isinstance(exc, httplib.IncompleteRead):
        logger.debug("IncompleteRead exception: %s", exc)
        return True

    exc_str = str(exc)

    for message in TEMPORARY_ERROR_MESSAGES:
        if message in exc_str:
            logger.debug("Found temporary error pattern (%s): %s", message, exc_str)
            return True

    return False
diff --git a/build/scripts/extract_asrc.py b/build/scripts/extract_asrc.py
new file mode 100644
index 0000000000..89892ddf2d
--- /dev/null
+++ b/build/scripts/extract_asrc.py
@@ -0,0 +1,23 @@
+import argparse
+import os
+import tarfile
+
+
def parse_args():
    """Parse CLI options: a list of input archives and the output directory."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--input', nargs='*', required=True)
    arg_parser.add_argument('--output', required=True)
    return arg_parser.parse_args()
+
+
def main():
    """Unpack every existing ``.asrc`` tar archive from --input into --output."""
    args = parse_args()

    existing_archives = [path for path in args.input if path.endswith('.asrc') and os.path.exists(path)]
    for archive_path in existing_archives:
        with tarfile.open(archive_path, 'r') as archive:
            archive.extractall(path=args.output)


if __name__ == '__main__':
    main()
diff --git a/build/scripts/extract_docs.py b/build/scripts/extract_docs.py
new file mode 100644
index 0000000000..20e8311346
--- /dev/null
+++ b/build/scripts/extract_docs.py
@@ -0,0 +1,43 @@
+import argparse
+import os
+import process_command_files as pcf
+import tarfile
+import sys
+
+
def parse_args():
    """Parse CLI options (argv is first expanded via process_command_files)."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--dest-dir', required=True)
    arg_parser.add_argument('--skip-prefix', dest='skip_prefixes', action='append', default=[])
    arg_parser.add_argument('docs', nargs='*')
    return arg_parser.parse_args(pcf.get_args(sys.argv[1:]))
+
+
def main():
    """Extract documentation archives into --dest-dir.

    ``preprocessed.tar.gz`` archives are unpacked into a subdirectory of
    --dest-dir mirroring the archive's own directory (with any --skip-prefix
    stripped); ``.docslib`` archives are unpacked directly into --dest-dir.
    """
    args = parse_args()

    # Normalized prefixes (with trailing separator) to strip from archive paths.
    prefixes = ['{}{}'.format(os.path.normpath(p), os.path.sep) for p in args.skip_prefixes]

    def _valid_docslib(path):
        base = os.path.basename(path)
        return base.endswith(('.docslib', '.docslib.fake')) or base == 'preprocessed.tar.gz'

    for src in [p for p in args.docs if _valid_docslib(p)]:
        # FIX: compare the basename, not the whole path — the original exact
        # comparison made rel_dst/--skip-prefix dead code for archives living
        # in subdirectories (their layout was silently flattened).
        if os.path.basename(src) == 'preprocessed.tar.gz':
            rel_dst = os.path.dirname(os.path.normpath(src))
            for prefix in prefixes:
                if src.startswith(prefix):
                    rel_dst = rel_dst[len(prefix):]
                    # FIX: stop after the first matching prefix; 'continue'
                    # allowed a second match to slice rel_dst again.
                    break
            assert not os.path.isabs(rel_dst)
            dest_dir = os.path.join(args.dest_dir, rel_dst)
        else:
            dest_dir = args.dest_dir
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        with tarfile.open(src, 'r') as tar_file:
            tar_file.extractall(dest_dir)


if __name__ == '__main__':
    main()
diff --git a/build/scripts/extract_jacoco_report.py b/build/scripts/extract_jacoco_report.py
new file mode 100644
index 0000000000..02e4ba9f13
--- /dev/null
+++ b/build/scripts/extract_jacoco_report.py
@@ -0,0 +1,29 @@
+import argparse
+import os
+import re
+import tarfile
+
+
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--archive', action='store')
    parser.add_argument('--source-re', action='store')
    parser.add_argument('--destination', action='store')
    args = parser.parse_args()

    # Unpack every archive member matching --source-re into a scratch
    # directory, then concatenate them all into a single --destination file.
    with tarfile.open(args.archive) as archive:
        # Truncate the destination so repeated runs do not append.
        open(args.destination, 'wb').close()
        name_matches = re.compile(args.source_re).match
        scratch_dir = os.path.join(os.path.dirname(args.destination), 'temp_profiles')
        if not os.path.exists(scratch_dir):
            os.makedirs(scratch_dir)
        for member in archive:
            if name_matches(member.name):
                archive.extract(member, path=scratch_dir)
        for directory, _, file_names in os.walk(scratch_dir):
            for file_name in file_names:
                with open(args.destination, 'ab') as dst:
                    with open(os.path.join(scratch_dir, directory, file_name), 'rb') as src:
                        dst.write(src.read())
diff --git a/build/scripts/f2c.py b/build/scripts/f2c.py
new file mode 100644
index 0000000000..7021e1391f
--- /dev/null
+++ b/build/scripts/f2c.py
@@ -0,0 +1,58 @@
+import sys
+import subprocess
+import argparse
+import os
+
+
# Prologue prepended to every generated C file: silences GCC warnings that
# f2c output routinely triggers.
header = '''\
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Wmissing-braces"
#pragma GCC diagnostic ignored "-Wuninitialized"
#pragma GCC diagnostic ignored "-Wreturn-type"
#pragma GCC diagnostic ignored "-Wmissing-field-initializers"
#endif

'''

# Epilogue restoring the diagnostic state pushed by the header above.
footer = '''
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
'''
+
+
def mkdir_p(directory):
    """Create *directory* (with parents) if it does not already exist."""
    if os.path.exists(directory):
        return
    os.makedirs(directory)
+
+
if __name__ == '__main__':
    # Translate a Fortran source (-c) to C (-o) using the f2c binary (-t),
    # wrapping the output in warning-suppressing pragmas.
    parser = argparse.ArgumentParser()

    parser.add_argument('-t', '--tool')
    parser.add_argument('-c', '--input')
    parser.add_argument('-o', '--output')

    args = parser.parse_args()
    # f2c writes auxiliary files; keep them in a per-output scratch dir (-T).
    tmpdir = args.output + '.f2c'
    mkdir_p(tmpdir)
    # should parse includes, really
    p = subprocess.Popen(
        [args.tool, '-w', '-R', '-a', '-I' + os.path.dirname(args.input), '-T' + tmpdir],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
    # Fortran source is fed via stdin; the translated C arrives on stdout.
    stdout, stderr = p.communicate(input=open(args.input).read())
    ret = p.wait()

    if ret:
        # Non-zero exit: surface f2c's stderr and propagate the code
        # (Python 2 print-to-stderr syntax).
        print >>sys.stderr, 'f2c failed: %s, %s' % (stderr, ret)
        sys.exit(ret)

    if 'Error' in stderr:
        # f2c can report errors on stderr without a non-zero exit code.
        print >>sys.stderr, stderr

    # Surround the generated C with the GCC pragma header/footer defined above.
    with open(args.output, 'w') as f:
        f.write(header)
        f.write(stdout)
        f.write(footer)
diff --git a/build/scripts/fail_module_cmd.py b/build/scripts/fail_module_cmd.py
new file mode 100644
index 0000000000..fa14c0d851
--- /dev/null
+++ b/build/scripts/fail_module_cmd.py
@@ -0,0 +1,7 @@
+import sys
+
+
if __name__ == '__main__':
    # The single argument is the name of the target whose command failed.
    assert len(sys.argv) == 2, 'Unexpected number of arguments...'
    # [[bad]]/[[rst]] appear to be terminal-markup tokens for the build UI — TODO confirm.
    sys.stderr.write('Error: module command for target [[bad]]{}[[rst]] was not executed due to build graph configuration errors...\n'.format(sys.argv[1]))
    sys.exit(1)
diff --git a/build/scripts/fetch_from.py b/build/scripts/fetch_from.py
new file mode 100755
index 0000000000..b6ea7cefa1
--- /dev/null
+++ b/build/scripts/fetch_from.py
@@ -0,0 +1,396 @@
+import datetime as dt
+import errno
+import hashlib
+import json
+import logging
+import os
+import platform
+import random
+import shutil
+import socket
+import string
+import sys
+import tarfile
+import urllib2
+
+import retry
+
+
def make_user_agent():
    """Build the User-Agent string sent with fetch requests."""
    hostname = socket.gethostname()
    return 'fetch_from: {host}'.format(host=hostname)
+
+
def add_common_arguments(parser):
    """Register the CLI options shared by all fetch_from_* scripts on *parser*."""
    parser.add_argument('--copy-to')  # used by jbuild in fetch_resource
    parser.add_argument('--rename-to')  # used by test_node in inject_mds_resource_to_graph
    parser.add_argument('--copy-to-dir')
    parser.add_argument('--untar-to')
    parser.add_argument(
        '--rename', action='append', default=[], metavar='FILE',
        help='rename FILE to the corresponding output')
    parser.add_argument('--executable', action='store_true', help='make outputs executable')
    parser.add_argument('--log-path')
    parser.add_argument(
        '-v', '--verbose', action='store_true',
        default=os.environ.get('YA_VERBOSE_FETCHER'),
        help='increase stderr verbosity')
    parser.add_argument('outputs', nargs='*', default=[])
+
+
def ensure_dir(path):
    """Create *path* (with parents) unless it is '' or already a directory."""
    if path == '' or os.path.isdir(path):
        return
    os.makedirs(path)
+
+
+# Reference code: library/python/fs/__init__.py
def hardlink_or_copy(src, dst):
    """Hardlink *src* to *dst*, falling back to a copy where links can't be made.

    Windows always copies; an already-existing destination is treated as success.
    """
    ensure_dir(os.path.dirname(dst))

    if os.name == 'nt':
        shutil.copy(src, dst)
        return

    try:
        os.link(src, dst)
    except OSError as e:
        if e.errno == errno.EEXIST:
            # Destination is already there — nothing to do.
            return
        if e.errno in (errno.EXDEV, errno.EMLINK, errno.EINVAL, errno.EACCES):
            # Cross-device link, link limit or permissions: degrade to a copy.
            sys.stderr.write("Can't make hardlink (errno={}) - fallback to copy: {} -> {}\n".format(e.errno, src, dst))
            shutil.copy(src, dst)
        else:
            sys.stderr.write("src: {} dst: {}\n".format(src, dst))
            raise
+
+
def rename_or_copy_and_remove(src, dst):
    """Move *src* to *dst*; if rename fails (e.g. cross-device), copy then delete."""
    ensure_dir(os.path.dirname(dst))
    try:
        os.rename(src, dst)
    except OSError:
        shutil.copy(src, dst)
        os.remove(src)
+
+
class BadChecksumFetchError(Exception):
    """Downloaded data's md5/sha1 does not match the expected checksum."""
    pass
+
+
class IncompleteFetchError(Exception):
    """Downloaded size differs from the Content-Length announced by the server."""
    pass
+
+
class ResourceUnpackingError(Exception):
    """A fetched archive could not be untarred, or an output destination clashed."""
    pass
+
+
class ResourceIsDirectoryError(Exception):
    """The fetched resource is a directory but no --untar-to was given."""
    pass
+
+
class OutputIsDirectoryError(Exception):
    """A declared output path turned out to be a directory, not a file."""
    pass
+
+
class OutputNotExistError(Exception):
    """A declared output path does not exist after processing."""
    pass
+
+
def setup_logging(args, base_name):
    """Configure DEBUG-level file logging (plus stderr echo when verbose).

    The absolute log path is stored back onto ``args.abs_log_path``.
    """
    if args.log_path:
        log_file_name = args.log_path
    else:
        log_file_name = base_name + ".log"

    args.abs_log_path = os.path.abspath(log_file_name)

    try:
        os.makedirs(os.path.dirname(args.abs_log_path))
    except OSError:
        # Directory may already exist — best effort only.
        pass

    logging.basicConfig(filename=args.abs_log_path, level=logging.DEBUG)
    if args.verbose:
        logging.getLogger().addHandler(logging.StreamHandler(sys.stderr))
+
+
def is_temporary(e):
    """Return True when exception *e* looks transient and a retry makes sense."""
    # HTTP 410/404 mean the resource itself is gone — never retriable.
    permanently_broken = isinstance(e, urllib2.HTTPError) and e.code in (410, 404)
    if permanently_broken:
        return False

    retriable_types = (BadChecksumFetchError, IncompleteFetchError, urllib2.URLError, socket.error)
    if isinstance(e, retriable_types):
        return True

    # Fall back to the shared heuristics in build/scripts/error.py.
    import error
    return error.is_temporary_error(e)
+
+
def uniq_string_generator(size=6, chars=string.ascii_lowercase + string.digits):
    """Return a random string of *size* characters drawn from *chars*."""
    picked = [random.choice(chars) for _ in range(size)]
    return ''.join(picked)
+
+
def report_to_snowden(value):
    """Best-effort telemetry: POST *value* to the snowden endpoint; never raise."""
    try:
        payload = {
            'namespace': 'ygg',
            'key': 'fetch-from-sandbox',
            'value': json.dumps(value),
        }
        urllib2.urlopen(
            'https://back-snowden.qloud.yandex-team.ru/report/add',
            json.dumps([payload, ]),
            timeout=5,
        )
    except Exception as e:
        # Reporting failures must never break the fetch itself.
        logging.warning('report_to_snowden failed: %s', e)
+
+
def copy_stream(read, *writers, **kwargs):
    """Pump data from the *read* callable into every writer until exhausted.

    The ``size`` kwarg sets the chunk size (default 1 MiB).
    """
    chunk_size = kwargs.get('size', 1024 * 1024)
    while True:
        chunk = read(chunk_size)
        if not chunk:
            return
        for sink in writers:
            sink(chunk)
+
+
def md5file(fname):
    """Return the hex md5 digest of the file at *fname*."""
    digest = hashlib.md5()
    with open(fname, 'rb') as stream:
        for chunk in iter(lambda: stream.read(1024 * 1024), b''):
            digest.update(chunk)
    return digest.hexdigest()
+
+
def git_like_hash_with_size(filepath):
    """
    Calculate git like hash for path
    """
    # Returns (hexdigest, size): sha1 over the file content followed by
    # '\0' and the decimal size.  NOTE(review): not exactly git's blob
    # format (git prefixes "blob <size>\0") — confirm before reuse.
    sha = hashlib.sha1()

    file_size = 0

    with open(filepath, 'rb') as f:
        while True:
            block = f.read(2 ** 16)

            if not block:
                break

            file_size += len(block)
            sha.update(block)

    # Feeding str (not bytes) to update() — this module is Python 2 only.
    sha.update('\0')
    sha.update(str(file_size))

    return sha.hexdigest(), file_size
+
+
def size_printer(display_name, size):
    """Return a chunk callback that reports download progress to stderr.

    *size* is the expected total in bytes; a falsy size disables the output.
    Progress is printed at most once every 10 seconds.
    """
    sz = [0]  # one-element lists act as mutable closure cells (no nonlocal in Python 2)
    last_stamp = [dt.datetime.now()]

    def printer(chunk):
        sz[0] += len(chunk)
        now = dt.datetime.now()
        if last_stamp[0] + dt.timedelta(seconds=10) < now:
            if size:
                # ##status## / [[imp]] markup is presumably parsed by the build
                # console — verify against the consumer (Python 2 print syntax).
                print >>sys.stderr, "##status##{} - [[imp]]{:.1f}%[[rst]]".format(display_name, 100.0 * sz[0] / size if size else 0)
            last_stamp[0] = now

    return printer
+
+
def fetch_url(url, unpack, resource_file_name, expected_md5=None, expected_sha1=None, tries=10, writers=None):
    """Download *url* to a uniquely named temp file and verify its integrity.

    When *unpack* is true the payload is treated as a gzipped tar and
    *resource_file_name* extracted from it becomes the result.  Any extra
    *writers* receive each downloaded chunk.  Returns the local path.
    Raises BadChecksumFetchError / IncompleteFetchError on mismatch.
    """
    logging.info('Downloading from url %s name %s and expected md5 %s', url, resource_file_name, expected_md5)
    tmp_file_name = uniq_string_generator()

    request = urllib2.Request(url, headers={'User-Agent': make_user_agent()})
    req = retry.retry_func(lambda: urllib2.urlopen(request, timeout=30), tries=tries, delay=5, backoff=1.57079)
    logging.debug('Headers: %s', req.headers.headers)
    expected_file_size = int(req.headers.get('Content-Length', 0))
    real_md5 = hashlib.md5()
    real_sha1 = hashlib.sha1()

    # Single pass: stream to disk while feeding both hashers, the progress
    # printer and any caller-supplied writers.
    with open(tmp_file_name, 'wb') as fp:
        copy_stream(
            req.read,
            fp.write,
            real_md5.update,
            real_sha1.update,
            size_printer(resource_file_name, expected_file_size),
            *([] if writers is None else writers)
        )

    real_md5 = real_md5.hexdigest()
    real_file_size = os.path.getsize(tmp_file_name)
    # sha1 is finalized over content + '\0' + size, mirroring git_like_hash_with_size().
    real_sha1.update('\0')
    real_sha1.update(str(real_file_size))
    real_sha1 = real_sha1.hexdigest()

    if unpack:
        tmp_dir = tmp_file_name + '.dir'
        os.makedirs(tmp_dir)
        with tarfile.open(tmp_file_name, mode="r|gz") as tar:
            tar.extractall(tmp_dir)
        tmp_file_name = os.path.join(tmp_dir, resource_file_name)
        if expected_md5:
            # The streamed md5 covered the archive; recompute over the extracted file.
            real_md5 = md5file(tmp_file_name)

    logging.info('File size %s (expected %s)', real_file_size, expected_file_size or "UNKNOWN")
    logging.info('File md5 %s (expected %s)', real_md5, expected_md5)
    logging.info('File sha1 %s (expected %s)', real_sha1, expected_sha1)

    if expected_md5 and real_md5 != expected_md5:
        report_to_snowden(
            {
                'headers': req.headers.headers,
                'expected_md5': expected_md5,
                'real_md5': real_md5
            }
        )

        raise BadChecksumFetchError(
            'Downloaded {}, but expected {} for {}'.format(
                real_md5,
                expected_md5,
                url,
            )
        )

    if expected_sha1 and real_sha1 != expected_sha1:
        report_to_snowden(
            {
                'headers': req.headers.headers,
                'expected_sha1': expected_sha1,
                'real_sha1': real_sha1
            }
        )

        raise BadChecksumFetchError(
            'Downloaded {}, but expected {} for {}'.format(
                real_sha1,
                expected_sha1,
                url,
            )
        )

    if expected_file_size and expected_file_size != real_file_size:
        report_to_snowden({'headers': req.headers.headers, 'file_size': real_file_size})

        raise IncompleteFetchError(
            'Downloaded {}, but expected {} for {}'.format(
                real_file_size,
                expected_file_size,
                url,
            )
        )

    return tmp_file_name
+
+
def chmod(filename, mode):
    """Set the permission bits of *filename* to *mode* (no-op on Windows)."""
    if platform.system().lower() == 'windows':
        # https://docs.microsoft.com/en-us/windows/win32/fileio/hard-links-and-junctions:
        # hard to reset read-only attribute for removal if there are multiple hardlinks
        return
    stat = os.stat(filename)
    if stat.st_mode & 0o777 == mode:
        return
    try:
        os.chmod(filename, mode)
    except OSError:
        import pwd
        sys.stderr.write("{} st_mode: {} pwuid: {}\n".format(filename, stat.st_mode, pwd.getpwuid(os.stat(filename).st_uid)))
        raise
+
+
def process(fetched_file, file_name, args, remove=True):
    """Place *fetched_file* into the locations requested by the CLI args.

    Handles --copy-to/--rename-to/--copy-to-dir/--untar-to plus the
    --rename/outputs pairs, makes the results read-only (and optionally
    executable), then deletes the source unless *remove* is False or the
    fetched file itself is one of the outputs.
    """
    assert len(args.rename) <= len(args.outputs), (
        'too few outputs to rename', args.rename, 'into', args.outputs)

    # A directory result is only meaningful together with --untar-to.
    fetched_file_is_dir = os.path.isdir(fetched_file)
    if fetched_file_is_dir and not args.untar_to:
        raise ResourceIsDirectoryError('Resource may be directory only with untar_to option: ' + fetched_file)

    # make all read only
    if fetched_file_is_dir:
        for root, _, files in os.walk(fetched_file):
            for filename in files:
                chmod(os.path.join(root, filename), 0o444)
    else:
        chmod(fetched_file, 0o444)


    if args.copy_to:
        hardlink_or_copy(fetched_file, args.copy_to)
        if not args.outputs:
            args.outputs = [args.copy_to]

    if args.rename_to:
        args.rename.append(fetched_file)
        if not args.outputs:
            args.outputs = [args.rename_to]

    if args.copy_to_dir:
        hardlink_or_copy(fetched_file, os.path.join(args.copy_to_dir, file_name))

    if args.untar_to:
        ensure_dir(args.untar_to)
        # Normalized set of paths the caller expects; only these are extracted.
        inputs = set(map(os.path.normpath, args.rename + args.outputs[len(args.rename):]))
        if fetched_file_is_dir:
            for member in inputs:
                base, name = member.split('/', 1)
                src = os.path.normpath(os.path.join(fetched_file, name))
                dst = os.path.normpath(os.path.join(args.untar_to, member))
                hardlink_or_copy(src, dst)
        else:
            # Extract only requested files
            try:
                with tarfile.open(fetched_file, mode='r:*') as tar:
                    members = [entry for entry in tar if os.path.normpath(os.path.join(args.untar_to, entry.name)) in inputs]
                    tar.extractall(args.untar_to, members=members)
            except tarfile.ReadError as e:
                logging.exception(e)
                raise ResourceUnpackingError('File {} cannot be untared'.format(fetched_file))

        # Forbid changes to the loaded resource data
        for root, _, files in os.walk(args.untar_to):
            for filename in files:
                chmod(os.path.join(root, filename), 0o444)


    for src, dst in zip(args.rename, args.outputs):
        if src == 'RESOURCE':
            # 'RESOURCE' is a placeholder meaning the fetched file itself.
            src = fetched_file
        if os.path.abspath(src) == os.path.abspath(fetched_file):
            logging.info('Copying %s to %s', src, dst)
            hardlink_or_copy(src, dst)
        else:
            logging.info('Renaming %s to %s', src, dst)
            if os.path.exists(dst):
                raise ResourceUnpackingError("Target file already exists ({} -> {})".format(src, dst))
            if remove:
                rename_or_copy_and_remove(src, dst)
            else:
                hardlink_or_copy(src, dst)

    for path in args.outputs:
        if not os.path.exists(path):
            raise OutputNotExistError('Output does not exist: %s' % os.path.abspath(path))
        if not os.path.isfile(path):
            raise OutputIsDirectoryError('Output must be a file, not a directory: %s' % os.path.abspath(path))
        if args.executable:
            chmod(path, os.stat(path).st_mode | 0o111)
        if os.path.abspath(path) == os.path.abspath(fetched_file):
            # The fetched file itself is an output — keep it on disk.
            remove = False

    if remove:
        if fetched_file_is_dir:
            shutil.rmtree(fetched_file)
        else:
            os.remove(fetched_file)
diff --git a/build/scripts/fetch_from_archive.py b/build/scripts/fetch_from_archive.py
new file mode 100644
index 0000000000..57aff91b5e
--- /dev/null
+++ b/build/scripts/fetch_from_archive.py
@@ -0,0 +1,36 @@
+import os
+import sys
+import logging
+import argparse
+
+import fetch_from
+
+
def parse_args():
    """Parse the common fetcher options plus --file-name/--archive."""
    arg_parser = argparse.ArgumentParser()
    fetch_from.add_common_arguments(arg_parser)
    arg_parser.add_argument('--file-name', required=True)
    arg_parser.add_argument('--archive', required=True)
    return arg_parser.parse_args()
+
def main(args):
    """Register the prebuilt --archive as the fetched resource (no download)."""
    file_name = args.file_name.rstrip('-')
    fetch_from.process(args.archive, file_name, args, remove=False)
+
+
if __name__ == '__main__':
    args = parse_args()
    fetch_from.setup_logging(args, os.path.basename(__file__))

    try:
        main(args)
    except Exception as e:
        logging.exception(e)
        # Replay the full debug log to stderr for postmortem (Python 2 print syntax).
        print >>sys.stderr, open(args.abs_log_path).read()
        sys.stderr.flush()

        import error
        # Transient failures exit with INFRASTRUCTURE_ERROR so the caller may retry.
        sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_from_external.py b/build/scripts/fetch_from_external.py
new file mode 100644
index 0000000000..d4ed6f4221
--- /dev/null
+++ b/build/scripts/fetch_from_external.py
@@ -0,0 +1,60 @@
+import sys
+import json
+import os.path
+import fetch_from
+import argparse
+import logging
+
+
def parse_args():
    """Parse the common fetcher options plus the .external descriptor path."""
    arg_parser = argparse.ArgumentParser()
    fetch_from.add_common_arguments(arg_parser)
    arg_parser.add_argument('--external-file', required=True)
    arg_parser.add_argument('--custom-fetcher')
    arg_parser.add_argument('--resource-file')
    return arg_parser.parse_args()
+
+
def main(args):
    """Dispatch the fetch to sandbox or MDS based on the .external file.

    If --resource-file already exists it is reused directly; otherwise the
    JSON descriptor selects the storage backend.
    """
    # Strip the '.external' suffix (the short name is used in messages only).
    # FIX: the original used rstrip('.external'), which strips a *character
    # set* and mangles names such as 'data.external' -> 'd'.
    if args.external_file.endswith('.external'):
        external_file = args.external_file[:-len('.external')]
    else:
        external_file = args.external_file

    if os.path.isfile(args.resource_file):
        # Resource already materialized next to the graph — reuse it.
        fetch_from.process(args.resource_file, os.path.basename(args.resource_file), args, False)
        return

    error = None
    try:
        with open(args.external_file) as f:
            js = json.load(f)

        if js['storage'] == 'SANDBOX':
            import fetch_from_sandbox as ffsb
            del args.external_file
            args.resource_id = js['resource_id']
            ffsb.main(args)
        elif js['storage'] == 'MDS':
            import fetch_from_mds as fmds
            del args.external_file
            args.key = js['resource_id']
            fmds.main(args)
        else:
            error = 'Unsupported storage in {}'.format(external_file)
    except Exception:
        logging.error('Invalid external file: {}'.format(external_file))
        raise
    if error:
        raise Exception(error)
+
+
if __name__ == '__main__':
    args = parse_args()
    fetch_from.setup_logging(args, os.path.basename(__file__))

    try:
        main(args)
    except Exception as e:
        logging.exception(e)
        # Replay the full debug log to stderr for postmortem (Python 2 print syntax).
        print >>sys.stderr, open(args.abs_log_path).read()
        sys.stderr.flush()

        import error
        # Transient failures exit with INFRASTRUCTURE_ERROR so the caller may retry.
        sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_from_mds.py b/build/scripts/fetch_from_mds.py
new file mode 100644
index 0000000000..5e4e656394
--- /dev/null
+++ b/build/scripts/fetch_from_mds.py
@@ -0,0 +1,50 @@
+import os
+import sys
+import logging
+import argparse
+
+import fetch_from
+
# Download prefix; keys appended to it look like "<namespace>/<sha1>/<file_name>".
MDS_PREFIX = "https://storage.yandex-team.ru/get-devtools/"
+
+
def parse_args():
    """Parse the common fetcher options plus the MDS --key."""
    arg_parser = argparse.ArgumentParser()
    fetch_from.add_common_arguments(arg_parser)
    arg_parser.add_argument('--key', required=True)
    return arg_parser.parse_args()
+
+
def fetch(key):
    """Download the MDS resource for *key* ("prefix/sha1/file_name").

    Returns (local_path, file_name); raises ValueError for malformed keys.
    """
    components = key.split("/")
    if len(components) != 3:
        raise ValueError("Invalid MDS key '{}'".format(key))

    _, sha1, file_name = components
    local_path = fetch_from.fetch_url(MDS_PREFIX + key, False, file_name, expected_sha1=sha1)
    return local_path, file_name
+
+
def main(args):
    """Fetch the resource identified by --key and process the outputs."""
    local_path, resource_file_name = fetch(args.key)
    fetch_from.process(local_path, resource_file_name, args)
+
+
if __name__ == '__main__':
    args = parse_args()
    fetch_from.setup_logging(args, os.path.basename(__file__))

    try:
        main(args)
    except Exception as e:
        logging.exception(e)
        # Replay the full debug log to stderr for postmortem (Python 2 print syntax).
        print >>sys.stderr, open(args.abs_log_path).read()
        sys.stderr.flush()

        import error
        # Transient failures exit with INFRASTRUCTURE_ERROR so the caller may retry.
        sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_from_npm.py b/build/scripts/fetch_from_npm.py
new file mode 100644
index 0000000000..3f87c1fefa
--- /dev/null
+++ b/build/scripts/fetch_from_npm.py
@@ -0,0 +1,109 @@
+import os
+import sys
+import time
+import logging
+import argparse
+import hashlib
+
+import sky
+import fetch_from
+
+
# npm registry mirror; tarball URLs are assembled in _fetch_via_http().
NPM_BASEURL = "http://npm.yandex-team.ru/"
+
+
def parse_args():
    """Parse the common fetcher options plus the npm package coordinates."""
    arg_parser = argparse.ArgumentParser()
    fetch_from.add_common_arguments(arg_parser)
    arg_parser.add_argument("--name", required=True)
    arg_parser.add_argument("--version", required=True)
    arg_parser.add_argument("--sky-id", required=True)
    arg_parser.add_argument("--integrity", required=True)
    arg_parser.add_argument("--integrity-algorithm", required=True)
    return arg_parser.parse_args()
+
+
def fetch(name, version, sky_id, integrity, integrity_algorithm, file_name, tries=5):
    """
    :param name: package name
    :type name: str
    :param version: package version
    :type version: str
    :param sky_id: sky id of tarball
    :type sky_id: str
    :param integrity: tarball integrity (hex)
    :type integrity: str
    :param integrity_algorithm: integrity algorithm (known for openssl)
    :type integrity_algorithm: str
    :param tries: tries count
    :type tries: int
    :return: path to fetched file
    :rtype: str
    """
    # if sky.is_avaliable() and 'NOTS_FETCH_FROM_HTTP_ONLY' not in os.environ:
    #     fetcher = lambda: sky.fetch(sky_id, file_name)
    # else:
    # Fetching via sky is disabled unless explicitly requested.
    # TODO: https://st.yandex-team.ru/FBP-351
    if 'NOTS_FETCH_FROM_SKY' in os.environ and sky.is_avaliable():
        fetcher = lambda: sky.fetch(sky_id, file_name)
    else:
        fetcher = lambda: _fetch_via_http(name, version, integrity, integrity_algorithm, file_name)

    fetched_file = None
    exc_info = None

    # Retry with a linearly increasing backoff; remember the *first* failure.
    for i in range(0, tries):
        try:
            fetched_file = fetcher()
            exc_info = None
            break
        except Exception as e:
            logging.exception(e)
            exc_info = exc_info or sys.exc_info()
            time.sleep(i)

    if exc_info:
        # Python 2 three-argument raise: re-raise preserving the original traceback.
        raise exc_info[0], exc_info[1], exc_info[2]

    return fetched_file
+
+
def _fetch_via_http(name, version, integrity, integrity_algorithm, file_name):
    """Download a package tarball from the npm mirror and verify its checksum."""
    # Example: "http://npm.yandex-team.ru/@scope/name/-/name-0.0.1.tgz" for @scope/name v0.0.1.
    short_name = name.split("/").pop()
    url = NPM_BASEURL + "/".join([name, "-", "{}-{}.tgz".format(short_name, version)])

    hashobj = hashlib.new(integrity_algorithm)
    fetched_file = fetch_from.fetch_url(url, False, file_name, tries=1, writers=[hashobj.update])

    if hashobj.hexdigest() != integrity:
        raise fetch_from.BadChecksumFetchError("Expected {}, but got {} for {}".format(
            integrity,
            hashobj.hexdigest(),
            file_name,
        ))

    return fetched_file
+
+
def main(args):
    """Fetch the npm tarball (named after --copy-to) and process the outputs."""
    file_name = os.path.basename(args.copy_to)
    fetched_file = fetch(
        args.name, args.version, args.sky_id, args.integrity, args.integrity_algorithm, file_name)
    fetch_from.process(fetched_file, file_name, args)
+
+
if __name__ == "__main__":
    args = parse_args()
    fetch_from.setup_logging(args, os.path.basename(__file__))

    try:
        main(args)
    except Exception as e:
        logging.exception(e)
        # Replay the full debug log to stderr for postmortem (Python 2 print syntax).
        print >>sys.stderr, open(args.abs_log_path).read()
        sys.stderr.flush()

        import error
        # Transient failures exit with INFRASTRUCTURE_ERROR so the caller may retry.
        sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_from_sandbox.py b/build/scripts/fetch_from_sandbox.py
new file mode 100755
index 0000000000..6af180b4b0
--- /dev/null
+++ b/build/scripts/fetch_from_sandbox.py
@@ -0,0 +1,272 @@
+import itertools
+import json
+import logging
+import argparse
+import os
+import random
+import subprocess
+import sys
+import time
+import urllib2
+import uuid
+
+import fetch_from
+
+
ORIGIN_SUFFIX = '?origin=fetch-from-sandbox'  # appended to every download URL
MDS_PREFIX = 'http://storage-int.mds.yandex.net/get-sandbox/'  # MDS mirror of sandbox resources
TEMPORARY_ERROR_CODES = (429, 500, 503, 504)  # HTTP statuses considered retriable
+
+
def parse_args():
    """Parse the common fetcher options plus --resource-id and friends."""
    arg_parser = argparse.ArgumentParser()
    fetch_from.add_common_arguments(arg_parser)
    arg_parser.add_argument('--resource-id', type=int, required=True)
    arg_parser.add_argument('--custom-fetcher')
    arg_parser.add_argument('--resource-file')
    return arg_parser.parse_args()
+
+
class ResourceInfoError(Exception):
    """Sandbox resource metadata is unusable (e.g. the resource is not READY)."""
    pass
+
+
class UnsupportedProtocolException(Exception):
    """A download transport (e.g. skynet) is not available on this host."""
    pass
+
+
+def _sky_path():
+ return "/usr/local/bin/sky"
+
+
def _is_skynet_avaliable():
    """Return True when the sky binary exists and answers ``--version``."""
    if not os.path.exists(_sky_path()):
        return False
    try:
        subprocess.check_output([_sky_path(), "--version"])
        return True
    except (subprocess.CalledProcessError, OSError):
        return False
+
+
def download_by_skynet(resource_info, file_name):
    """Fetch a sandbox resource via skynet into a fresh temp dir.

    Returns the path to *file_name* inside that directory.  Raises
    UnsupportedProtocolException when skynet is unusable and ValueError
    when the resource has no skynet_id.
    """

    def run_sky_get(skynet_id, target_dir, timeout=None):
        # 'sky get' downloads skynet_id into target_dir.
        cmd = [_sky_path(), 'get', "-N", "Backbone", "--user", "--wait", "--dir", target_dir, skynet_id]
        if timeout is not None:
            cmd += ["--timeout", str(timeout)]
        logging.info('Call skynet with args: %s', cmd)
        output = subprocess.check_output(cmd).strip()
        logging.debug('Skynet call with args %s is finished, result is %s', cmd, output)
        return output

    if not _is_skynet_avaliable():
        raise UnsupportedProtocolException("Skynet is not available")

    skynet_id = resource_info.get("skynet_id")
    if not skynet_id:
        raise ValueError("Resource does not have skynet_id")

    temp_dir = os.path.abspath(fetch_from.uniq_string_generator())
    os.mkdir(temp_dir)
    run_sky_get(skynet_id, temp_dir)
    return os.path.join(temp_dir, file_name)
+
+
def _urlopen(url, data=None, headers=None):
    """GET/POST *url* with retries and return the response body.

    Temporary HTTP errors (TEMPORARY_ERROR_CODES, honouring Retry-After)
    and generic failures are retried up to n times; non-temporary HTTP
    errors are raised immediately.  FIX: the original fell off the loop
    after the last temporary HTTP error and silently returned None — the
    last error is now re-raised instead.
    """
    n = 10
    tout = 30
    started = time.time()
    reqid = uuid.uuid4()

    request = urllib2.Request(url, data=data, headers=headers or {})
    request.add_header('X-Request-Timeout', str(tout))
    request.add_header('X-Request-Id', str(reqid))
    request.add_header('User-Agent', 'fetch_from_sandbox.py')
    for i in xrange(n):
        retry_after = i
        try:
            request.add_header('X-Request-Duration', str(int(time.time() - started)))
            return urllib2.urlopen(request, timeout=tout).read()

        except urllib2.HTTPError as e:
            logging.warning('failed to fetch URL %s with HTTP code %d: %s', url, e.code, e)
            retry_after = int(e.headers.get('Retry-After', str(retry_after)))

            if e.code not in TEMPORARY_ERROR_CODES:
                raise
            if i + 1 == n:
                # Out of attempts — propagate instead of returning None.
                raise

        except Exception as e:
            logging.warning('failed to fetch URL %s: %s', url, e)

            if i + 1 == n:
                raise e

        time.sleep(retry_after)
+
+
def _query(url):
    """Fetch *url* and decode the response body as JSON."""
    raw = _urlopen(url)
    return json.loads(raw)
+
+
# REST API root used by get_resource_info() / get_resource_http_links().
_SANDBOX_BASE_URL = 'https://sandbox.yandex-team.ru/api/v1.0'
+
+
def get_resource_info(resource_id, touch=False, no_links=False):
    """Query sandbox for resource metadata.

    *touch* refreshes the resource's last-access time and *no_links*
    suppresses link expansion via the corresponding request headers.
    FIX: the headers dict was built but never sent (the original called
    ``_query(url)``, dropping it) — it is now passed to ``_urlopen``.
    """
    url = ''.join((_SANDBOX_BASE_URL, '/resource/', str(resource_id)))
    headers = {}
    if touch:
        headers.update({'X-Touch-Resource': '1'})
    if no_links:
        headers.update({'X-No-Links': '1'})
    return json.loads(_urlopen(url, headers=headers))
+
+
def get_resource_http_links(resource_id):
    """Return the direct HTTP download URLs for a resource (origin-tagged)."""
    url = ''.join((_SANDBOX_BASE_URL, '/resource/', str(resource_id), '/data/http'))
    return [entry['url'] + ORIGIN_SUFFIX for entry in _query(url)]
+
+
def fetch_via_script(script, resource_id):
    """Run a custom fetcher *script* with the resource id; return its stripped stdout."""
    output = subprocess.check_output([script, str(resource_id)])
    return output.rstrip()
+
+
def fetch(resource_id, custom_fetcher):
    """Download sandbox resource *resource_id*, trying several transports.

    Order of attempts: skynet, the custom fetcher script, the sandbox
    proxy, the shuffled storage hosts, then MDS.  Returns
    (local_path, original_file_name); raises when every transport fails.
    """
    try:
        resource_info = get_resource_info(resource_id, touch=True, no_links=True)
    except Exception as e:
        sys.stderr.write(
            "Failed to fetch resource {}: {}\n".format(resource_id, str(e))
        )
        raise

    if resource_info.get('state', 'DELETED') != 'READY':
        raise ResourceInfoError("Resource {} is not READY".format(resource_id))

    logging.info('Resource %s info %s', str(resource_id), json.dumps(resource_info))

    is_multifile = resource_info.get('multifile', False)
    resource_file_name = os.path.basename(resource_info["file_name"])
    expected_md5 = resource_info.get('md5')

    proxy_link = resource_info['http']['proxy'] + ORIGIN_SUFFIX
    if is_multifile:
        # Multi-file resources are streamed from the proxy as a tgz.
        proxy_link += '&stream=tgz'

    mds_id = resource_info.get('attributes', {}).get('mds')
    mds_link = MDS_PREFIX + mds_id if mds_id else None

    def get_storage_links():
        # Shuffle so load spreads across storage hosts.
        storage_links = get_resource_http_links(resource_id)
        random.shuffle(storage_links)
        return storage_links

    skynet = _is_skynet_avaliable()

    if not skynet:
        logging.info("Skynet is not available, will try other protocols")

    def iter_tries():
        # Lazily yields one download strategy per attempt.
        if skynet:
            yield lambda: download_by_skynet(resource_info, resource_file_name)

        if custom_fetcher:
            yield lambda: fetch_via_script(custom_fetcher, resource_id)

        # Don't try too hard here: we will get back to proxy later on
        yield lambda: fetch_from.fetch_url(proxy_link, False, resource_file_name, expected_md5, tries=2)
        for x in get_storage_links():
            # Don't spend too much time connecting single host
            yield lambda: fetch_from.fetch_url(x, False, resource_file_name, expected_md5, tries=1)
            if mds_link is not None:
                # Don't try too hard here: we will get back to MDS later on
                yield lambda: fetch_from.fetch_url(mds_link, True, resource_file_name, expected_md5, tries=2)
        yield lambda: fetch_from.fetch_url(proxy_link, False, resource_file_name, expected_md5)
        if mds_link is not None:
            yield lambda: fetch_from.fetch_url(mds_link, True, resource_file_name, expected_md5)

    if resource_info.get('attributes', {}).get('ttl') != 'inf':
        sys.stderr.write('WARNING: resource {} ttl is not "inf".\n'.format(resource_id))

    exc_info = None
    for i, action in enumerate(itertools.islice(iter_tries(), 0, 10)):
        try:
            fetched_file = action()
            break
        except UnsupportedProtocolException:
            pass
        except subprocess.CalledProcessError as e:
            logging.warning('failed to fetch resource %s with subprocess: %s', resource_id, e)
            time.sleep(i)
        except urllib2.HTTPError as e:
            logging.warning('failed to fetch resource %s with HTTP code %d: %s', resource_id, e.code, e)
            if e.code not in TEMPORARY_ERROR_CODES:
                exc_info = exc_info or sys.exc_info()
            time.sleep(i)
        except Exception as e:
            logging.exception(e)
            exc_info = exc_info or sys.exc_info()
            time.sleep(i)
    else:
        # for/else: no strategy succeeded within the attempt budget.
        if exc_info:
            # Python 2 three-argument raise: preserve the first traceback.
            raise exc_info[0], exc_info[1], exc_info[2]
        else:
            raise Exception("No available protocol and/or server to fetch resource")

    return fetched_file, resource_info['file_name']
+
+
+def _get_resource_info_from_file(resource_file):
+ if resource_file is None or not os.path.exists(resource_file):
+ return None
+
+ RESOURCE_INFO_JSON = "resource_info.json"
+ RESOURCE_CONTENT_FILE_NAME = "resource"
+
+ resource_dir, resource_file = os.path.split(resource_file)
+ if resource_file != RESOURCE_CONTENT_FILE_NAME:
+ return None
+
+ resource_json = os.path.join(resource_dir, RESOURCE_INFO_JSON)
+ if not os.path.isfile(resource_json):
+ return None
+
+ try:
+ with open(resource_json, 'r') as j:
+ resource_info = json.load(j)
+ resource_info['file_name'] # check consistency
+ return resource_info
+ except:
+ logging.debug('Invalid %s in %s', RESOURCE_INFO_JSON, resource_dir)
+
+ return None
+
+
def main(args):
    """Obtain the resource (from a local file or sandbox) and process outputs."""
    custom_fetcher = os.environ.get('YA_CUSTOM_FETCHER')

    resource_info = _get_resource_info_from_file(args.resource_file)
    if resource_info:
        fetched_file = args.resource_file
        file_name = resource_info['file_name']
    else:
        # This code should be merged to ya and removed.
        fetched_file, file_name = fetch(args.resource_id, custom_fetcher)

    keep_source = custom_fetcher or resource_info
    fetch_from.process(fetched_file, file_name, args, remove=not keep_source)
+
+
if __name__ == '__main__':
    args = parse_args()
    fetch_from.setup_logging(args, os.path.basename(__file__))

    try:
        main(args)
    except Exception as e:
        logging.exception(e)
        # Replay the full debug log to stderr for postmortem (Python 2 print syntax).
        print >>sys.stderr, open(args.abs_log_path).read()
        sys.stderr.flush()

        import error
        # Transient failures exit with INFRASTRUCTURE_ERROR so the caller may retry.
        sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_resource.py b/build/scripts/fetch_resource.py
new file mode 100644
index 0000000000..d5af311e5d
--- /dev/null
+++ b/build/scripts/fetch_resource.py
@@ -0,0 +1,43 @@
+import urllib2
+import argparse
+import xmlrpclib
+
+
def parse_args():
    """Parse -r/--resource-id and -o/--output."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('-r', '--resource-id', type=int, required=True)
    arg_parser.add_argument('-o', '--output', required=True)
    return arg_parser.parse_args()
+
+
def fetch(url, retries=4, timeout=5):
    """GET *url*, retrying up to *retries* times; return the response body."""
    for attempt in xrange(retries):
        try:
            return urllib2.urlopen(url, timeout=timeout).read()
        except Exception:
            # Swallow and retry until the attempt budget is exhausted.
            if attempt + 1 < retries:
                continue
            raise
+
+
def fetch_resource(id_):
    """Try every sandbox HTTP mirror for resource *id_*; return the first success."""
    proxy = xmlrpclib.ServerProxy("https://sandbox.yandex-team.ru/sandbox/xmlrpc")
    urls = proxy.get_resource_http_links(id_)

    for candidate in urls:
        try:
            return fetch(candidate)
        except Exception:
            continue

    raise Exception('Cannot fetch resource {}'.format(id_))
+
+
if __name__ == '__main__':
    args = parse_args()

    # resource_id is already an int via argparse; int() kept as a safeguard.
    with open(args.output, 'wb') as f:
        f.write(fetch_resource(int(args.resource_id)))
diff --git a/build/scripts/filter_zip.py b/build/scripts/filter_zip.py
new file mode 100644
index 0000000000..b2121b9c9e
--- /dev/null
+++ b/build/scripts/filter_zip.py
@@ -0,0 +1,71 @@
+import argparse
+import os
+import re
+import uuid
+import zipfile
+
+
+def pattern_to_regexp(p):
+ return re.compile(
+ '^'
+ + re.escape(p)
+ .replace(r'\*\*\/', '[_DIR_]')
+ .replace(r'\*', '[_FILE_]')
+ .replace('[_DIR_]', '(.*/)?')
+ .replace('[_FILE_]', '([^/]*)')
+ + '$'
+ )
+
+
+def is_deathman(positive_filter, negative_filter, candidate):
+    remove = bool(positive_filter)
+ for pf in positive_filter:
+ if pf.match(candidate):
+ remove = False
+ break
+ if not negative_filter or remove:
+ return remove
+ for nf in negative_filter:
+ if nf.match(candidate):
+ remove = True
+ break
+ return remove
+
+
+def just_do_it():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--positive', action='append', default=[])
+ parser.add_argument('--negative', action='append', default=[])
+ parser.add_argument('--file', action='store', required=True)
+ args = parser.parse_args()
+ if not args.positive and not args.negative:
+ return
+ pos = [pattern_to_regexp(i) for i in args.positive]
+ neg = [pattern_to_regexp(i) for i in args.negative]
+ temp_dirname = None
+ for _ in range(10):
+ candidate = '__unpacked_{}__'.format(uuid.uuid4())
+        if not os.path.exists(candidate):
+            temp_dirname = candidate
+            os.makedirs(temp_dirname); break
+ if not temp_dirname:
+ raise Exception("Can't generate name for temp dir")
+
+ with zipfile.ZipFile(args.file, 'r') as zip_ref:
+ zip_ref.extractall(temp_dirname)
+
+ for root, _, files in os.walk(temp_dirname):
+ for f in files:
+ candidate = os.path.join(root, f).replace('\\', '/')
+ if is_deathman(pos, neg, os.path.relpath(candidate, temp_dirname)):
+ os.remove(candidate)
+
+ with zipfile.ZipFile(args.file, 'w') as zip_ref:
+ for root, _, files in os.walk(temp_dirname):
+ for f in files:
+ realname = os.path.join(root, f)
+ zip_ref.write(realname, os.path.sep.join(os.path.normpath(realname).split(os.path.sep, 2)[1:]))
+
+
+if __name__ == '__main__':
+ just_do_it()
diff --git a/build/scripts/find_and_tar.py b/build/scripts/find_and_tar.py
new file mode 100644
index 0000000000..f251623c68
--- /dev/null
+++ b/build/scripts/find_and_tar.py
@@ -0,0 +1,22 @@
+import os
+import sys
+import tarfile
+
+
+def find_gcno(dirname, tail):
+ for cur, _dirs, files in os.walk(dirname):
+ for f in files:
+ if f.endswith(tail):
+ yield os.path.relpath(os.path.join(cur, f))
+
+
+def main(args):
+ output = args[0]
+ tail = args[1] if len(args) > 1 else ''
+ with tarfile.open(output, 'w:') as tf:
+ for f in find_gcno(os.getcwd(), tail):
+ tf.add(f)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/build/scripts/find_time_trace.py b/build/scripts/find_time_trace.py
new file mode 100644
index 0000000000..954d203caa
--- /dev/null
+++ b/build/scripts/find_time_trace.py
@@ -0,0 +1,17 @@
+import os
+import sys
+
+# /scripts/find_time_trace.py <object_file> <destination>
+# clang generates `-ftime-trace` output file path based on main output file path
+
+
+def main():
+ assert len(sys.argv) == 3
+ obj_path = sys.argv[1]
+ trace_path = sys.argv[2]
+ orig_trace_path = obj_path.rpartition('.o')[0] + '.json'
+ os.rename(orig_trace_path, trace_path)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/scripts/fix_java_command_file_cp.py b/build/scripts/fix_java_command_file_cp.py
new file mode 100644
index 0000000000..fc87048c32
--- /dev/null
+++ b/build/scripts/fix_java_command_file_cp.py
@@ -0,0 +1,34 @@
+import sys
+import os
+import argparse
+import subprocess
+import platform
+
+
+def fix_files(args):
+ args = args[:]
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--build-root', default=None)
+ args, tail = parser.parse_known_args(args)
+ for idx, arg in list(enumerate(tail)):
+ if arg.startswith('@') and os.path.isfile(arg[1:]):
+ with open(arg[1:]) as f:
+ fixed = [i.strip() for i in f]
+ if args.build_root:
+ fixed = [os.path.join(args.build_root, i) for ln in fixed for i in ln.split(os.path.pathsep)]
+ fixed = os.pathsep.join([i.strip() for i in fixed])
+ fixed_name = list(os.path.splitext(arg))
+ fixed_name[0] += '_fixed'
+ fixed_name = ''.join(fixed_name)
+ with open(fixed_name[1:], 'w') as f:
+ f.write(fixed)
+ tail[idx:idx + 1] = [fixed_name]
+ return tail
+
+
+if __name__ == '__main__':
+ args = fix_files(sys.argv[1:])
+ if platform.system() == 'Windows':
+ sys.exit(subprocess.Popen(args).wait())
+ else:
+ os.execv(args[0], args)
diff --git a/build/scripts/fix_msvc_output.py b/build/scripts/fix_msvc_output.py
new file mode 100644
index 0000000000..183a442e1f
--- /dev/null
+++ b/build/scripts/fix_msvc_output.py
@@ -0,0 +1,43 @@
+import subprocess
+import sys
+
+import process_command_files as pcf
+import process_whole_archive_option as pwa
+
+
+def out2err(cmd):
+ return subprocess.Popen(cmd, stdout=sys.stderr).wait()
+
+
+def decoding_needed(strval):
+ if sys.version_info >= (3, 0, 0):
+ return isinstance(strval, bytes)
+ else:
+ return False
+
+
+def out2err_cut_first_line(cmd):
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ first_line = True
+ while True:
+ line = p.stdout.readline()
+ line = line.decode('utf-8') if decoding_needed(line) else line
+ if not line:
+ break
+ if first_line:
+ sys.stdout.write(line)
+ first_line = False
+ else:
+ sys.stderr.write(line)
+ return p.wait()
+
+
+if __name__ == '__main__':
+ mode = sys.argv[1]
+ args, wa_peers, wa_libs = pwa.get_whole_archive_peers_and_libs(pcf.skip_markers(sys.argv[2:]))
+ cmd = pwa.ProcessWholeArchiveOption('WINDOWS', wa_peers, wa_libs).construct_cmd(args)
+ run = out2err
+ if mode in ('cl', 'ml'):
+ # First line of cl.exe and ml64.exe stdout is useless: it prints input file
+ run = out2err_cut_first_line
+ sys.exit(run(cmd))
diff --git a/build/scripts/fs_tools.py b/build/scripts/fs_tools.py
new file mode 100644
index 0000000000..63ca00932e
--- /dev/null
+++ b/build/scripts/fs_tools.py
@@ -0,0 +1,117 @@
+from __future__ import print_function
+
+import os
+import platform
+import sys
+import shutil
+import errno
+
+import process_command_files as pcf
+
+
+def link_or_copy(src, dst, trace={}):
+ if dst not in trace:
+ trace[dst] = src
+
+ try:
+ if platform.system().lower() == 'windows':
+ shutil.copy(src, dst)
+ else:
+ os.link(src, dst)
+ except OSError as e:
+ if e.errno == errno.EEXIST:
+ if dst in trace:
+ print(
+ '[[bad]]link_or_copy: copy collision found - tried to copy {} to {} which was copied earlier from {}[[rst]]'.format(
+ src, dst, trace[dst]
+ ),
+ file=sys.stderr,
+ )
+ else:
+ print('[[bad]]link_or_copy: destination file already exists: {}[[rst]]'.format(dst), file=sys.stderr)
+ if e.errno == errno.ENOENT:
+ print('[[bad]]link_or_copy: source file doesn\'t exists: {}[[rst]]'.format(src), file=sys.stderr)
+ raise
+
+
+if __name__ == '__main__':
+ mode = sys.argv[1]
+ args = pcf.get_args(sys.argv[2:])
+
+ if mode == 'copy':
+ shutil.copy(args[0], args[1])
+ elif mode == 'copy_tree_no_link':
+ dst = args[1]
+ shutil.copytree(
+ args[0], dst, ignore=lambda dirname, names: [n for n in names if os.path.islink(os.path.join(dirname, n))]
+ )
+ elif mode == 'copy_files':
+ src = args[0]
+ dst = args[1]
+ files = open(args[2]).read().strip().split()
+ for f in files:
+ s = os.path.join(src, f)
+ d = os.path.join(dst, f)
+ if os.path.exists(d):
+ continue
+ try:
+ os.makedirs(os.path.dirname(d))
+ except OSError:
+ pass
+ shutil.copy(s, d)
+ elif mode == 'copy_all_files':
+ src = args[0]
+ dst = args[1]
+ for root, _, files in os.walk(src):
+ for f in files:
+ if os.path.islink(os.path.join(root, f)):
+ continue
+ file_dst = os.path.join(dst, os.path.relpath(root, src), f)
+ if os.path.exists(file_dst):
+ continue
+ try:
+ os.makedirs(os.path.dirname(file_dst))
+ except OSError:
+ pass
+ shutil.copy(os.path.join(root, f), file_dst)
+ elif mode == 'rename_if_exists':
+ if os.path.exists(args[0]):
+ shutil.move(args[0], args[1])
+ elif mode == 'rename':
+ targetdir = os.path.dirname(args[1])
+ if targetdir and not os.path.exists(targetdir):
+ os.makedirs(os.path.dirname(args[1]))
+ shutil.move(args[0], args[1])
+ elif mode == 'remove':
+ for f in args:
+ try:
+ if os.path.isfile(f) or os.path.islink(f):
+ os.remove(f)
+ else:
+ shutil.rmtree(f)
+ except OSError:
+ pass
+ elif mode == 'link_or_copy':
+ link_or_copy(args[0], args[1])
+ elif mode == 'link_or_copy_to_dir':
+ assert len(args) > 1
+ start = 0
+ if args[0] == '--no-check':
+            if len(args) == 2:
+ sys.exit()
+ start = 1
+ dst = args[-1]
+ for src in args[start:-1]:
+ link_or_copy(src, os.path.join(dst, os.path.basename(src)))
+ elif mode == 'cat':
+ with open(args[0], 'w') as dst:
+ for input_name in args[1:]:
+ with open(input_name) as src:
+ dst.write(src.read())
+ elif mode == 'md':
+ try:
+ os.makedirs(args[0])
+ except OSError:
+ pass
+ else:
+ raise Exception('unsupported tool %s' % mode)
diff --git a/build/scripts/gen_aar_gradle_script.py b/build/scripts/gen_aar_gradle_script.py
new file mode 100644
index 0000000000..67c3ac2ef3
--- /dev/null
+++ b/build/scripts/gen_aar_gradle_script.py
@@ -0,0 +1,284 @@
+import argparse
+import os
+import tarfile
+
+FLAT_DIRS_REPO_TEMPLATE='flatDir {{ dirs {dirs} }}\n'
+MAVEN_REPO_TEMPLATE='maven {{ url "{repo}" }}\n'
+KEYSTORE_TEMLATE='signingConfigs {{ debug {{ storeFile file("{keystore}") }} }}\n'
+
+DO_NOT_STRIP = '''\
+ packagingOptions {
+ doNotStrip "*/arm64-v8a/*.so"
+ doNotStrip "*/armeabi-v7a/*.so"
+ doNotStrip "*/x86_64/*.so"
+ doNotStrip "*/x86/*.so"
+ }
+'''
+
+AAR_TEMPLATE = """\
+ext.jniLibsDirs = [
+ {jni_libs_dirs}
+]
+
+ext.resDirs = [
+ {res_dirs}
+]
+
+ext.assetsDirs = [
+ {assets_dirs}
+]
+
+ext.javaDirs = [
+ {java_dirs}
+]
+
+def aidlDirs = [
+ {aidl_dirs}
+]
+
+ext.bundles = [
+ {bundles}
+]
+
+ext.androidArs = [
+ {aars}
+]
+
+ext.compileOnlyAndroidArs = [
+ {compile_only_aars}
+]
+
+def minVersion = 21
+def compileVersion = 30
+def targetVersion = 30
+def buildVersion = '30.0.3'
+
+import com.android.build.gradle.LibraryPlugin
+import java.nio.file.Files
+import java.nio.file.Paths
+import java.util.regex.Matcher
+import java.util.regex.Pattern
+import java.util.zip.ZipFile
+
+
+apply plugin: 'com.github.dcendents.android-maven'
+
+buildDir = "$projectDir/build"
+
+if (!ext.has("packageSuffix"))
+ ext.packageSuffix = ""
+
+buildscript {{
+// repositories {{
+// jcenter()
+// mavenCentral()
+// }}
+
+ repositories {{
+ {maven_repos}
+ }}
+
+ dependencies {{
+ classpath 'com.android.tools.build:gradle:4.0.2'
+ classpath 'com.github.dcendents:android-maven-gradle-plugin:1.5'
+ }}
+}}
+
+apply plugin: LibraryPlugin
+
+repositories {{
+// flatDir {{
+// dirs System.env.PKG_ROOT + '/bundle'
+// }}
+// maven {{
+// url "http://maven.google.com/"
+// }}
+// maven {{
+// url "http://artifactory.yandex.net/artifactory/public/"
+// }}
+
+ {flat_dirs_repo}
+
+ {maven_repos}
+}}
+
+android {{
+ {keystore}
+
+ compileSdkVersion compileVersion
+ buildToolsVersion buildVersion
+
+ defaultConfig {{
+ minSdkVersion minVersion
+ targetSdkVersion targetVersion
+ consumerProguardFiles '{proguard_rules}'
+ }}
+
+ sourceSets {{
+ main {{
+ manifest.srcFile '{manifest}'
+ jniLibs.srcDirs = jniLibsDirs
+ res.srcDirs = resDirs
+ assets.srcDirs = assetsDirs
+ java.srcDirs = javaDirs
+ aidl.srcDirs = aidlDirs
+ }}
+ // We don't use this feature, so we set it to nonexisting directory
+ androidTest.setRoot('bundle/tests')
+ }}
+
+ {do_not_strip}
+
+ dependencies {{
+ for (bundle in bundles)
+ compile("$bundle") {{
+ transitive = true
+ }}
+ for (bundle in androidArs)
+ compile(bundle) {{
+ transitive = true
+ }}
+ for (bundle in compileOnlyAndroidArs)
+ compileOnly(bundle)
+ }}
+
+ android.libraryVariants.all {{ variant ->
+ def suffix = variant.buildType.name.capitalize()
+
+ def sourcesJarTask = project.tasks.create(name: "sourcesJar${{suffix}}", type: Jar) {{
+ classifier = 'sources'
+ from android.sourceSets.main.java.srcDirs
+ include '**/*.java'
+ eachFile {{ fcd ->
+ def segments = fcd.relativePath.segments
+ if (segments[0] == 'impl') {{
+ fcd.relativePath = new RelativePath(true, segments.drop(1))
+ }}
+ }}
+ includeEmptyDirs = false
+ }}
+
+ def manifestFile = android.sourceSets.main.manifest.srcFile
+ def manifestXml = new XmlParser().parse(manifestFile)
+
+ def packageName = manifestXml['@package']
+ def groupName = packageName.tokenize('.')[0..-2].join('.')
+
+ def androidNs = new groovy.xml.Namespace("http://schemas.android.com/apk/res/android")
+ def packageVersion = manifestXml.attributes()[androidNs.versionName]
+
+ def writePomTask = project.tasks.create(name: "writePom${{suffix}}") {{
+ pom {{
+ project {{
+ groupId groupName
+ version packageVersion
+ packaging 'aar'
+ }}
+ }}.writeTo("$buildDir/${{rootProject.name}}$packageSuffix-pom.xml")
+ }}
+
+ tasks["bundle${{suffix}}Aar"].dependsOn sourcesJarTask
+ tasks["bundle${{suffix}}Aar"].dependsOn writePomTask
+ }}
+}}
+
+"""
+
+
+def gen_build_script(args):
+
+ def wrap(items):
+ return ',\n '.join('"{}"'.format(x) for x in items)
+
+ bundles = []
+ bundles_dirs = set(args.flat_repos)
+ for bundle in args.bundles:
+ dir_name, base_name = os.path.split(bundle)
+ assert(len(dir_name) > 0 and len(base_name) > 0)
+ name, ext = os.path.splitext(base_name)
+ assert(len(name) > 0 and ext == '.aar')
+ bundles_dirs.add(dir_name)
+ bundles.append('com.yandex:{}@aar'.format(name))
+
+ if len(bundles_dirs) > 0:
+ flat_dirs_repo = FLAT_DIRS_REPO_TEMPLATE.format(dirs=wrap(bundles_dirs))
+ else:
+ flat_dirs_repo = ''
+
+ maven_repos = ''.join(MAVEN_REPO_TEMPLATE.format(repo=repo) for repo in args.maven_repos)
+
+ if args.keystore:
+ keystore = KEYSTORE_TEMLATE.format(keystore=args.keystore)
+ else:
+ keystore = ''
+
+ if args.do_not_strip:
+ do_not_strip = DO_NOT_STRIP
+ else:
+ do_not_strip = ''
+
+ return AAR_TEMPLATE.format(
+ aars=wrap(args.aars),
+ compile_only_aars=wrap(args.compile_only_aars),
+ aidl_dirs=wrap(args.aidl_dirs),
+ assets_dirs=wrap(args.assets_dirs),
+ bundles=wrap(bundles),
+ do_not_strip=do_not_strip,
+ flat_dirs_repo=flat_dirs_repo,
+ java_dirs=wrap(args.java_dirs),
+ jni_libs_dirs=wrap(args.jni_libs_dirs),
+ keystore=keystore,
+ manifest=args.manifest,
+ maven_repos=maven_repos,
+ proguard_rules=args.proguard_rules,
+ res_dirs=wrap(args.res_dirs),
+ )
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--aars', nargs='*', default=[])
+ parser.add_argument('--compile-only-aars', nargs='*', default=[])
+ parser.add_argument('--aidl-dirs', nargs='*', default=[])
+ parser.add_argument('--assets-dirs', nargs='*', default=[])
+ parser.add_argument('--bundle-name', nargs='?', default='default-bundle-name')
+ parser.add_argument('--bundles', nargs='*', default=[])
+ parser.add_argument('--do-not-strip', action='store_true')
+ parser.add_argument('--flat-repos', nargs='*', default=[])
+ parser.add_argument('--java-dirs', nargs='*', default=[])
+ parser.add_argument('--jni-libs-dirs', nargs='*', default=[])
+ parser.add_argument('--keystore', default=None)
+ parser.add_argument('--manifest', required=True)
+ parser.add_argument('--maven-repos', nargs='*', default=[])
+ parser.add_argument('--output-dir', required=True)
+ parser.add_argument('--peers', nargs='*', default=[])
+ parser.add_argument('--proguard-rules', nargs='?', default=None)
+ parser.add_argument('--res-dirs', nargs='*', default=[])
+ args = parser.parse_args()
+
+ if args.proguard_rules is None:
+ args.proguard_rules = os.path.join(args.output_dir, 'proguard-rules.txt')
+ with open(args.proguard_rules, 'w') as f:
+ pass
+
+ for index, jsrc in enumerate(filter(lambda x: x.endswith('.jsrc'), args.peers)):
+ jsrc_dir = os.path.join(args.output_dir, 'jsrc_{}'.format(str(index)))
+ os.makedirs(jsrc_dir)
+ with tarfile.open(jsrc, 'r') as tar:
+ tar.extractall(path=jsrc_dir)
+ args.java_dirs.append(jsrc_dir)
+
+ args.build_gradle = os.path.join(args.output_dir, 'build.gradle')
+ args.settings_gradle = os.path.join(args.output_dir, 'settings.gradle')
+ args.gradle_properties = os.path.join(args.output_dir, 'gradle.properties')
+
+ content = gen_build_script(args)
+ with open(args.build_gradle, 'w') as f:
+ f.write(content)
+
+ with open(args.gradle_properties, 'w') as f:
+ f.write('android.useAndroidX=true')
+
+ if args.bundle_name:
+ with open(args.settings_gradle, 'w') as f:
+ f.write('rootProject.name = "{}"'.format(args.bundle_name))
diff --git a/build/scripts/gen_java_codenav_entry.py b/build/scripts/gen_java_codenav_entry.py
new file mode 100644
index 0000000000..ff0a5c737d
--- /dev/null
+++ b/build/scripts/gen_java_codenav_entry.py
@@ -0,0 +1,57 @@
+import argparse
+import datetime
+import os
+import subprocess
+import sys
+import tarfile
+
+
+def extract_kindexes(kindexes):
+ for kindex in kindexes:
+ with tarfile.TarFile(kindex) as tf:
+ for fname in [i for i in tf.getnames() if i.endswith('.kzip')]:
+ tf.extract(fname)
+ yield fname
+
+
+def just_do_it(java, kythe, kythe_to_proto, out_name, binding_only, kindexes):
+ temp_out_name = out_name + '.temp'
+ kindex_inputs = list(extract_kindexes(kindexes))
+ open(temp_out_name, 'w').close()
+ start = datetime.datetime.now()
+ for kindex in kindex_inputs:
+ print >> sys.stderr, '[INFO] Processing:', kindex
+ indexer_start = datetime.datetime.now()
+ p = subprocess.Popen([java, '-jar', os.path.join(kythe, 'indexers/java_indexer.jar'), kindex], stdout=subprocess.PIPE)
+ indexer_out, _ = p.communicate()
+ print >> sys.stderr, '[INFO] Indexer execution time:', (datetime.datetime.now() - indexer_start).total_seconds(), 'seconds'
+ if p.returncode:
+ raise Exception('java_indexer failed with exit code {}'.format(p.returncode))
+ dedup_start = datetime.datetime.now()
+ p = subprocess.Popen([os.path.join(kythe, 'tools/dedup_stream')], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ dedup_out, _ = p.communicate(indexer_out)
+ print >> sys.stderr, '[INFO] Dedup execution time:', (datetime.datetime.now() - dedup_start).total_seconds(), 'seconds'
+ if p.returncode:
+ raise Exception('dedup_stream failed with exit code {}'.format(p.returncode))
+ entrystream_start = datetime.datetime.now()
+ p = subprocess.Popen([os.path.join(kythe, 'tools/entrystream'), '--write_json'], stdin=subprocess.PIPE, stdout=open(temp_out_name, 'a'))
+ p.communicate(dedup_out)
+ if p.returncode:
+ raise Exception('entrystream failed with exit code {}'.format(p.returncode))
+ print >> sys.stderr, '[INFO] Entrystream execution time:', (datetime.datetime.now() - entrystream_start).total_seconds(), 'seconds'
+ preprocess_start = datetime.datetime.now()
+ subprocess.check_call([kythe_to_proto, '--preprocess-entry', '--entries', temp_out_name, '--out', out_name] + (['--only-binding-data'] if binding_only else []))
+ print >> sys.stderr, '[INFO] Preprocessing execution time:', (datetime.datetime.now() - preprocess_start).total_seconds(), 'seconds'
+ print >> sys.stderr, '[INFO] Total execution time:', (datetime.datetime.now() - start).total_seconds(), 'seconds'
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--java", help="java path")
+ parser.add_argument("--kythe", help="kythe path")
+ parser.add_argument("--kythe-to-proto", help="kythe_to_proto tool path")
+ parser.add_argument("--out-name", help="entries json out name")
+ parser.add_argument("--binding-only", action="store_true", default=False, help="filter only binding data")
+ parser.add_argument("kindexes", nargs='*')
+ args = parser.parse_args()
+ just_do_it(args.java, args.kythe, args.kythe_to_proto, args.out_name, args.binding_only, args.kindexes)
diff --git a/build/scripts/gen_java_codenav_protobuf.py b/build/scripts/gen_java_codenav_protobuf.py
new file mode 100644
index 0000000000..aee8cfe6c3
--- /dev/null
+++ b/build/scripts/gen_java_codenav_protobuf.py
@@ -0,0 +1,22 @@
+import argparse
+import os
+
+
+def just_do_it(kythe_to_proto, entries, out_name, build_file, source_root):
+ with open(build_file) as f:
+ classpath = os.pathsep.join([line.strip() for line in f])
+ os.execv(
+ kythe_to_proto,
+ [kythe_to_proto, '--sources-rel-root', 'fake_arcadia_root', '--entries', entries, '--out', out_name, '--classpath', classpath, '--arcadia-root', source_root]
+ )
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--kythe-to-proto", help="kythe_to_proto tool path")
+ parser.add_argument("--entries", help="entries json path")
+ parser.add_argument("--out-name", help="protbuf out name")
+ parser.add_argument("--build-file", help="build file( containing classpath )")
+ parser.add_argument("--source-root", help="source root")
+ args = parser.parse_args()
+ just_do_it(args.kythe_to_proto, args.entries, args.out_name, args.build_file, args.source_root)
diff --git a/build/scripts/gen_mx_table.py b/build/scripts/gen_mx_table.py
new file mode 100644
index 0000000000..187c21c539
--- /dev/null
+++ b/build/scripts/gen_mx_table.py
@@ -0,0 +1,75 @@
+import sys
+
+tmpl = """
+#include "yabs_mx_calc_table.h"
+
+#include <kernel/matrixnet/mn_sse.h>
+
+#include <library/cpp/archive/yarchive.h>
+
+#include <util/memory/blob.h>
+#include <util/generic/hash.h>
+#include <util/generic/ptr.h>
+#include <util/generic/singleton.h>
+
+using namespace NMatrixnet;
+
+extern "C" {
+ extern const unsigned char MxFormulas[];
+ extern const ui32 MxFormulasSize;
+}
+
+namespace {
+ struct TFml: public TBlob, public TMnSseInfo {
+ inline TFml(const TBlob& b)
+ : TBlob(b)
+ , TMnSseInfo(Data(), Size())
+ {
+ }
+ };
+
+ struct TFormulas: public THashMap<size_t, TAutoPtr<TFml>> {
+ inline TFormulas() {
+ TBlob b = TBlob::NoCopy(MxFormulas, MxFormulasSize);
+ TArchiveReader ar(b);
+ %s
+ }
+
+ inline const TMnSseInfo& at(size_t n) const noexcept {
+ return *find(n)->second;
+ }
+ };
+
+ %s
+
+ static func_descr_t yabs_funcs[] = {
+ %s
+ };
+}
+
+yabs_mx_calc_table_t yabs_mx_calc_table = {YABS_MX_CALC_VERSION, 10000, 0, yabs_funcs};
+"""
+
+if __name__ == '__main__':
+ init = []
+ body = []
+ defs = {}
+
+ for i in sys.argv[1:]:
+ name = i.replace('.', '_')
+ num = long(name.split('_')[1])
+
+ init.append('(*this)[%s] = new TFml(ar.ObjectBlobByKey("%s"));' % (num, '/' + i))
+
+ f1 = 'static void yabs_%s(size_t count, const float** args, double* res) {Singleton<TFormulas>()->at(%s).DoCalcRelevs(args, res, count);}' % (name, num)
+ f2 = 'static size_t yabs_%s_factor_count() {return Singleton<TFormulas>()->at(%s).MaxFactorIndex() + 1;}' % (name, num)
+
+ body.append(f1)
+ body.append(f2)
+
+ d1 = 'yabs_%s' % name
+ d2 = 'yabs_%s_factor_count' % name
+
+ defs[num] = '{%s, %s}' % (d1, d2)
+
+ print tmpl % ('\n'.join(init), '\n\n'.join(body), ',\n'.join((defs.get(i, '{nullptr, nullptr}') for i in range(0, 10000))))
diff --git a/build/scripts/gen_py3_reg.py b/build/scripts/gen_py3_reg.py
new file mode 100644
index 0000000000..149c094898
--- /dev/null
+++ b/build/scripts/gen_py3_reg.py
@@ -0,0 +1,34 @@
+import sys
+
+template = '''
+struct PyObject;
+extern "C" int PyImport_AppendInittab(const char* name, PyObject* (*initfunc)());
+extern "C" PyObject* {1}();
+
+namespace {
+ struct TRegistrar {
+ inline TRegistrar() {
+ // TODO Collect all modules and call PyImport_ExtendInittab once
+ PyImport_AppendInittab("{0}", {1});
+ }
+ } REG;
+}
+'''
+
+
+def mangle(name):
+ if '.' not in name:
+ return name
+ return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))
+
+if __name__ == '__main__':
+ if len(sys.argv) != 3:
+ print >>sys.stderr, 'Usage: <path/to/gen_py_reg.py> <python_module_name> <output_file>'
+ print >>sys.stderr, 'Passed: ' + ' '.join(sys.argv)
+ sys.exit(1)
+
+ with open(sys.argv[2], 'w') as f:
+ modname = sys.argv[1]
+ initname = 'PyInit_' + mangle(modname)
+ code = template.replace('{0}', modname).replace('{1}', initname)
+ f.write(code)
diff --git a/build/scripts/gen_py_protos.py b/build/scripts/gen_py_protos.py
new file mode 100644
index 0000000000..08397472f9
--- /dev/null
+++ b/build/scripts/gen_py_protos.py
@@ -0,0 +1,67 @@
+import os
+from os import path
+import shutil
+import subprocess
+import sys
+import tempfile
+import argparse
+import re
+
+
+OUT_DIR_ARG = '--python_out='
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--suffixes", nargs="*", default=[])
+ parser.add_argument("protoc_args", nargs=argparse.REMAINDER)
+ script_args = parser.parse_args()
+
+ args = script_args.protoc_args
+
+ if args[0] == "--":
+ args = args[1:]
+
+ out_dir_orig = None
+ out_dir_temp = None
+ plugin_out_dirs_orig = {}
+ for i in range(len(args)):
+ if args[i].startswith(OUT_DIR_ARG):
+ assert not out_dir_orig, 'Duplicate "{0}" param'.format(OUT_DIR_ARG)
+ out_dir_orig = args[i][len(OUT_DIR_ARG):]
+ out_dir_temp = tempfile.mkdtemp(dir=out_dir_orig)
+ args[i] = OUT_DIR_ARG + out_dir_temp
+ continue
+
+ match = re.match(r"^(--(\w+)_out=).*", args[i])
+ if match:
+ plugin_out_dir_arg = match.group(1)
+ plugin = match.group(2)
+ assert plugin not in plugin_out_dirs_orig, 'Duplicate "{0}" param'.format(plugin_out_dir_arg)
+ plugin_out_dirs_orig[plugin] = args[i][len(plugin_out_dir_arg):]
+ assert plugin_out_dirs_orig[plugin] == out_dir_orig, 'Params "{0}" and "{1}" expected to have the same value'.format(OUT_DIR_ARG, plugin_out_dir_arg)
+ args[i] = plugin_out_dir_arg + out_dir_temp
+
+ assert out_dir_temp, 'Param "{0}" not found'.format(OUT_DIR_ARG)
+
+ retcode = subprocess.call(args)
+ assert not retcode, 'Protoc failed for command {}'.format(' '.join(args))
+
+ for root_temp, dirs, files in os.walk(out_dir_temp):
+ sub_dir = path.relpath(root_temp, out_dir_temp)
+ root_orig = path.join(out_dir_orig, sub_dir)
+ for d in dirs:
+ d_orig = path.join(root_orig, d)
+ if not path.exists(d_orig):
+ os.mkdir(d_orig)
+ for f in files:
+ f_orig = f
+ for suf in script_args.suffixes:
+ if f.endswith(suf):
+ f_orig = f[:-len(suf)] + "__int__" + suf
+ break
+ os.rename(path.join(root_temp, f), path.join(root_orig, f_orig))
+ shutil.rmtree(out_dir_temp)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/scripts/gen_py_reg.py b/build/scripts/gen_py_reg.py
new file mode 100644
index 0000000000..1560135ae8
--- /dev/null
+++ b/build/scripts/gen_py_reg.py
@@ -0,0 +1,32 @@
+import sys
+
+template = '''
+extern "C" void PyImport_AppendInittab(const char* name, void (*fn)(void));
+extern "C" void {1}();
+
+namespace {
+ struct TRegistrar {
+ inline TRegistrar() {
+ PyImport_AppendInittab("{0}", {1});
+ }
+ } REG;
+}
+'''
+
+
+def mangle(name):
+ if '.' not in name:
+ return name
+ return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))
+
+if __name__ == '__main__':
+ if len(sys.argv) != 3:
+ print >>sys.stderr, 'Usage: <path/to/gen_py_reg.py> <python_module_name> <output_file>'
+ print >>sys.stderr, 'Passed: ' + ' '.join(sys.argv)
+ sys.exit(1)
+
+ with open(sys.argv[2], 'w') as f:
+ modname = sys.argv[1]
+ initname = 'init' + mangle(modname)
+ code = template.replace('{0}', modname).replace('{1}', initname)
+ f.write(code)
diff --git a/build/scripts/gen_swiftc_output_map.py b/build/scripts/gen_swiftc_output_map.py
new file mode 100644
index 0000000000..01ce85f256
--- /dev/null
+++ b/build/scripts/gen_swiftc_output_map.py
@@ -0,0 +1,15 @@
+import json
+import sys
+
+
+def just_do_it(args):
+ source_root, build_root, out_file, srcs = args[0], args[1], args[2], args[3:]
+ assert(len(srcs))
+ result_obj = {}
+ for src in srcs:
+ result_obj[src] = {'object': src.replace(source_root, build_root) + '.o'}
+ with open(out_file, 'w') as of:
+ of.write(json.dumps(result_obj))
+
+if __name__ == '__main__':
+ just_do_it(sys.argv[1:])
diff --git a/build/scripts/gen_tasklet_reg.py b/build/scripts/gen_tasklet_reg.py
new file mode 100644
index 0000000000..0f7f66ad51
--- /dev/null
+++ b/build/scripts/gen_tasklet_reg.py
@@ -0,0 +1,51 @@
+import argparse
+
+TEMPLATE = '''\
+{includes}\
+#include <tasklet/v1/runtime/lib/{language}_wrapper.h>
+#include <tasklet/v1/runtime/lib/registry.h>
+
+static const NTasklet::TRegHelper REG(
+ "{name}",
+ new NTasklet::{wrapper}
+);
+'''
+
+WRAPPER = {
+ 'cpp': 'TCppWrapper<{impl}>()',
+ 'js': 'TJsWrapper("{impl}")',
+ 'go': 'TGoWrapper("{impl}")',
+ 'py': 'TPythonWrapper("{impl}")',
+ 'java': 'TJavaWrapper("{impl}", "{py_wrapper}")',
+}
+
+
+def parse_args():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('name')
+ parser.add_argument('output')
+ parser.add_argument('-l', '--lang', choices=WRAPPER, required=True)
+ parser.add_argument('-i', '--impl', required=True)
+ parser.add_argument('-w', '--wrapper', required=False)
+ parser.add_argument('includes', nargs='*')
+
+ return parser.parse_args()
+
+
+if __name__ == '__main__':
+ args = parse_args()
+
+ includes = ''.join(
+ '#include <{}>\n'.format(include)
+ for include in args.includes
+ )
+
+ code = TEMPLATE.format(
+ includes=includes,
+ language=args.lang,
+ name=args.name,
+ wrapper=WRAPPER[args.lang].format(impl=args.impl, py_wrapper=args.wrapper),
+ )
+
+ with open(args.output, 'w') as f:
+ f.write(code)
diff --git a/build/scripts/gen_test_apk_gradle_script.py b/build/scripts/gen_test_apk_gradle_script.py
new file mode 100644
index 0000000000..b3a4f89e46
--- /dev/null
+++ b/build/scripts/gen_test_apk_gradle_script.py
@@ -0,0 +1,193 @@
+import argparse
+import os
+import tarfile
+import xml.etree.ElementTree as etree
+
+FLAT_DIRS_REPO_TEMPLATE='flatDir {{ dirs {dirs} }}\n'
+MAVEN_REPO_TEMPLATE='maven {{ url "{repo}" }}\n'
+KEYSTORE_TEMLATE='signingConfigs {{ debug {{ storeFile file("{keystore}") }} }}\n'
+
+TEST_APK_TEMPLATE = """\
+ext.jniLibsDirs = [
+ {jni_libs_dirs}
+]
+ext.resDirs = [
+ {res_dirs}
+]
+ext.javaDirs = [
+ {java_dirs}
+]
+ext.bundles = [
+ {bundles}
+]
+
+buildscript {{
+// repositories {{
+// jcenter()
+// }}
+
+ repositories {{
+ {maven_repos}
+ }}
+
+ dependencies {{
+ classpath 'com.android.tools.build:gradle:3.5.3'
+ }}
+}}
+
+apply plugin: 'com.android.application'
+
+repositories {{
+// maven {{
+// url "http://maven.google.com/"
+// }}
+// maven {{
+// url "http://artifactory.yandex.net/artifactory/public/"
+// }}
+// flatDir {{
+// dirs System.env.PKG_ROOT + '/bundle'
+// }}
+
+ {flat_dirs_repo}
+
+ {maven_repos}
+}}
+
+dependencies {{
+ for (bundle in bundles) {{
+ compile("$bundle")
+ }}
+}}
+
+android {{
+ {keystore}
+
+ compileSdkVersion 30
+ buildToolsVersion "30.0.3"
+
+
+ defaultConfig {{
+ minSdkVersion 21
+ targetSdkVersion 30
+ applicationId "{app_id}"
+ }}
+
+ sourceSets {{
+ main {{
+ manifest.srcFile 'Manifest.xml'
+ jniLibs.srcDirs = jniLibsDirs
+ res.srcDirs = resDirs
+ java.srcDirs = javaDirs
+ }}
+ }}
+
+ applicationVariants.all {{ variant ->
+ variant.outputs.each {{ output ->
+ def fileName = "$projectDir/output/{app_id}.apk"
+ output.outputFileName = new File(output.outputFile.parent, fileName).getName()
+ }}
+ }}
+
+ dependencies {{
+ implementation 'com.google.android.gms:play-services-location:21.0.1'
+ implementation 'com.google.android.gms:play-services-gcm:17.0.0'
+ implementation 'com.evernote:android-job:1.2.6'
+ implementation 'androidx.annotation:annotation:1.1.0'
+ implementation 'androidx.core:core:1.1.0'
+ }}
+}}
+"""
+
+
+def create_native_properties(output_dir, library_name):
+ native_properties_file = os.path.join(output_dir, 'native_library_name.xml')
+ resources = etree.Element('resources')
+ name = etree.SubElement(resources, 'item', dict(name='native_library_name', type='string'))
+ name.text = library_name
+ etree.ElementTree(resources).write(native_properties_file, xml_declaration=True, encoding='utf-8')
+
+
+def gen_build_script(args):
+ def wrap(items):
+ return ',\n '.join('"{}"'.format(x) for x in items)
+
+ bundles = []
+ bundles_dirs = set(args.flat_repos)
+ for bundle in args.bundles:
+ dir_name, base_name = os.path.split(bundle)
+ assert(len(dir_name) > 0 and len(base_name) > 0)
+ name, ext = os.path.splitext(base_name)
+ assert(len(name) > 0 and ext == '.aar')
+ bundles_dirs.add(dir_name)
+ bundles.append('com.yandex:{}@aar'.format(name))
+
+ if len(bundles_dirs) > 0:
+ flat_dirs_repo = FLAT_DIRS_REPO_TEMPLATE.format(dirs=wrap(bundles_dirs))
+ else:
+ flat_dirs_repo = ''
+
+ maven_repos = ''.join(MAVEN_REPO_TEMPLATE.format(repo=repo) for repo in args.maven_repos)
+
+ if args.keystore:
+ keystore = KEYSTORE_TEMLATE.format(keystore=args.keystore)
+ else:
+ keystore = ''
+
+ return TEST_APK_TEMPLATE.format(
+ app_id=args.app_id,
+ jni_libs_dirs=wrap(args.jni_libs_dirs),
+ res_dirs=wrap(args.res_dirs),
+ java_dirs=wrap(args.java_dirs),
+ maven_repos=maven_repos,
+ bundles=wrap(bundles),
+ flat_dirs_repo=flat_dirs_repo,
+ keystore=keystore,
+ )
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--aars', nargs='*', default=[])
+ parser.add_argument('--app-id', required=True)
+ parser.add_argument('--assets-dirs', nargs='*', default=[])
+ parser.add_argument('--bundles', nargs='*', default=[])
+ parser.add_argument('--bundle-name', nargs='?', default=None)
+ parser.add_argument('--java-dirs', nargs='*', default=[])
+ parser.add_argument('--jni-libs-dirs', nargs='*', default=[])
+ parser.add_argument('--library-name', required=True)
+ parser.add_argument('--manifest', required=True)
+ parser.add_argument('--flat-repos', nargs='*', default=[])
+ parser.add_argument('--maven-repos', nargs='*', default=[])
+ parser.add_argument('--output-dir', required=True)
+ parser.add_argument('--peers', nargs='*', default=[])
+ parser.add_argument('--keystore', default=None)
+ parser.add_argument('--res-dirs', nargs='*', default=[])
+ args = parser.parse_args()
+
+ for index, jsrc in enumerate(filter(lambda x: x.endswith('.jsrc'), args.peers)):
+ jsrc_dir = os.path.join(args.output_dir, 'jsrc_{}'.format(str(index)))
+ os.makedirs(jsrc_dir)
+ with tarfile.open(jsrc, 'r') as tar:
+ tar.extractall(path=jsrc_dir)
+ args.java_dirs.append(jsrc_dir)
+
+ args.build_gradle = os.path.join(args.output_dir, 'build.gradle')
+ args.settings_gradle = os.path.join(args.output_dir, 'settings.gradle')
+ args.gradle_properties = os.path.join(args.output_dir, 'gradle.properties')
+
+ content = gen_build_script(args)
+ with open(args.build_gradle, 'w') as f:
+ f.write(content)
+
+ with open(args.gradle_properties, 'w') as f:
+ f.write('''android.enableJetifier=true
+ android.useAndroidX=true
+ org.gradle.jvmargs=-Xmx8192m -XX:MaxPermSize=512m''')
+
+ if args.bundle_name:
+ with open(args.settings_gradle, 'w') as f:
+ f.write('rootProject.name = "{}"'.format(args.bundle_name))
+
+ values_dir = os.path.join(args.output_dir, 'res', 'values')
+ os.makedirs(values_dir)
+ create_native_properties(values_dir, args.library_name)
diff --git a/build/scripts/gen_yql_python_udf.py b/build/scripts/gen_yql_python_udf.py
new file mode 100644
index 0000000000..13b5898117
--- /dev/null
+++ b/build/scripts/gen_yql_python_udf.py
@@ -0,0 +1,55 @@
import sys

# C++ translation-unit template for registering a YQL Python UDF module.
# The @PLACEHOLDER@ markers are substituted by main().
TEMPLATE = """
#include <yql/udfs/common/python/python_udf/python_udf.h>

#include <ydb/library/yql/public/udf/udf_registrator.h>

#if @WITH_LIBRA@
#include <yql/udfs/quality/libra/module/module.h>
#endif

using namespace NKikimr::NUdf;

#ifdef BUILD_UDF

#if @WITH_LIBRA@
LIBRA_MODULE(TLibraModule, "Libra@MODULE_NAME@");
#endif

extern "C" UDF_API void Register(IRegistrator& registrator, ui32 flags) {
    RegisterYqlPythonUdf(registrator, flags, TStringBuf("@MODULE_NAME@"), TStringBuf("@PACKAGE_NAME@"), EPythonFlavor::@FLAVOR@);
#if @WITH_LIBRA@
    RegisterHelper<TLibraModule>(registrator);
#endif
}

extern "C" UDF_API ui32 AbiVersion() {
    return CurrentAbiVersion();
}

extern "C" UDF_API void SetBackTraceCallback(TBackTraceCallback callback) {
    SetBackTraceCallbackImpl(callback);
}

#endif
"""


def main():
    """Write the C++ registration stub for one YQL Python UDF.

    argv: <flavor> <module name> <package name> <output path> <libra flag 0/1>
    """
    assert len(sys.argv) == 6
    flavor, module_name, package_name, path, libra_flag = sys.argv[1:]
    substitutions = {
        '@MODULE_NAME@': module_name,
        '@PACKAGE_NAME@': package_name,
        '@FLAVOR@': flavor,
        '@WITH_LIBRA@': libra_flag,
    }
    source = TEMPLATE.strip()
    for placeholder, value in substitutions.items():
        source = source.replace(placeholder, value)
    with open(path, 'w') as out:
        out.write(source)
        out.write('\n')


if __name__ == "__main__":
    main()
diff --git a/build/scripts/generate_mf.py b/build/scripts/generate_mf.py
new file mode 100644
index 0000000000..a44a969980
--- /dev/null
+++ b/build/scripts/generate_mf.py
@@ -0,0 +1,113 @@
+import json
+import logging
+import optparse
+import os
+import sys
+import io
+
+import process_command_files as pcf
+
class BadMfError(Exception):
    """Signals invalid or unparsable module manifest (.mf) data."""
+
+
class GplNotAllowed(Exception):
    """Signals that a GPL-family license was found where it is not permitted."""
+
+
def process_quotes(s):
    """Strip one pair of surrounding quotes (single or double) if present."""
    if s and s[0] == s[-1] and s[0] in '\'"':
        return s[1:-1]
    return s
+
+
def parse_args():
    """Split the command line into license names, peers, credit files and options.

    '-Ya,lics', '-Ya,peers' and '-Ya,credits' act as section markers: values
    that follow go into the matching list until the next marker or until a
    regular '-' option resets collection back to the free arguments.
    Returns (lics, peers, credits, options).
    """
    raw = pcf.get_args(sys.argv[1:])
    lics, peers, free_args, credits = [], [], [], []
    sections = {
        '-Ya,lics': lics,
        '-Ya,peers': peers,
        '-Ya,credits': credits,
    }
    bucket = free_args
    for arg in raw:
        if arg in sections:
            bucket = sections[arg]
        elif arg and arg.startswith('-'):
            # A regular option ends any section and is kept as a free arg.
            bucket = free_args
            bucket.append(arg)
        else:
            bucket.append(arg)

    parser = optparse.OptionParser()
    parser.add_option('--build-root')
    parser.add_option('--module-name')
    parser.add_option('-o', '--output')
    parser.add_option('-c', '--credits-output')
    parser.add_option('-t', '--type')
    opts, _ = parser.parse_args(free_args)
    return lics, peers, credits, opts
+
+
def generate_header(meta):
    """Banner line that introduces a module's section in the credits file."""
    dashes = '-' * 20
    return dashes + meta.get('path', 'Unknown module') + dashes
+
+
def generate_mf():
    """Collect license metadata for a module and write it as a JSON .mf file.

    Reads license names, peer module paths and license-text files from the
    command line (see parse_args), merges peer .mf files into the dependency
    tree for non-LIBRARY modules, and optionally writes a flat credits text
    file containing all license texts.
    """
    lics, peers, credits, options = parse_args()

    meta = {
        'module_name': options.module_name,
        'path': os.path.dirname(options.output),
        'licenses': lics,
        'dependencies': [],
        'license_texts': ''
    }

    build_root = options.build_root
    file_name = os.path.join(build_root, options.output)

    if options.type != 'LIBRARY':
        # Final artifacts aggregate the metadata of all their peers.
        for rel_filename in peers:
            with open(os.path.join(build_root, rel_filename + '.mf')) as peer_file:
                peer_meta = json.load(peer_file)
                meta['dependencies'].append(peer_meta)

    if credits:
        union_texts = []
        for texts_file in credits:
            with open(process_quotes(texts_file)) as f:
                union_texts.append(f.read())
        meta['license_texts'] = '\n\n'.join(union_texts)

    if options.credits_output:
        final_credits = []
        if meta['license_texts']:
            final_credits.append(generate_header(meta) + '\n' + meta['license_texts'])
        for peer in peers:
            candidate = os.path.join(build_root, peer + '.mf')
            with open(candidate) as src:
                data = json.loads(src.read())
                texts = data.get('license_texts')
                if texts:
                    candidate_text = generate_header(data) + '\n' + texts
                    # FIX: the original tested `isinstance(..., unicode)`,
                    # which raises NameError on Python 3.  On Python 2 a
                    # non-str result is unicode and gets encoded to UTF-8
                    # bytes, exactly as before; on Python 3 it is already str.
                    if not isinstance(candidate_text, str):
                        candidate_text = candidate_text.encode('utf-8')
                    final_credits.append(candidate_text)

        with io.open(options.credits_output, 'w', encoding='utf-8') as f:
            data = '\n\n'.join(final_credits)
            # FIX: str.decode does not exist on Python 3; only decode when the
            # joined result is a byte string (Python 2, where str is bytes).
            if isinstance(data, bytes):
                data = data.decode('utf-8')
            f.write(data)

    with open(file_name, 'w') as mf_file:
        json.dump(meta, mf_file, indent=4)
+
+
if __name__ == '__main__':
    try:
        generate_mf()
    except Exception as e:
        # Top-level boundary: report the failure on stderr and signal the
        # build system with a non-zero exit code.
        sys.stderr.write(str(e) + '\n')
        sys.exit(1)
diff --git a/build/scripts/generate_pom.py b/build/scripts/generate_pom.py
new file mode 100644
index 0000000000..d91bce6249
--- /dev/null
+++ b/build/scripts/generate_pom.py
@@ -0,0 +1,336 @@
+from __future__ import print_function
+
+import sys
+import xml.etree.ElementTree as et
+import argparse
+import os
+import json
+import base64
+import re
+
+
# Default Maven coordinates and XML boilerplate for the generated pom.xml.
DEFAULT_YANDEX_GROUP_ID = 'ru.yandex'
DEFAULT_NAMESPACE = 'http://maven.apache.org/POM/4.0.0'
XSI_NAMESPACE = 'http://www.w3.org/2001/XMLSchema-instance'
SCHEMA_LOCATION = 'http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd'
MODEL_VERSION = '4.0.0'

# maven-compiler-plugin coordinates and the Java language level to emit.
MAVEN_PLUGIN_GROUP_ID = 'org.apache.maven.plugins'
MAVEN_PLUGIN_ARTIFACT_ID = 'maven-compiler-plugin'
MAVEN_PLUGIN_VERSION = '3.3'
JAVA_LANGUAGE_LEVEL = '1.8'

# build-helper-maven-plugin: extra source dirs and attached artifacts.
MAVEN_BUILD_HELPER_GROUP_ID = 'org.codehaus.mojo'
MAVEN_BUILD_HELPER_ARTIFACT_ID = 'build-helper-maven-plugin'
MAVEN_BUILD_HELPER_VERSION = '1.9.1'

# exec-maven-plugin: code-generating Java programs run at generate-sources.
MAVEN_EXEC_GROUP_ID = 'org.codehaus.mojo'
MAVEN_EXEC_ARTIFACT_ID = 'exec-maven-plugin'
MAVEN_EXEC_VERSION = '1.5.0'

# maven-surefire-plugin: test-classpath dependency exclusions.
MAVEN_SUREFIRE_GROUP_ID = 'org.apache.maven.plugins'
MAVEN_SUREFIRE_ARTIFACT_ID = 'maven-surefire-plugin'
MAVEN_SUREFIRE_VERSION = '2.12.2'
+
+
def target_from_contrib(target_path):
    """Return True when the target lives under the contrib/ subtree."""
    return target_path.startswith('contrib')
+
+
def split_artifacts(s):
    """Split 'group:artifact:version:classifier[::excl1::excl2...]' apart.

    Returns [coords] or [coords, excl1, ...]; [] when the string does not
    contain four colon-separated fields.
    """
    m = re.match('^([^:]*:[^:]*:[^:]*:[^:]*)(.*)$', s)
    if m is None or not m.groups():
        return []
    coords, tail = m.group(1), m.group(2)
    if not tail.startswith('::'):
        return [coords]
    return [coords] + tail.split('::')[1:]
+
+
def parse_coord_file(target_coords):
    """Parse a coordinates file into (target, deps, excludes).

    The first 'D=' line carries the target's own coordinates; every further
    'D=' line is a 'group:artifact' dependency and each 'E=' line is an
    exclusion.  deps and excludes are sets of 'group:artifact' strings.
    """
    deps = set()
    excludes = set()
    target = None

    with open(target_coords, 'r') as coords:
        for raw in coords:
            line = raw.strip('\n')
            payload = line[2:]
            if line.startswith('D='):
                if target is None:
                    target = payload
                else:
                    group, artifact = payload.split(':')[0:2]
                    deps.add('{}:{}'.format(group, artifact))
            elif line.startswith('E='):
                group, artifact = payload.split(':')[0:2]
                excludes.add('{}:{}'.format(group, artifact))
    return target, deps, excludes
+
+
def applied_excludes(deps, excludes):
    """Sorted list of the dependencies that are also excluded."""
    return sorted(deps & excludes)
+
+
def build_pom_and_export_to_maven(**kwargs):
    """Generate a Maven pom.xml for an arcadia Java target.

    Target coordinates and dependencies come either directly from kwargs or
    from coordinate files (from_coord_files mode, see parse_coord_file).
    The POM document is built with xml.etree and written to kwargs['pom_path'].
    Keyword names mirror the command-line options declared in __main__.
    """
    target_path = kwargs.get('target_path')
    target = kwargs.get('target')
    pom_path = kwargs.get('pom_path')
    source_dirs = kwargs.get('source_dirs')
    output_dir = kwargs.get('output_dir')
    final_name = kwargs.get('final_name')
    packaging = kwargs.get('packaging')
    test_target_dependencies = kwargs.get('test_target_dependencies')
    test_target_dependencies_exclude = kwargs.get('test_target_dependencies_exclude')
    modules_path = kwargs.get('modules_path')
    base64_prop_vars = kwargs.get('properties')
    prop_vars = kwargs.get('property')
    external_jars = kwargs.get('external_jars')
    resources = kwargs.get('resources')
    # Each --run-java-programs value is a base64-encoded JSON spec.
    run_java_programs = [json.loads(base64.b64decode(i)) for i in kwargs.get('run_java_programs')]
    test_source_dirs = kwargs.get('test_source_dirs')
    test_resource_dirs = kwargs.get('test_resource_dirs')

    from_coord_files = kwargs.get('from_coord_files')
    deps_coords = kwargs.get('deps_coords')
    target_coords = kwargs.get('target_coords')
    if from_coord_files:
        target, _, all_excludes = parse_coord_file(target_coords)
        # TODO: ymake java -> jbuild java values format conversion must be removed
        target = ':'.join(target.split(':')[:3])
        target_dependencies = []
        for dep in deps_coords:
            dep_coord, dep_deps, _ = parse_coord_file(dep)
            excluded = applied_excludes(dep_deps, all_excludes)
            if len(excluded) > 0:
                target_dependencies.append('{}::{}'.format(dep_coord, '::'.join(excluded)))
            else:
                target_dependencies.append(dep_coord)
    else:
        target_dependencies = kwargs.get('target_dependencies')

    if kwargs.get('vcs_info') is not None:
        # Substitute the VCS revision into '{vcs_revision}' version templates.
        with open(kwargs.get('vcs_info'), 'r') as vcs_json:
            vcs_revision = json.load(vcs_json).get('ARCADIA_SOURCE_LAST_CHANGE')
        target = target.format(vcs_revision=vcs_revision)
        target_dependencies = [dep.format(vcs_revision=vcs_revision) for dep in target_dependencies]

    modules = []

    def _indent(elem, level=0):
        # Pretty-print helper (ElementTree gained indent() only in 3.9).
        ind = "\n" + level * " "
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = ind + " "
            if not elem.tail or not elem.tail.strip():
                elem.tail = ind
            for elem in elem:
                _indent(elem, level + 1)
            if not elem.tail or not elem.tail.strip():
                elem.tail = ind
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = ind

    project = et.Element(
        '{}{}{}project'.format('{', DEFAULT_NAMESPACE, '}'),
        attrib={'{}{}{}schemaLocation'.format('{', XSI_NAMESPACE, '}'): SCHEMA_LOCATION}
    )

    group_id, artifact_id, version = target.split(':')

    et.SubElement(project, 'modelVersion').text = MODEL_VERSION
    et.SubElement(project, 'groupId').text = group_id
    et.SubElement(project, 'artifactId').text = artifact_id
    et.SubElement(project, 'version').text = version
    et.SubElement(project, 'packaging').text = packaging

    properties = et.SubElement(project, 'properties')
    et.SubElement(properties, 'project.build.sourceEncoding').text = 'UTF-8'

    if base64_prop_vars:
        for property, value in json.loads(base64.b64decode(base64_prop_vars)).items():
            et.SubElement(properties, property).text = value
    for rawprop in prop_vars:
        property, sep, value = rawprop.partition('=')
        if sep != '=':
            # FIX: typo 'propertyr' corrected in the error message.
            print("Can't find property name and property value in {}. No '=' symbol found".format(rawprop))
            sys.exit(1)
        et.SubElement(properties, property).text = value

    if modules_path:
        with open(modules_path) as f:
            modules = [i.strip() for i in f if i.strip()]

    if modules:
        modules_el = et.SubElement(project, 'modules')
        for module in modules:
            et.SubElement(modules_el, 'module').text = module

    build = et.SubElement(project, 'build')
    if source_dirs:
        # The first dir becomes <sourceDirectory>; the remainder are added
        # below via build-helper-maven-plugin (Maven allows only one here).
        et.SubElement(build, 'sourceDirectory').text = source_dirs[0]
        source_dirs = source_dirs[1:]
    if test_source_dirs:
        et.SubElement(build, 'testSourceDirectory').text = test_source_dirs[0]
        test_source_dirs = test_source_dirs[1:]
    if output_dir:
        et.SubElement(build, 'outputDirectory').text = output_dir
    if final_name:
        et.SubElement(build, 'finalName').text = final_name
    if resources:
        resource_element = et.SubElement(et.SubElement(build, 'resources'), 'resource')
        et.SubElement(resource_element, 'directory').text = '${basedir}'
        includes = et.SubElement(resource_element, 'includes')
        for resource in resources:
            et.SubElement(includes, 'include').text = resource
    if test_resource_dirs:
        test_resource_element = et.SubElement(build, 'testResources')
        for test_resource_dir in test_resource_dirs:
            et.SubElement(et.SubElement(test_resource_element, 'testResource'), 'directory').text = '${basedir}' + (('/' + test_resource_dir) if test_resource_dir != '.' else '')

    plugins = et.SubElement(build, 'plugins')

    if packaging != 'pom':
        maven_plugin = et.SubElement(plugins, 'plugin')
        et.SubElement(maven_plugin, 'groupId').text = MAVEN_PLUGIN_GROUP_ID
        et.SubElement(maven_plugin, 'artifactId').text = MAVEN_PLUGIN_ARTIFACT_ID
        et.SubElement(maven_plugin, 'version').text = MAVEN_PLUGIN_VERSION
        configuration = et.SubElement(maven_plugin, 'configuration')
        et.SubElement(configuration, 'source').text = JAVA_LANGUAGE_LEVEL
        et.SubElement(configuration, 'target').text = JAVA_LANGUAGE_LEVEL

    if source_dirs or external_jars or test_source_dirs:
        build_helper_plugin = et.SubElement(plugins, 'plugin')
        et.SubElement(build_helper_plugin, 'groupId').text = MAVEN_BUILD_HELPER_GROUP_ID
        et.SubElement(build_helper_plugin, 'artifactId').text = MAVEN_BUILD_HELPER_ARTIFACT_ID
        et.SubElement(build_helper_plugin, 'version').text = MAVEN_BUILD_HELPER_VERSION
        executions = et.SubElement(build_helper_plugin, 'executions')
        if source_dirs:
            execution = et.SubElement(executions, 'execution')
            et.SubElement(execution, 'id').text = 'add-source'
            et.SubElement(execution, 'phase').text = 'generate-sources'
            et.SubElement(et.SubElement(execution, 'goals'), 'goal').text = 'add-source'
            sources = et.SubElement(et.SubElement(execution, 'configuration'), 'sources')
            for source_dir in source_dirs:
                et.SubElement(sources, 'source').text = source_dir
        if external_jars:
            execution = et.SubElement(executions, 'execution')
            et.SubElement(execution, 'id').text = 'attach-artifacts'
            et.SubElement(execution, 'phase').text = 'generate-sources'
            et.SubElement(et.SubElement(execution, 'goals'), 'goal').text = 'attach-artifact'
            artifacts = et.SubElement(et.SubElement(execution, 'configuration'), 'artifacts')
            for external_jar in external_jars:
                external_artifact = et.SubElement(artifacts, 'artifact')
                et.SubElement(external_artifact, 'file').text = '${basedir}/' + external_jar
                et.SubElement(external_artifact, 'type').text = 'jar'
        if test_source_dirs:
            execution = et.SubElement(executions, 'execution')
            et.SubElement(execution, 'id').text = 'add-test-source'
            et.SubElement(execution, 'phase').text = 'generate-test-sources'
            et.SubElement(et.SubElement(execution, 'goals'), 'goal').text = 'add-test-source'
            sources = et.SubElement(et.SubElement(execution, 'configuration'), 'sources')
            # FIX: the original iterated `source_dirs` here, re-adding the main
            # source dirs and silently dropping the remaining test source dirs
            # (the test_source_dirs[1:] slice computed above was never used).
            for source_dir in test_source_dirs:
                et.SubElement(sources, 'source').text = source_dir

    if run_java_programs:
        exec_plugin = et.SubElement(plugins, 'plugin')
        et.SubElement(exec_plugin, 'groupId').text = MAVEN_EXEC_GROUP_ID
        et.SubElement(exec_plugin, 'artifactId').text = MAVEN_EXEC_ARTIFACT_ID
        et.SubElement(exec_plugin, 'version').text = MAVEN_EXEC_VERSION
        jp_dependencies = et.SubElement(exec_plugin, 'dependencies')
        executions = et.SubElement(exec_plugin, 'executions')
        for java_program in run_java_programs:
            execution = et.SubElement(executions, 'execution')
            et.SubElement(execution, 'phase').text = 'generate-sources'
            et.SubElement(et.SubElement(execution, 'goals'), 'goal').text = 'java'
            jp_configuration = et.SubElement(execution, 'configuration')
            # First non-flag word of the command is the main class.
            main_cls, args = None, []
            for word in java_program['cmd']:
                if not main_cls and not word.startswith('-'):
                    main_cls = word
                else:
                    args.append(word)
            et.SubElement(jp_configuration, 'mainClass').text = main_cls
            et.SubElement(jp_configuration, 'includePluginDependencies').text = 'true'
            et.SubElement(jp_configuration, 'includeProjectDependencies').text = 'false'
            if args:
                jp_arguments = et.SubElement(jp_configuration, 'arguments')
                for arg in args:
                    et.SubElement(jp_arguments, 'argument').text = arg
            if java_program['deps']:
                for jp_dep in java_program['deps']:
                    jp_dependency = et.SubElement(jp_dependencies, 'dependency')
                    jp_g, jp_a, jp_v = jp_dep.split(':')
                    et.SubElement(jp_dependency, 'groupId').text = jp_g
                    et.SubElement(jp_dependency, 'artifactId').text = jp_a
                    et.SubElement(jp_dependency, 'version').text = jp_v
                    et.SubElement(jp_dependency, 'type').text = 'jar'

    if target_dependencies + test_target_dependencies:
        dependencies = et.SubElement(project, 'dependencies')
        for target_dependency in target_dependencies + test_target_dependencies:
            dependency = et.SubElement(dependencies, 'dependency')
            dependency_info = split_artifacts(target_dependency)

            group_id, artifact_id, version, classifier = dependency_info[0].split(':')

            et.SubElement(dependency, 'groupId').text = group_id
            et.SubElement(dependency, 'artifactId').text = artifact_id
            et.SubElement(dependency, 'version').text = version
            if classifier:
                et.SubElement(dependency, 'classifier').text = classifier
            if target_dependency in test_target_dependencies:
                et.SubElement(dependency, 'scope').text = 'test'

            if len(dependency_info) > 1:
                exclusions = et.SubElement(dependency, 'exclusions')
                for exclude in dependency_info[1:]:
                    group_id, artifact_id = exclude.split(':')
                    exclusion_el = et.SubElement(exclusions, 'exclusion')
                    et.SubElement(exclusion_el, 'groupId').text = group_id
                    et.SubElement(exclusion_el, 'artifactId').text = artifact_id

    if test_target_dependencies_exclude:
        surefire_plugin = et.SubElement(plugins, 'plugin')
        et.SubElement(surefire_plugin, 'groupId').text = MAVEN_SUREFIRE_GROUP_ID
        et.SubElement(surefire_plugin, 'artifactId').text = MAVEN_SUREFIRE_ARTIFACT_ID
        et.SubElement(surefire_plugin, 'version').text = MAVEN_SUREFIRE_VERSION
        classpath_excludes = et.SubElement(et.SubElement(surefire_plugin, 'configuration'), 'classpathDependencyExcludes')
        for classpath_exclude in test_target_dependencies_exclude:
            et.SubElement(classpath_excludes, 'classpathDependencyExclude').text = classpath_exclude

    et.register_namespace('', DEFAULT_NAMESPACE)
    et.register_namespace('xsi', XSI_NAMESPACE)

    _indent(project)

    et.ElementTree(project).write(pom_path)
    sys.stderr.write("[MAVEN EXPORT] Generated {} file for target {}\n".format(os.path.basename(pom_path), target_path))
+
+
if __name__ == '__main__':
    # Command-line front-end: each option maps 1:1 onto a kwarg of
    # build_pom_and_export_to_maven (argparse turns dashes into underscores).
    parser = argparse.ArgumentParser()
    parser.add_argument('--from-coord-files', action='store_true')
    parser.add_argument('--deps-coords', action='append', default=[])
    parser.add_argument('--target-coords', action='store')
    parser.add_argument('--target-path', action='store', default='')
    parser.add_argument('--target', action='store')
    parser.add_argument('--pom-path', action='store')
    parser.add_argument('--source-dirs', action='append', default=[])
    parser.add_argument('--external-jars', action='append', default=[])
    parser.add_argument('--resources', action='append', default=[])
    parser.add_argument('--run-java-programs', action='append', default=[])
    parser.add_argument('--output-dir')
    parser.add_argument('--final-name')
    parser.add_argument('--packaging', default='jar')
    parser.add_argument('--target-dependencies', action='append', default=[])
    parser.add_argument('--test-target-dependencies', action='append', default=[])
    parser.add_argument('--test-target-dependencies-exclude', action='append', default=[])
    parser.add_argument('--modules-path', action='store')
    parser.add_argument('--properties')
    parser.add_argument('--property', action='append', default=[])
    parser.add_argument('--test-source-dirs', action='append', default=[])
    parser.add_argument('--test-resource-dirs', action='append', default=[])
    parser.add_argument('--vcs-info', action='store', default=None)
    args = parser.parse_args()

    build_pom_and_export_to_maven(**vars(args))
diff --git a/build/scripts/go_fake_include/go_asm.h b/build/scripts/go_fake_include/go_asm.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/build/scripts/go_fake_include/go_asm.h
diff --git a/build/scripts/go_proto_wrapper.py b/build/scripts/go_proto_wrapper.py
new file mode 100644
index 0000000000..159bd42c93
--- /dev/null
+++ b/build/scripts/go_proto_wrapper.py
@@ -0,0 +1,81 @@
+from __future__ import absolute_import, unicode_literals
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+OUT_DIR_FLAG_PATTERN = re.compile(r'^(--go(([-_]\w+))*_out=)')
+
+
def move_tree(src_root, dst_root):
    """Recursively move every file from src_root into dst_root.

    Directory structure is recreated under dst_root; files are moved with
    os.rename (same-filesystem move, overwrites existing files on POSIX).
    """
    for root, _, files in os.walk(src_root):
        rel_dir = os.path.relpath(root, src_root)
        dst_dir = os.path.join(dst_root, rel_dir)
        if not os.path.exists(dst_dir):
            # FIX: use makedirs instead of mkdir — dst_root itself may not
            # exist yet (e.g. a nested contrib prefix destination), in which
            # case mkdir fails with ENOENT.
            os.makedirs(dst_dir)
        for file in files:
            os.rename(os.path.join(root, file), os.path.join(dst_dir, file))
+
+
def main(arcadia_prefix, contrib_prefix, proto_namespace, args):
    """Wrap a protoc invocation for Go: redirect every --go*_out flag to a
    temporary directory, run protoc, then move the generated tree into the
    real output directory (handling vendored/contrib packages).

    arcadia_prefix:  Go import prefix of arcadia projects;
    contrib_prefix:  destination subdir for vendored (non-arcadia) packages;
    proto_namespace: namespace dir expected inside the generated tree;
    args:            the full protoc command line.
    Returns the process exit code to propagate (0 on success).
    """
    out_dir_orig = None
    out_dir_temp = None
    for i in range(len(args)):
        m = re.match(OUT_DIR_FLAG_PATTERN, args[i])
        if m:
            out_dir_flag = m.group(1)
            # The output dir follows the last ':' (plugin options precede it).
            index = max(len(out_dir_flag), args[i].rfind(':')+1)
            out_dir = args[i][index:]
            if out_dir_orig:
                # All --go*_out flags must agree on one output directory.
                assert out_dir_orig == out_dir, 'Output directories do not match: [{}] and [{}]'.format(out_dir_orig, out_dir)
            else:
                out_dir_orig = out_dir
                out_dir_temp = tempfile.mkdtemp(dir=out_dir_orig)
            # NOTE(review): '|' appears to be an escape for ',' introduced by
            # the caller's command emitter — confirm against the ymake side.
            args[i] = (args[i][:index] + out_dir_temp).replace('|', ',')
    assert out_dir_temp is not None, 'Output directory is not specified'

    try:
        subprocess.check_output(args, stdin=None, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        sys.stderr.write('{} returned non-zero exit code {}.\n{}\n'.format(' '.join(e.cmd), e.returncode, e.output.decode('utf-8')))
        return e.returncode

    # All Arcadia GO projects should have 'a.yandex-team.ru/' namespace prefix.
    # If the namespace doesn't start with 'a.yandex-team.ru/' prefix then this
    # project is from vendor directory under the root of Arcadia.
    out_dir_src = os.path.normpath(os.path.join(out_dir_temp, arcadia_prefix, proto_namespace))
    out_dir_dst = out_dir_orig
    is_from_contrib = False
    if not os.path.isdir(out_dir_src):
        is_from_contrib = True
        out_dir_src = out_dir_temp
        out_dir_dst = os.path.join(out_dir_orig, contrib_prefix)

    if not os.path.exists(out_dir_src) or is_from_contrib:
        # Nothing generated where expected: demand an explicit go_package
        # option in each input .proto before failing the build.
        protos = [x for x in args if x.endswith('.proto')]
        if not is_from_contrib or not all(x.startswith(contrib_prefix) for x in protos):
            proto_list = []
            option_re = re.compile(r'^\s*option\s+go_package\s*=\s*')
            for arg in protos:
                with open(arg, 'r') as f:
                    if not any([re.match(option_re, line) for line in f]):
                        proto_list.append(arg)
            if proto_list:
                sys.stderr.write(
                    '\nError: Option go_package is not specified in the following proto files: {}\n'
                    '\nNOTE! You can find detailed description of how to properly set go_package '
                    'option here https://wiki.yandex-team.ru/devrules/Go/#protobufigrpc'.format(', '.join(proto_list)))
                return 1

    move_tree(out_dir_src, out_dir_dst)

    shutil.rmtree(out_dir_temp)

    return 0


if __name__ == '__main__':
    # argv: arcadia_prefix contrib_prefix proto_namespace protoc-args...
    sys.exit(main(os.path.normpath(sys.argv[1]), os.path.normpath(sys.argv[2]), os.path.normpath(sys.argv[3]), sys.argv[4:]))
diff --git a/build/scripts/go_tool.py b/build/scripts/go_tool.py
new file mode 100644
index 0000000000..e57a12cc0f
--- /dev/null
+++ b/build/scripts/go_tool.py
@@ -0,0 +1,891 @@
+import argparse
+import copy
+import json
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+import threading
+import traceback
+from contextlib import contextmanager
+from functools import reduce
+
+import process_command_files as pcf
+import process_whole_archive_option as pwa
+
# Import-path prefixes used to classify Go packages (arcadia project,
# standard library snapshot, vendored third-party code).
arc_project_prefix = 'a.yandex-team.ru/'
# FIXME: make version-independent
std_lib_prefix = 'contrib/go/_std_1.19/src/'
vendor_prefix = 'vendor/'
# File extensions for `go vet` machine-readable info and text reports.
vet_info_ext = '.vet.out'
vet_report_ext = '.vet.txt'

# Suffix of cgo-generated sources that need source-root substitution
# (see preprocess_cgo1).
FIXED_CGO1_SUFFIX='.fixed.cgo1.go'

# NOTE(review): '-N' disables optimizations in the gc compiler; usage of
# this tuple is outside this chunk — confirm it is applied for debug builds.
COMPILE_OPTIMIZATION_FLAGS=('-N',)
+
+
def get_trimpath_args(args):
    """Return the compiler's -trimpath arguments when a trim map is set."""
    if args.trimpath:
        return ['-trimpath', args.trimpath]
    return []
+
+
def preprocess_cgo1(src_path, dst_path, source_root):
    """Copy a cgo1 source, substituting the arcadia source-root placeholder."""
    with open(src_path, 'r') as src:
        text = src.read()
    with open(dst_path, 'w') as dst:
        dst.write(text.replace('__ARCADIA_SOURCE_ROOT_PREFIX__', source_root))
+
+
def preprocess_args(args):
    """Normalize and enrich the parsed command line in place.

    Resolves Go toolchain binary paths, unpacks .gosrc archives into the
    output root, derives module/import paths relative to the build root and
    finally classifies the sources by extension (see classify_srcs).
    """
    # Temporary work around for noauto
    if args.cgo_srcs and len(args.cgo_srcs) > 0:
        cgo_srcs_set = set(args.cgo_srcs)
        args.srcs = [x for x in args.srcs if x not in cgo_srcs_set]

    args.pkg_root = os.path.join(args.toolchain_root, 'pkg')
    toolchain_tool_root = os.path.join(args.pkg_root, 'tool', '{}_{}'.format(args.host_os, args.host_arch))
    args.go_compile = os.path.join(toolchain_tool_root, 'compile')
    args.go_cgo = os.path.join(toolchain_tool_root, 'cgo')
    args.go_link = os.path.join(toolchain_tool_root, 'link')
    args.go_asm = os.path.join(toolchain_tool_root, 'asm')
    args.go_pack = os.path.join(toolchain_tool_root, 'pack')
    # args.vet is either boolean True (use the toolchain vet) or a path.
    args.go_vet = os.path.join(toolchain_tool_root, 'vet') if args.vet is True else args.vet
    args.output = os.path.normpath(args.output)
    args.vet_report_output = vet_report_output_name(args.output, args.vet_report_ext)
    args.trimpath = None
    if args.debug_root_map:
        # debug_root_map is 'name=dst[;name=dst...]' with names from the
        # fixed build/source/tools set; it becomes the -trimpath mapping.
        roots = {'build': args.build_root, 'source': args.source_root, 'tools': args.tools_root}
        replaces = []
        for root in args.debug_root_map.split(';'):
            src, dst = root.split('=', 1)
            assert src in roots
            replaces.append('{}=>{}'.format(roots[src], dst))
            del roots[src]
        assert len(replaces) > 0
        args.trimpath = ';'.join(replaces)
    args.build_root = os.path.normpath(args.build_root)
    args.build_root_dir = args.build_root + os.path.sep
    args.source_root = os.path.normpath(args.source_root)
    args.source_root_dir = args.source_root + os.path.sep
    args.output_root = os.path.normpath(args.output_root)
    args.import_map = {}
    args.module_map = {}
    if args.cgo_peers:
        args.cgo_peers = [x for x in args.cgo_peers if not x.endswith('.fake.pkg')]

    # Expand .gosrc tarballs: each archive entry replaces the archive itself.
    srcs = []
    for f in args.srcs:
        if f.endswith('.gosrc'):
            with tarfile.open(f, 'r') as tar:
                srcs.extend(os.path.join(args.output_root, src) for src in tar.getnames())
                tar.extractall(path=args.output_root)
        else:
            srcs.append(f)
    args.srcs = srcs

    assert args.mode == 'test' or args.test_srcs is None and args.xtest_srcs is None
    # add lexical order by basename for go sources
    args.srcs.sort(key=lambda x: os.path.basename(x))
    if args.test_srcs:
        args.srcs += sorted(args.test_srcs, key=lambda x: os.path.basename(x))
        del args.test_srcs
    if args.xtest_srcs:
        args.xtest_srcs.sort(key=lambda x: os.path.basename(x))

    # compute root relative module dir path
    assert args.output is None or args.output_root == os.path.dirname(args.output)
    assert args.output_root.startswith(args.build_root_dir)
    args.module_path = args.output_root[len(args.build_root_dir):]
    args.source_module_dir = os.path.join(args.source_root, args.test_import_path or args.module_path) + os.path.sep
    assert len(args.module_path) > 0
    args.import_path, args.is_std = get_import_path(args.module_path)

    assert args.asmhdr is None or args.word == 'go'

    # Replace fixed cgo1 sources with substituted copies in the output root
    # (see preprocess_cgo1).
    srcs = []
    for f in args.srcs:
        if f.endswith(FIXED_CGO1_SUFFIX) and f.startswith(args.build_root_dir):
            path = os.path.join(args.output_root, '{}.cgo1.go'.format(os.path.basename(f[:-len(FIXED_CGO1_SUFFIX)])))
            srcs.append(path)
            preprocess_cgo1(f, path, args.source_root)
        else:
            srcs.append(f)
    args.srcs = srcs

    if args.extldflags:
        args.extldflags = pwa.ProcessWholeArchiveOption(args.targ_os).construct_cmd(args.extldflags)

    classify_srcs(args.srcs, args)
+
+
def compare_versions(version1, version2):
    """Compare two dotted version strings, ignoring any 'beta' tail.

    Returns 0 when equal, 1 when version1 is older than version2 and -1 when
    newer (note the inverted sign convention used by callers).
    """
    def normalize(version):
        cut = version.find('beta')
        core = version if cut < 0 else version[:cut]
        return tuple(part.zfill(8) for part in core.split('.'))

    left = normalize(version1)
    right = normalize(version2)
    if left == right:
        return 0
    return 1 if left < right else -1
+
+
def get_symlink_or_copyfile():
    """Best available 'link a file' primitive: os.symlink where supported,
    shutil.copyfile otherwise (Windows or platforms without symlink)."""
    link = getattr(os, 'symlink', None)
    if link is None or os.name == 'nt':
        return shutil.copyfile
    return link
+
+
def copy_args(args):
    """Shallow copy of the parsed-arguments namespace (attributes shared)."""
    return copy.copy(args)
+
+
def get_vendor_index(import_path):
    """Index just past the last '/vendor/' segment of import_path.

    Returns 0 when the path itself starts with the vendor prefix and -1 when
    no vendor segment is present at all.
    """
    pos = import_path.rfind('/' + vendor_prefix)
    if pos >= 0:
        return pos + 1
    return 0 if import_path.startswith(vendor_prefix) else pos
+
+
def get_import_path(module_path):
    """Map an arcadia-relative module path to (import_path, is_std_module).

    Standard-library snapshot paths lose their prefix and are flagged as std;
    vendored paths lose the vendor prefix; everything else is prefixed with
    the arcadia project namespace.
    """
    assert module_path
    import_path = module_path.replace('\\', '/')
    is_std = import_path.startswith(std_lib_prefix)
    if is_std:
        import_path = import_path[len(std_lib_prefix):]
    elif import_path.startswith(vendor_prefix):
        import_path = import_path[len(vendor_prefix):]
    else:
        import_path = arc_project_prefix + import_path
    assert import_path
    return import_path, is_std
+
+
def call(cmd, cwd, env=None):
    """Run cmd in cwd and return its combined stdout+stderr as text.

    Raises subprocess.CalledProcessError on a non-zero exit code.
    """
    # sys.stderr.write('{}\n'.format(' '.join(cmd)))
    return subprocess.check_output(cmd, stdin=None, stderr=subprocess.STDOUT, cwd=cwd, env=env, text=True)
+
+
def classify_srcs(srcs, args):
    """Partition srcs by extension into attributes on args (go/asm/obj/...)."""
    def with_ext(*exts):
        return [s for s in srcs if s.endswith(exts)]

    args.go_srcs = with_ext('.go')
    args.asm_srcs = with_ext('.s')
    args.objects = with_ext('.o', '.obj')
    args.symabis = with_ext('.symabis')
    args.sysos = with_ext('.syso')
+
+
def get_import_config_info(peers, gen_importmap, import_map=None, module_map=None):
    """Collect importcfg data for a package: import map and packagefile list.

    NOTE(review): this function reads the module-level ``args`` (set up by
    the command-line entry point) for args.build_root — it cannot be called
    before argument parsing has run; confirm against the __main__ section.
    """
    # FIX: avoid mutable default arguments (shared dict instances).
    if import_map is None:
        import_map = {}
    if module_map is None:
        module_map = {}
    info = {'importmap': [], 'packagefile': [], 'standard': {}}
    if gen_importmap:
        for key, value in import_map.items():
            info['importmap'].append((key, value))
    for peer in peers:
        peer_import_path, is_std = get_import_path(os.path.dirname(peer))
        if gen_importmap:
            index = get_vendor_index(peer_import_path)
            if index >= 0:
                index += len(vendor_prefix)
                info['importmap'].append((peer_import_path[index:], peer_import_path))
        info['packagefile'].append((peer_import_path, os.path.join(args.build_root, peer)))
        if is_std:
            info['standard'][peer_import_path] = True
    for key, value in module_map.items():
        info['packagefile'].append((key, value))
    return info
+
+
def create_import_config(peers, gen_importmap, import_map=None, module_map=None):
    """Write an importcfg file for the Go compiler and return its path.

    The file is always created (possibly empty).  It is a non-deleting
    temporary file: the caller is responsible for cleanup.
    """
    # FIX: avoid mutable default arguments (shared dict instances).
    if import_map is None:
        import_map = {}
    if module_map is None:
        module_map = {}
    lines = []
    info = get_import_config_info(peers, gen_importmap, import_map, module_map)
    for key in ('importmap', 'packagefile'):
        for item in info[key]:
            lines.append('{} {}={}'.format(key, *item))
    if len(lines) > 0:
        lines.append('')  # trailing newline
    content = '\n'.join(lines)
    # sys.stderr.writelines('{}\n'.format(l) for l in lines)
    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(content.encode('UTF-8'))
        return f.name
    # FIX: removed the unreachable `return None` — the `with` block above
    # always returns the temp file name.
+
+
def create_embed_config(args):
    """Write a go:embed configuration JSON (embedcfg) and return its path.

    Maps each embed pattern to the module-relative names of its files and
    each relative name back to its absolute source path.
    """
    config = {
        'Patterns': {},
        'Files': {},
    }
    for entry in args.embed:
        pattern = entry[0]
        if pattern.endswith('/**/*'):
            pattern = pattern[:-3]
        mapping = {
            os.path.relpath(path, args.source_module_dir).replace('\\', '/'): path
            for path in entry[1:]
        }
        config['Patterns'][pattern] = list(mapping)
        config['Files'].update(mapping)
    # sys.stderr.write('{}\n'.format(json.dumps(config, indent=4)))
    with tempfile.NamedTemporaryFile(delete=False, suffix='.embedcfg') as cfg:
        cfg.write(json.dumps(config).encode('UTF-8'))
        return cfg.name
+
+
def vet_info_output_name(path, ext=None):
    """Path of the machine-readable vet info file for a given output path."""
    return path + (ext if ext else vet_info_ext)
+
+
def vet_report_output_name(path, ext=None):
    """Path of the human-readable vet report file for a given output path."""
    return path + (ext if ext else vet_report_ext)
+
+
def get_source_path(args):
    """Module directory relative to the source root (test import path wins)."""
    if args.test_import_path:
        return args.test_import_path
    return args.module_path
+
+
def gen_vet_info(args):
    """Build the JSON 'vet config' describing this package for `go tool vet`.

    Mirrors the unitchecker package format: source files, import map, the
    packagefile locations of dependencies and the .vet.out output paths.
    """
    import_path = args.real_import_path if hasattr(args, 'real_import_path') else args.import_path
    info = get_import_config_info(args.peers, True, args.import_map, args.module_map)

    import_map = dict(info['importmap'])
    # FIXME(snermolaev): it seems that adding import map for 'fake' package
    # doesn't make any harm (it needs to be revised later)
    import_map['unsafe'] = 'unsafe'

    # Every package with a packagefile entry must also appear in the map.
    for (key, _) in info['packagefile']:
        if key not in import_map:
            import_map[key] = key

    data = {
        'ID': import_path,
        'Compiler': 'gc',
        'Dir': os.path.join(args.source_root, get_source_path(args)),
        'ImportPath': import_path,
        'GoFiles': [x for x in args.go_srcs if x.endswith('.go')],
        'NonGoFiles': [x for x in args.go_srcs if not x.endswith('.go')],
        'ImportMap': import_map,
        'PackageFile': dict(info['packagefile']),
        'Standard': dict(info['standard']),
        'PackageVetx': dict((key, vet_info_output_name(value)) for key, value in info['packagefile']),
        'VetxOnly': False,
        'VetxOutput': vet_info_output_name(args.output),
        'SucceedOnTypecheckFailure': False
    }
    # sys.stderr.write('{}\n'.format(json.dumps(data, indent=4)))
    return data
+
+
def create_vet_config(args, info):
    """Dump the vet package info to a persistent temp .cfg file; return its path."""
    with tempfile.NamedTemporaryFile(delete=False, suffix='.cfg') as cfg:
        cfg.write(json.dumps(info).encode('UTF-8'))
        return cfg.name
+
+
def decode_vet_report(json_report):
    """Turn raw `go vet -json` output (bytes) into a readable report string.

    Returns '' for empty input, the raw bytes unchanged when they are not
    valid UTF-8 JSON, otherwise one 'position: message' line per diagnostic.
    """
    if not json_report:
        return ''
    try:
        diagnostics = json.JSONDecoder().decode(json_report.decode('UTF-8'))
    except ValueError:
        # Not JSON (or not UTF-8): pass the raw output through untouched.
        return json_report
    lines = []
    for module_diags in diagnostics.values():
        for type_diags in module_diags.values():
            for diag in type_diags:
                lines.append('{}: {}'.format(diag['posn'], json.dumps(diag['message'])))
    return '\n'.join(lines)
+
+
def dump_vet_report(args, report):
    """Write the vet report to args.vet_report_output, replacing the build
    and source roots with the $B/$S placeholders. Empty reports are skipped."""
    if not report:
        return
    sanitized = report.replace(args.build_root, '$B').replace(args.source_root, '$S')
    with open(args.vet_report_output, 'w') as out:
        out.write(sanitized)
+
+
def read_vet_report(args):
    """Return the contents of args.vet_report_output, or '' when absent."""
    assert args
    if not os.path.exists(args.vet_report_output):
        return ''
    with open(args.vet_report_output, 'r') as src:
        return src.read()
+
+
def dump_vet_report_for_tests(args, *test_args_list):
    """Concatenate the vet reports of the given per-package builds (skipping
    None entries) and dump the result as the test's own vet report."""
    combined = ''
    for item in test_args_list:
        if item:
            combined += read_vet_report(item)
    dump_vet_report(args, combined)
+
+
def do_vet(args):
    """Run `go vet` over the package and dump its report.

    Raises subprocess.CalledProcessError (carrying vet's stderr as output)
    when the tool exits non-zero; the report is dumped even then, so
    diagnostics are never lost.
    """
    assert args.vet
    info = gen_vet_info(args)
    vet_config = create_vet_config(args, info)
    cmd = [args.go_vet, '-json']
    if args.vet_flags:
        cmd.extend(args.vet_flags)
    cmd.append(vet_config)
    # sys.stderr.write('>>>> [{}]\n'.format(' '.join(cmd)))
    p_vet = subprocess.Popen(cmd, stdin=None, stderr=subprocess.PIPE, stdout=subprocess.PIPE, cwd=args.source_root)
    vet_out, vet_err = p_vet.communicate()
    report = decode_vet_report(vet_out) if vet_out else ''
    dump_vet_report(args, report)
    if p_vet.returncode:
        raise subprocess.CalledProcessError(returncode=p_vet.returncode, cmd=cmd, output=vet_err)
+
+
def _do_compile_go(args):
    """Invoke the Go compiler for one package and pack the result.

    Builds the `compile` command line from args (trimpath, import config,
    embed config, asm header, symabis, user flags) and runs it in
    args.build_root via call().
    """
    import_path, is_std_module = args.import_path, args.is_std
    cmd = [
        args.go_compile,
        '-o',
        args.output,
        '-p',
        import_path if import_path != "unsafe" else "",
        '-D',
        '""',
    ]
    if args.lang:
        cmd.append('-lang=go{}'.format(args.lang))
    cmd.extend(get_trimpath_args(args))
    compiling_runtime = False
    if is_std_module:
        cmd.append('-std')
        # These std packages must be compiled in runtime mode (-+).
        if import_path in ('runtime', 'internal/abi', 'internal/bytealg', 'internal/cpu') or import_path.startswith('runtime/internal/'):
            cmd.append('-+')
            compiling_runtime = True
    import_config_name = create_import_config(args.peers, True, args.import_map, args.module_map)
    if import_config_name:
        cmd += ['-importcfg', import_config_name]
    else:
        if import_path == 'unsafe' or len(args.objects) > 0 or args.asmhdr:
            pass
        else:
            # -complete asserts the package has no non-Go parts.
            cmd.append('-complete')
    # if compare_versions('1.16', args.goversion) >= 0:
    if args.embed:
        embed_config_name = create_embed_config(args)
        cmd.extend(['-embedcfg', embed_config_name])
    if args.asmhdr:
        cmd += ['-asmhdr', args.asmhdr]
    # Use .symabis (starting from 1.12 version)
    if args.symabis:
        cmd += ['-symabis'] + args.symabis
    # If 1.12 <= version < 1.13 we have to pass -allabis for 'runtime' and 'runtime/internal/atomic'
    # if compare_versions('1.13', args.goversion) >= 0:
    #     pass
    # elif import_path in ('runtime', 'runtime/internal/atomic'):
    #     cmd.append('-allabis')
    compile_workers = '4'
    if args.compile_flags:
        if compiling_runtime:
            # Optimization flags are not allowed when compiling the runtime.
            cmd.extend(x for x in args.compile_flags if x not in COMPILE_OPTIMIZATION_FLAGS)
        else:
            cmd.extend(args.compile_flags)
        # Concurrent compilation is disabled for race/shared builds.
        if any([x in ('-race', '-shared') for x in args.compile_flags]):
            compile_workers = '1'
    cmd += ['-pack', '-c={}'.format(compile_workers)]
    cmd += args.go_srcs
    call(cmd, args.build_root)
+
+
class VetThread(threading.Thread):
    """Thread wrapper that captures any exception raised by its target so the
    parent thread can re-raise it with the original traceback."""

    def __init__(self, target, args):
        super(VetThread, self).__init__(target=target, args=args)
        # (type, value, traceback) of the captured exception, or None.
        self.exc_info = None

    def run(self):
        try:
            super(VetThread, self).run()
        # BUGFIX(idiom): explicit BaseException instead of a bare `except:` —
        # same coverage (everything is captured for the parent to re-raise),
        # but no longer hides the intent from linters.
        except BaseException:
            self.exc_info = sys.exc_info()

    def join_with_exception(self, reraise_exception):
        """Join the thread; if reraise_exception is true and the target failed,
        re-raise the captured exception (with its original traceback)."""
        self.join()
        if reraise_exception and self.exc_info:
            raise self.exc_info[0].with_traceback(self.exc_info[1], self.exc_info[2])
+
+
def do_compile_go(args):
    """Compile the package, running `go vet` concurrently when requested.

    raise_exception_from_vet stays False until compilation succeeds, so a
    compiler failure propagates instead of a (possibly secondary) vet error.
    """
    raise_exception_from_vet = False
    if args.vet:
        run_vet = VetThread(target=do_vet, args=(args,))
        run_vet.start()
    try:
        _do_compile_go(args)
        raise_exception_from_vet = True
    finally:
        if args.vet:
            run_vet.join_with_exception(raise_exception_from_vet)
+
+
def do_compile_asm(args):
    """Assemble a single .s source with the Go assembler into args.output."""
    def need_compiling_runtime(import_path):
        # Packages that must be assembled in runtime mode; from go1.17 on,
        # internal/bytealg needs it too.
        return import_path in ('runtime', 'reflect', 'syscall') or \
            import_path.startswith('runtime/internal/') or \
            compare_versions('1.17', args.goversion) >= 0 and import_path == 'internal/bytealg'

    # do_link_lib feeds sources here one at a time.
    assert(len(args.srcs) == 1 and len(args.asm_srcs) == 1)
    cmd = [args.go_asm]
    cmd += get_trimpath_args(args)
    cmd += ['-I', args.output_root, '-I', os.path.join(args.pkg_root, 'include')]
    cmd += ['-D', 'GOOS_' + args.targ_os, '-D', 'GOARCH_' + args.targ_arch, '-o', args.output]

    # if compare_versions('1.16', args.goversion) >= 0:
    cmd += ['-p', args.import_path]
    if need_compiling_runtime(args.import_path):
        cmd += ['-compiling-runtime']

    if args.asm_flags:
        cmd += args.asm_flags
    cmd += args.asm_srcs
    call(cmd, args.build_root)
+
+
def do_link_lib(args):
    """Build a Go package archive.

    Compiles the Go sources (emitting go_asm.h first when asm sources are
    present), assembles each .s file into its own object, then packs any
    extra objects/sysos into the resulting archive with `go pack r`.
    """
    if len(args.asm_srcs) > 0:
        asmargs = copy_args(args)
        # First pass: compile Go sources, emitting go_asm.h for the assembler.
        asmargs.asmhdr = os.path.join(asmargs.output_root, 'go_asm.h')
        do_compile_go(asmargs)
        # Then assemble each source separately into its own object file.
        for src in asmargs.asm_srcs:
            asmargs.srcs = [src]
            asmargs.asm_srcs = [src]
            asmargs.output = os.path.join(asmargs.output_root, os.path.basename(src) + '.o')
            do_compile_asm(asmargs)
            args.objects.append(asmargs.output)
    else:
        do_compile_go(args)
    if args.objects or args.sysos:
        cmd = [args.go_pack, 'r', args.output] + args.objects + args.sysos
        call(cmd, args.build_root)
+
+
def do_link_exe(args):
    """Link a Go executable (or c-shared dll).

    Builds the 'main' package archive via do_link_lib, then runs the Go
    linker with the import config, build mode and external linker flags.
    """
    assert args.extld is not None
    assert args.non_local_peers is not None
    compile_args = copy_args(args)
    compile_args.output = os.path.join(args.output_root, 'main.a')
    compile_args.real_import_path = compile_args.import_path
    compile_args.import_path = 'main'

    # Compile the VCS build info in only when a buildinfo peer is linked.
    if args.vcs and os.path.isfile(compile_args.vcs):
        build_info = os.path.join('library', 'go', 'core', 'buildinfo')
        if any([x.startswith(build_info) for x in compile_args.peers]):
            compile_args.go_srcs.append(compile_args.vcs)

    do_link_lib(compile_args)
    cmd = [args.go_link, '-o', args.output]
    import_config_name = create_import_config(args.peers + args.non_local_peers, False, args.import_map, args.module_map)
    if import_config_name:
        cmd += ['-importcfg', import_config_name]
    if args.link_flags:
        cmd += args.link_flags

    if args.buildmode:
        cmd.append('-buildmode={}'.format(args.buildmode))
    elif args.mode in ('exe', 'test'):
        cmd.append('-buildmode=exe')
    elif args.mode == 'dll':
        cmd.append('-buildmode=c-shared')
    else:
        assert False, 'Unexpected mode: {}'.format(args.mode)
    cmd.append('-extld={}'.format(args.extld))

    extldflags = []
    if args.extldflags is not None:
        # bool acts as a keep-everything predicate (all flags are non-empty
        # strings); under musl the default libc libs are filtered out for a
        # fully static link.
        filter_musl = bool
        if args.musl:
            cmd.append('-linkmode=external')
            extldflags.append('-static')
            filter_musl = lambda x: x not in ('-lc', '-ldl', '-lm', '-lpthread', '-lrt')
        extldflags += [x for x in args.extldflags if filter_musl(x)]
    cgo_peers = []
    if args.cgo_peers is not None and len(args.cgo_peers) > 0:
        # Group cgo libraries on linux so circular deps between them resolve.
        is_group = args.targ_os == 'linux'
        if is_group:
            cgo_peers.append('-Wl,--start-group')
        cgo_peers.extend(args.cgo_peers)
        if is_group:
            cgo_peers.append('-Wl,--end-group')
    # Replace the '--cgo-peers' placeholder with the real list (or append
    # the list when no placeholder is present).
    try:
        index = extldflags.index('--cgo-peers')
        extldflags = extldflags[:index] + cgo_peers + extldflags[index+1:]
    except ValueError:
        extldflags.extend(cgo_peers)
    if len(extldflags) > 0:
        cmd.append('-extldflags={}'.format(' '.join(extldflags)))
    cmd.append(compile_args.output)
    call(cmd, args.build_root)
+
+
def gen_cover_info(args):
    """Generate Go source lines that register coverage data with `testing`.

    args.cover_info entries are 'var:file' pairs; each is registered via
    coverRegisterFile against the _cover0 package imported by the generated
    test main.
    """
    lines = []
    lines.extend([
        """
var (
    coverCounters = make(map[string][]uint32)
    coverBlocks = make(map[string][]testing.CoverBlock)
)
        """,
        'func init() {',
    ])
    for var, file in (x.split(':') for x in args.cover_info):
        lines.append('    coverRegisterFile("{file}", _cover0.{var}.Count[:], _cover0.{var}.Pos[:], _cover0.{var}.NumStmt[:])'.format(file=file, var=var))
    lines.extend([
        '}',
        """
func coverRegisterFile(fileName string, counter []uint32, pos []uint32, numStmts []uint16) {
    if 3*len(counter) != len(pos) || len(counter) != len(numStmts) {
        panic("coverage: mismatched sizes")
    }
    if coverCounters[fileName] != nil {
        // Already registered.
        return
    }
    coverCounters[fileName] = counter
    block := make([]testing.CoverBlock, len(counter))
    for i := range counter {
        block[i] = testing.CoverBlock{
            Line0: pos[3*i+0],
            Col0: uint16(pos[3*i+2]),
            Line1: pos[3*i+1],
            Col1: uint16(pos[3*i+2]>>16),
            Stmts: numStmts[i],
        }
    }
    coverBlocks[fileName] = block
}
        """,
    ])
    return lines
+
+
def filter_out_skip_tests(tests, skip_tests):
    """Return `tests` with skipped entries removed.

    skip_tests may mix exact names and glob-like patterns containing '*'
    (matching any, possibly empty, substring).
    """
    skip_set = set()
    star_skip_set = set()
    for t in skip_tests:
        work_set = star_skip_set if '*' in t else skip_set
        work_set.add(t)

    re_star_tests = None
    if len(star_skip_set) > 0:
        # Turn each '*' run into '.*' and anchor the alternation.
        re_star_tests = re.compile(re.sub(r'(\*)+', r'.\1', '^({})$'.format('|'.join(star_skip_set))))

    # BUGFIX: exact-name membership is checked against skip_set instead of
    # the raw skip_tests list — O(1) per lookup, and star patterns are now
    # handled exclusively by the regex rather than also matching literally.
    return [x for x in tests if not (x in skip_set or re_star_tests and re_star_tests.match(x))]
+
+
@contextmanager
def create_strip_symlink():
    """Temporarily expose llvm-strip as 'strip' for the Go linker.

    When CC is clang, a temp dir containing a 'strip' -> llvm-strip symlink
    is appended to PATH for the duration of the context. We believe that
    the cc-binaries path is the first element of the PATH environment
    variable.

    BUGFIX: the original appended the temp dir to PATH but never removed
    it, leaving a dangling entry after the context exited; PATH is now
    restored in the finally block.
    """
    tmpdir = None
    old_path = os.environ.get("PATH")
    if os.getenv("CC") == "clang":
        tmpdir = tempfile.mkdtemp()
        cc_path = os.getenv("PATH").split(os.pathsep)[0]
        os.environ["PATH"] += os.pathsep + tmpdir
        src_strip_path = os.path.join(cc_path, 'llvm-strip')
        dst_strip_path = os.path.join(tmpdir, 'strip')
        os.symlink(src_strip_path, dst_strip_path)
    try:
        yield
    finally:
        if tmpdir:
            shutil.rmtree(tmpdir)
            if old_path is not None:
                os.environ["PATH"] = old_path
+
+
def gen_test_main(args, test_lib_args, xtest_lib_args):
    """Generate the Go source of the test binary's main package.

    Test/benchmark/example (and, for newer Go, fuzz target) names are mined
    from the freshly built test libraries with args.test_miner inside a
    temporary GOPATH, then registered with testing.MainStart; coverage
    counters are wired in when args.cover_info is set. Returns the
    generated source as one string.
    """
    assert args and (test_lib_args or xtest_lib_args)
    test_miner = args.test_miner
    test_module_path = test_lib_args.import_path if test_lib_args else xtest_lib_args.import_path
    is_cover = args.cover_info and len(args.cover_info) > 0

    # Prepare GOPATH
    # $BINDIR
    #    |- __go__
    #        |- src
    #        |- pkg
    #            |- ${TARGET_OS}_${TARGET_ARCH}
    go_path_root = os.path.join(args.output_root, '__go__')
    test_src_dir = os.path.join(go_path_root, 'src')
    target_os_arch = '_'.join([args.targ_os, args.targ_arch])
    test_pkg_dir = os.path.join(go_path_root, 'pkg', target_os_arch, os.path.dirname(test_module_path))
    os.makedirs(test_pkg_dir)

    my_env = os.environ.copy()
    my_env['GOROOT'] = ''
    my_env['GOPATH'] = go_path_root
    my_env['GOARCH'] = args.targ_arch
    my_env['GOOS'] = args.targ_os

    tests = []
    xtests = []
    os_symlink = get_symlink_or_copyfile()

    # Get the list of "internal" tests
    if test_lib_args:
        os.makedirs(os.path.join(test_src_dir, test_module_path))
        os_symlink(test_lib_args.output, os.path.join(test_pkg_dir, os.path.basename(test_module_path) + '.a'))
        cmd = [test_miner, '-benchmarks', '-tests', test_module_path]
        tests = [x for x in (call(cmd, test_lib_args.output_root, my_env) or '').strip().split('\n') if len(x) > 0]
        if args.skip_tests:
            tests = filter_out_skip_tests(tests, args.skip_tests)
    # The miner emits '#TestMain' when the package defines its own TestMain.
    test_main_found = '#TestMain' in tests

    # Get the list of "external" (package_test) tests
    if xtest_lib_args:
        xtest_module_path = xtest_lib_args.import_path
        os.makedirs(os.path.join(test_src_dir, xtest_module_path))
        os_symlink(xtest_lib_args.output, os.path.join(test_pkg_dir, os.path.basename(xtest_module_path) + '.a'))
        cmd = [test_miner, '-benchmarks', '-tests', xtest_module_path]
        xtests = [x for x in (call(cmd, xtest_lib_args.output_root, my_env) or '').strip().split('\n') if len(x) > 0]
        if args.skip_tests:
            xtests = filter_out_skip_tests(xtests, args.skip_tests)
    xtest_main_found = '#TestMain' in xtests

    # At most one package may define TestMain; remember which one did.
    test_main_package = None
    if test_main_found and xtest_main_found:
        assert False, 'multiple definition of TestMain'
    elif test_main_found:
        test_main_package = '_test'
    elif xtest_main_found:
        test_main_package = '_xtest'

    shutil.rmtree(go_path_root)

    lines = ['package main', '', 'import (']
    if test_main_package is None:
        lines.append('    "os"')
    lines.extend(['    "testing"', '    "testing/internal/testdeps"'])

    if len(tests) > 0:
        lines.append('    _test "{}"'.format(test_module_path))
    elif test_lib_args:
        # Import for side effects only so the archive is still linked in.
        lines.append('    _ "{}"'.format(test_module_path))

    if len(xtests) > 0:
        lines.append('    _xtest "{}"'.format(xtest_module_path))
    elif xtest_lib_args:
        lines.append('    _ "{}"'.format(xtest_module_path))

    if is_cover:
        lines.append('    _cover0 "{}"'.format(test_module_path))
    lines.extend([')', ''])

    # FuzzTarget registration exists only in newer Go versions.
    if compare_versions('1.18', args.goversion) < 0:
        kinds = ['Test', 'Benchmark', 'Example']
    else:
        kinds = ['Test', 'Benchmark', 'FuzzTarget', 'Example']

    var_names = []
    for kind in kinds:
        var_name = '{}s'.format(kind.lower())
        var_names.append(var_name)
        lines.append('var {} = []testing.Internal{}{{'.format(var_name, kind))
        for test in [x for x in tests if x.startswith(kind)]:
            lines.append('    {{"{test}", _test.{test}}},'.format(test=test))
        for test in [x for x in xtests if x.startswith(kind)]:
            lines.append('    {{"{test}", _xtest.{test}}},'.format(test=test))
        lines.extend(['}', ''])

    if is_cover:
        lines.extend(gen_cover_info(args))

    lines.append('func main() {')
    if is_cover:
        lines.extend([
            '    testing.RegisterCover(testing.Cover{',
            '        Mode: "set",',
            '        Counters: coverCounters,',
            '        Blocks: coverBlocks,',
            '        CoveredPackages: "",',
            '    })',
        ])
    lines.extend([
        '    m := testing.MainStart(testdeps.TestDeps{{}}, {})'.format(', '.join(var_names)),
        '',
    ])

    if test_main_package:
        lines.append('    {}.TestMain(m)'.format(test_main_package))
    else:
        lines.append('    os.Exit(m.Run())')
    lines.extend(['}', ''])

    content = '\n'.join(lines)
    # sys.stderr.write('{}\n'.format(content))
    return content
+
+
def do_link_test(args):
    """Build a Go test binary.

    Links the test and xtest package archives, generates _test_main.go via
    gen_test_main, merges vet reports, and links the final executable.
    """
    assert args.srcs or args.xtest_srcs
    assert args.test_miner is not None

    test_module_path = get_source_path(args)
    test_import_path, _ = get_import_path(test_module_path)

    test_lib_args = copy_args(args) if args.srcs else None
    xtest_lib_args = copy_args(args) if args.xtest_srcs else None
    if xtest_lib_args is not None:
        xtest_lib_args.embed = args.embed_xtest if args.embed_xtest else None

    ydx_file_name = None
    xtest_ydx_file_name = None
    # When both parts produce a ydx dump, the xtest dump goes to a separate
    # file (rewritten into vet_flags) and is appended to the main one below.
    need_append_ydx = test_lib_args and xtest_lib_args and args.ydx_file and args.vet_flags
    if need_append_ydx:
        def find_ydx_file_name(name, flags):
            # Locate the vet flag that ends with the ydx file name.
            for i, elem in enumerate(flags):
                if elem.endswith(name):
                    return (i, elem)
            assert False, 'Unreachable code'

        idx, ydx_file_name = find_ydx_file_name(xtest_lib_args.ydx_file, xtest_lib_args.vet_flags)
        xtest_ydx_file_name = '{}_xtest'.format(ydx_file_name)
        xtest_lib_args.vet_flags = copy.copy(xtest_lib_args.vet_flags)
        xtest_lib_args.vet_flags[idx] = xtest_ydx_file_name

    if test_lib_args:
        test_lib_args.output = os.path.join(args.output_root, 'test.a')
        test_lib_args.vet_report_output = vet_report_output_name(test_lib_args.output)
        test_lib_args.module_path = test_module_path
        test_lib_args.import_path = test_import_path
        do_link_lib(test_lib_args)

    if xtest_lib_args:
        xtest_lib_args.srcs = xtest_lib_args.xtest_srcs
        classify_srcs(xtest_lib_args.srcs, xtest_lib_args)
        xtest_lib_args.output = os.path.join(args.output_root, 'xtest.a')
        xtest_lib_args.vet_report_output = vet_report_output_name(xtest_lib_args.output)
        xtest_lib_args.module_path = test_module_path + '_test'
        xtest_lib_args.import_path = test_import_path + '_test'
        if test_lib_args:
            xtest_lib_args.module_map[test_import_path] = test_lib_args.output
        # NOTE(review): need_append_ydx is recomputed here with a slightly
        # different condition (args.srcs vs test_lib_args) — looks
        # intentional but worth confirming.
        need_append_ydx = args.ydx_file and args.srcs and args.vet_flags
        do_link_lib(xtest_lib_args)

    if need_append_ydx:
        with open(os.path.join(args.build_root, ydx_file_name), 'ab') as dst_file:
            with open(os.path.join(args.build_root, xtest_ydx_file_name), 'rb') as src_file:
                dst_file.write(src_file.read())

    test_main_content = gen_test_main(args, test_lib_args, xtest_lib_args)
    test_main_name = os.path.join(args.output_root, '_test_main.go')
    with open(test_main_name, "w") as f:
        f.write(test_main_content)
    test_args = copy_args(args)
    test_args.embed = None
    test_args.srcs = [test_main_name]
    if test_args.test_import_path is None:
        # it seems that we can do it unconditionally, but this kind
        # of mangling doesn't really look good to me and we leave it
        # for the pure GO_TEST module
        test_args.module_path = test_args.module_path + '___test_main__'
        test_args.import_path = test_args.import_path + '___test_main__'
    classify_srcs(test_args.srcs, test_args)
    if test_lib_args:
        test_args.module_map[test_lib_args.import_path] = test_lib_args.output
    if xtest_lib_args:
        test_args.module_map[xtest_lib_args.import_path] = xtest_lib_args.output

    if args.vet:
        # Vet already ran per package; just merge the per-package reports.
        dump_vet_report_for_tests(test_args, test_lib_args, xtest_lib_args)
    test_args.vet = False

    do_link_exe(test_args)
+
+
if __name__ == '__main__':
    # Expand @-command-files into a flat argument list first.
    args = pcf.get_args(sys.argv[1:])

    # '+' is used as the option prefix so '-'-prefixed Go tool flags can be
    # forwarded verbatim as values.
    parser = argparse.ArgumentParser(prefix_chars='+')
    parser.add_argument('++mode', choices=['dll', 'exe', 'lib', 'test'], required=True)
    parser.add_argument('++buildmode', choices=['c-shared', 'exe', 'pie'])
    parser.add_argument('++srcs', nargs='*', required=True)
    parser.add_argument('++cgo-srcs', nargs='*')
    parser.add_argument('++test_srcs', nargs='*')
    parser.add_argument('++xtest_srcs', nargs='*')
    parser.add_argument('++cover_info', nargs='*')
    parser.add_argument('++output', nargs='?', default=None)
    parser.add_argument('++source-root', default=None)
    parser.add_argument('++build-root', required=True)
    parser.add_argument('++tools-root', default=None)
    parser.add_argument('++output-root', required=True)
    parser.add_argument('++toolchain-root', required=True)
    parser.add_argument('++host-os', choices=['linux', 'darwin', 'windows'], required=True)
    parser.add_argument('++host-arch', choices=['amd64', 'arm64'], required=True)
    parser.add_argument('++targ-os', choices=['linux', 'darwin', 'windows'], required=True)
    parser.add_argument('++targ-arch', choices=['amd64', 'x86', 'arm64'], required=True)
    parser.add_argument('++peers', nargs='*')
    parser.add_argument('++non-local-peers', nargs='*')
    parser.add_argument('++cgo-peers', nargs='*')
    parser.add_argument('++asmhdr', nargs='?', default=None)
    parser.add_argument('++test-import-path', nargs='?')
    parser.add_argument('++test-miner', nargs='?')
    parser.add_argument('++arc-project-prefix', nargs='?', default=arc_project_prefix)
    parser.add_argument('++std-lib-prefix', nargs='?', default=std_lib_prefix)
    parser.add_argument('++vendor-prefix', nargs='?', default=vendor_prefix)
    parser.add_argument('++extld', nargs='?', default=None)
    parser.add_argument('++extldflags', nargs='+', default=None)
    parser.add_argument('++goversion', required=True)
    parser.add_argument('++lang', nargs='?', default=None)
    parser.add_argument('++asm-flags', nargs='*')
    parser.add_argument('++compile-flags', nargs='*')
    parser.add_argument('++link-flags', nargs='*')
    parser.add_argument('++vcs', nargs='?', default=None)
    parser.add_argument('++vet', nargs='?', const=True, default=False)
    parser.add_argument('++vet-flags', nargs='*', default=None)
    parser.add_argument('++vet-info-ext', default=vet_info_ext)
    parser.add_argument('++vet-report-ext', default=vet_report_ext)
    parser.add_argument('++musl', action='store_true')
    parser.add_argument('++skip-tests', nargs='*', default=None)
    parser.add_argument('++ydx-file', default='')
    parser.add_argument('++debug-root-map', default=None)
    parser.add_argument('++embed', action='append', nargs='*')
    parser.add_argument('++embed_xtest', action='append', nargs='*')
    args = parser.parse_args(args)

    # Command-line values override the module-level defaults.
    arc_project_prefix = args.arc_project_prefix
    std_lib_prefix = args.std_lib_prefix
    vendor_prefix = args.vendor_prefix
    vet_info_ext = args.vet_info_ext
    vet_report_ext = args.vet_report_ext

    preprocess_args(args)

    # Remove a stale output so a failed build cannot leave an old artifact.
    try:
        os.unlink(args.output)
    except OSError:
        pass

    # We are going to support only 'lib', 'exe' and 'cgo' build modes currently
    # and as a result we are going to generate only one build node per module
    # (or program)
    dispatch = {
        'exe': do_link_exe,
        'dll': do_link_exe,
        'lib': do_link_lib,
        'test': do_link_test
    }

    exit_code = 1
    try:
        with create_strip_symlink():
            dispatch[args.mode](args)
        exit_code = 0
    except KeyError:
        sys.stderr.write('Unknown build mode [{}]...\n'.format(args.mode))
    except subprocess.CalledProcessError as e:
        sys.stderr.write('{} returned non-zero exit code {}.\n{}\n'.format(' '.join(e.cmd), e.returncode, e.output))
        exit_code = e.returncode
    except AssertionError as e:
        traceback.print_exc(file=sys.stderr)
    except Exception as e:
        sys.stderr.write('Unhandled exception [{}]...\n'.format(str(e)))
    sys.exit(exit_code)
diff --git a/build/scripts/ios_wrapper.py b/build/scripts/ios_wrapper.py
new file mode 100644
index 0000000000..d3aa48387a
--- /dev/null
+++ b/build/scripts/ios_wrapper.py
@@ -0,0 +1,180 @@
+import errno
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tarfile
+import plistlib
+
+
def ensure_dir(path):
    """Create `path` (and parents) if missing; tolerate an existing directory."""
    try:
        os.makedirs(path)
    except OSError as err:
        already_exists = err.errno == errno.EEXIST and os.path.isdir(path)
        if not already_exists:
            raise
+
+
def just_do_it(args):
    """Assemble an iOS .app bundle from precompiled inputs and tar it up.

    Argument layout: 5 fixed args, '__DELIM__', input files, '__DELIM__',
    extra storyboard flags. Inputs are classified purely by extension.

    NOTE(review): this module uses Python 2-only constructs ('print >>',
    xrange, plistlib.readPlist) — it appears to target Python 2; confirm
    the interpreter before modernizing.
    """
    if not args:
        raise Exception('Not enough args!')
    # Split argv into the three '__DELIM__'-separated sections.
    parts = [[]]
    for arg in args:
        if arg == '__DELIM__':
            parts.append([])
        else:
            parts[-1].append(arg)
    if len(parts) != 3 or len(parts[0]) != 5:
        raise Exception('Bad call')
    bin_name, ibtool_path, main_out, app_name, module_dir = parts[0]
    bin_name = os.path.basename(bin_name)
    inputs, storyboard_user_flags = parts[1:]
    # Bucket the inputs by extension.
    plists, storyboards, signs, nibs, resources, signed_resources, plist_jsons, strings = [], [], [], [], [], [], [], []
    for i in inputs:
        if i.endswith('.plist') or i.endswith('.partial_plist'):
            plists.append(i)
        elif i.endswith('.compiled_storyboard_tar'):
            storyboards.append(i)
        elif i.endswith('.xcent'):
            signs.append(i)
        elif i.endswith('.nib'):
            nibs.append(i)
        elif i.endswith('.resource_tar'):
            resources.append(i)
        elif i.endswith('.signed_resource_tar'):
            signed_resources.append(i)
        elif i.endswith('.plist_json'):
            plist_jsons.append(i)
        elif i.endswith('.strings_tar'):
            strings.append(i)
        else:
            print >> sys.stderr, 'Unknown input:', i, 'ignoring'
    if not plists:
        raise Exception("Can't find plist files")
    if not plists[0].endswith('.plist'):
        print >> sys.stderr, "Main plist file can be defined incorretly"
    if not storyboards:
        print >> sys.stderr, "Storyboards list are empty"
    if len(signs) > 1:
        raise Exception("Too many .xcent files")
    app_dir = os.path.join(module_dir, app_name + '.app')
    ensure_dir(app_dir)
    copy_nibs(nibs, module_dir, app_dir)
    # Built-in Info.plist template parameters; plist_json files may override
    # or extend them.
    replaced_parameters = {
        'DEVELOPMENT_LANGUAGE': 'en',
        'EXECUTABLE_NAME': bin_name,
        'PRODUCT_BUNDLE_IDENTIFIER': 'Yandex.' + app_name,
        'PRODUCT_NAME': app_name,
    }
    replaced_templates = {}
    for plist_json in plist_jsons:
        with open(plist_json) as jsonfile:
            for k, v in json.loads(jsonfile.read()).items():
                replaced_parameters[k] = v
    for k, v in replaced_parameters.items():
        # Support both $(NAME) and ${NAME} template spellings.
        replaced_templates['$(' + k + ')'] = v
        replaced_templates['${' + k + '}'] = v
    make_main_plist(plists, os.path.join(app_dir, 'Info.plist'), replaced_templates)
    link_storyboards(ibtool_path, storyboards, app_name, app_dir, storyboard_user_flags)
    if resources:
        extract_resources(resources, app_dir)
    if signed_resources:
        extract_resources(signed_resources, app_dir, sign=True)
    if strings:
        extract_resources(strings, app_dir, strings=True)
    if not signs:
        # No entitlements supplied: generate a minimal ad-hoc .xcent.
        sign_file = os.path.join(module_dir, app_name + '.xcent')
        with open(sign_file, 'w') as f:
            f.write('''<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>com.apple.security.get-task-allow</key>
    <true/>
</dict>
</plist>
    ''')
    else:
        sign_file = signs[0]
    sign_application(sign_file, app_dir)
    make_archive(app_dir, main_out)
+
+
def is_exe(fpath):
    """True when fpath is an existing regular file with the execute bit set."""
    if not os.path.isfile(fpath):
        return False
    return os.access(fpath, os.X_OK)
+
+
def copy_nibs(nibs, module_dir, app_dir):
    """Copy each nib into the app bundle, preserving its path relative to
    module_dir."""
    for src in nibs:
        dest = os.path.join(app_dir, os.path.relpath(src, module_dir))
        ensure_dir(os.path.dirname(dest))
        shutil.copyfile(src, dest)
+
+
def make_main_plist(inputs, out, replaced_parameters):
    """Merge the input plists into one Info.plist, substituting values that
    exactly match a $(NAME)/${NAME} template, then convert it to binary.

    NOTE(review): plistlib.readPlist/writePlist were removed in Python 3.9
    and xrange is Python 2 only — this function targets Python 2; confirm
    before running under Python 3.
    """
    united_data = {}
    # Later plists override earlier keys.
    for i in inputs:
        united_data.update(plistlib.readPlist(i))

    def scan_n_replace(root):
        # Recursively walk dicts/lists, replacing leaf values that exactly
        # match a template key.
        if not isinstance(root, dict):
            raise Exception('Invalid state')
        for k in root:
            if isinstance(root[k], list):
                for i in xrange(len(root[k])):
                    if isinstance(root[k][i], dict):
                        scan_n_replace(root[k][i])
                    elif root[k][i] in replaced_parameters:
                        root[k][i] = replaced_parameters[root[k][i]]
            elif isinstance(root[k], dict):
                scan_n_replace(root[k])
            else:
                if root[k] in replaced_parameters:
                    root[k] = replaced_parameters[root[k]]
    scan_n_replace(united_data)
    plistlib.writePlist(united_data, out)
    subprocess.check_call(['/usr/bin/plutil', '-convert', 'binary1', out])
+
+
def link_storyboards(ibtool, archives, app_name, app_dir, flags):
    """Unpack compiled storyboard archives and link them into the bundle
    with ibtool."""
    unpacked = []
    for arc in archives:
        # e.g. Main.storyboard.compiled_storyboard_tar -> Main.storyboardc
        # (archive extension stripped, 'c' appended) — presumed layout.
        unpacked.append(os.path.splitext(arc)[0] + 'c')
        ensure_dir(unpacked[-1])
        with tarfile.open(arc) as a:
            a.extractall(path=unpacked[-1])
    flags += [
        '--module', app_name,
        '--link', app_dir,
    ]
    subprocess.check_call([ibtool] + flags +
                          ['--errors', '--warnings', '--notices', '--output-format', 'human-readable-text'] +
                          unpacked)
+
+
def sign_application(xcent, app_dir):
    """Ad-hoc codesign the app bundle using the entitlements file `xcent`."""
    cmd = [
        '/usr/bin/codesign',
        '--force',
        '--sign', '-',
        '--entitlements', xcent,
        '--timestamp=none',
        app_dir,
    ]
    subprocess.check_call(cmd)
+
+
def extract_resources(resources, app_dir, strings=False, sign=False):
    """Extract resource tar archives into the bundle; optionally convert
    each extracted entry to a binary plist (strings) and/or ad-hoc
    codesign it (sign)."""
    for res in resources:
        with tarfile.open(res) as tf:
            for tfinfo in tf:
                tf.extract(tfinfo.name, app_dir)
                if strings:
                    subprocess.check_call(['/usr/bin/plutil', '-convert', 'binary1', os.path.join(app_dir, tfinfo.name)])
                if sign:
                    subprocess.check_call(['/usr/bin/codesign', '--force', '--sign', '-', os.path.join(app_dir, tfinfo.name)])
+
+
def make_archive(app_dir, output):
    """Pack every file under app_dir into tar `output`; entry names are
    rooted at basename(app_dir)."""
    base = os.path.basename(app_dir)
    with tarfile.open(output, "w") as tar:
        for dirpath, _, filenames in os.walk(app_dir):
            for name in filenames:
                full = os.path.join(dirpath, name)
                tar.add(full, arcname=os.path.join(base, os.path.relpath(full, app_dir)))
+
+
if __name__ == '__main__':
    # Script entry point: forward all CLI arguments (without argv[0]).
    just_do_it(sys.argv[1:])
diff --git a/build/scripts/java_pack_to_file.py b/build/scripts/java_pack_to_file.py
new file mode 100644
index 0000000000..c8ab7c311b
--- /dev/null
+++ b/build/scripts/java_pack_to_file.py
@@ -0,0 +1,43 @@
+import os
+import re
+import optparse
+
# Java: capture everything between 'package' and the terminating ';'
# (MULTILINE + DOTALL allow the declaration to span several lines).
JAVA_PACKAGE_REGEX = re.compile(r'^\s*package\s+(.*?);', flags=re.MULTILINE | re.DOTALL)
# Kotlin: no ';' terminator — match lazily up to the start of the next line,
# so the captured group may carry a trailing newline (stripped by callers).
KOTLIN_PACKAGE_REGEX = re.compile(r'^\s*package\s+(.*?)^', flags=re.MULTILINE | re.DOTALL)
+
+
def parse_args():
    """CLI: -o/--output result file, -a/--source-root; positionals are the
    source files to process. Returns (options, files)."""
    opt_parser = optparse.OptionParser()
    opt_parser.add_option('-o', '--output')
    opt_parser.add_option('-a', '--source-root', dest='source_root')
    return opt_parser.parse_args()
+
+
def get_package_name(filename):
    """Return the declared package of a Java/Kotlin source as a '/'-separated
    path, or '' when no package declaration is found."""
    with open(filename) as afile:
        content = afile.read()
        if filename.endswith(".kt"):
            match = KOTLIN_PACKAGE_REGEX.search(content)
            if match:
                return match.group(1).strip().replace('.', '/')
        else:
            match = JAVA_PACKAGE_REGEX.search(content)
            if match:
                # BUGFIX: the original removed only the literal three-char
                # sequence '\n\t ', so multi-line package declarations with
                # any other whitespace produced broken paths; strip ALL
                # whitespace instead.
                return re.sub(r'\s+', '', match.group(1)).replace('.', '/')
        return ''
+
+
def write_coverage_sources(output, srcroot, files):
    """Write one 'package/basename:relpath' line per source file to `output`."""
    with open(output, 'w') as out:
        for rel in files:
            package = get_package_name(os.path.join(srcroot, rel))
            out.write('{}:{}\n'.format(os.path.join(package, os.path.basename(rel)), rel))
+
+
def main():
    """Parse CLI options and emit the coverage sources mapping file."""
    options, file_list = parse_args()
    write_coverage_sources(options.output, options.source_root, file_list)
+
+
if __name__ == '__main__':
    # main() returns None, so exit(None) reports success (status 0).
    exit(main())
diff --git a/build/scripts/jni_swig.py b/build/scripts/jni_swig.py
new file mode 100644
index 0000000000..4b2220430b
--- /dev/null
+++ b/build/scripts/jni_swig.py
@@ -0,0 +1,46 @@
+import argparse
+import subprocess
+import re
+import os
+import tarfile
+
def parse_args():
    """Parse the wrapper's CLI: swig location, package derivation inputs,
    output archive/header paths and pass-through swig arguments."""
    arg_parser = argparse.ArgumentParser(description='Wrapper script to invoke swig.')
    arg_parser.add_argument('--swig', help='path to the swig executable')
    arg_parser.add_argument('--default-module', type=str, help='swig -module argument value for inputs without %module statement')
    arg_parser.add_argument('--package-by-file', help='path to file which dir must be converted to swig -package argument')
    arg_parser.add_argument('--jsrc', help='jsrc output archive filename')
    arg_parser.add_argument('--src', help='input .swg file path')
    arg_parser.add_argument('--out-header', help='header file which must exist even if it was not generated by swig')
    arg_parser.add_argument('args', nargs="*", help='regular swig arguments')

    return arg_parser.parse_args()
+
+
def path2pkg(path):
    """Convert a directory path to a Java package name ('/'->'.', '-'->'_')."""
    dotted = path.replace('/', '.')
    return dotted.replace('-', '_')
+
+
def main(args):
    """Run swig over a .swg input, producing C++ wrappers and Java sources.

    Java sources go into a package directory derived from --package-by-file
    and are appended to the --jsrc tar archive.
    """
    package = path2pkg(os.path.dirname(args.package_by_file))
    outdir = None
    if args.jsrc:
        outdir = package.replace('.', '/')
        outdir_abs = os.path.join(os.path.dirname(args.jsrc), outdir)
        if not os.path.exists(outdir_abs):
            os.makedirs(outdir_abs)
    cmd = [args.swig, '-c++', '-java', '-package', package] + (['-outdir', outdir_abs] if outdir is not None else []) + args.args
    # Inject the default -module only when the source has no %module directive.
    if '-module' not in args.args and args.default_module:
        with open(args.src, 'r') as f:
            if not re.search(r'(?m)^%module\b', f.read()):
                cmd += ['-module', args.default_module]
    subprocess.check_call(cmd + [args.src])
    # Swig may not emit the header at all; guarantee it exists for the build graph.
    if args.out_header and not os.path.exists(args.out_header):
        open(args.out_header, 'w').close()
    if args.jsrc:
        with tarfile.open(args.jsrc, 'a') as tf:
            tf.add(outdir_abs, arcname=outdir)
+
+
if __name__ == '__main__':
    # Parse CLI options and invoke swig.
    main(parse_args())
diff --git a/build/scripts/kt_copy.py b/build/scripts/kt_copy.py
new file mode 100644
index 0000000000..f833c24ef4
--- /dev/null
+++ b/build/scripts/kt_copy.py
@@ -0,0 +1,17 @@
#!/usr/bin/env python
import sys

if __name__ == '__main__':
    # argv: <source> <destination> <source-root> <build-root>
    # Strips the source-root prefix from every path in the file and drops
    # lines referring to files under the build root.
    source, destination, source_root, build_root = sys.argv[1:5]
    with open(source, 'r') as src:
        content = src.read().replace(source_root + '/', "")
    kept = [ln for ln in content.split("\n") if not ln.startswith(build_root)]
    with open(destination, 'w') as dst:
        for ln in kept:
            dst.write(ln + "\n")
diff --git a/build/scripts/link_asrc.py b/build/scripts/link_asrc.py
new file mode 100644
index 0000000000..eec5fe09a8
--- /dev/null
+++ b/build/scripts/link_asrc.py
@@ -0,0 +1,84 @@
+import argparse
+import itertools
+import os
+import tarfile
+
+
# Delimiter tokens used in the --input list to mark the start of each group
# of files (java sources, resources, assets, aidl).
DELIM_JAVA = '__DELIM_JAVA__'
DELIM_RES = '__DELIM_RES__'
DELIM_ASSETS = '__DELIM_ASSETS__'
DELIM_AIDL = '__DELIM_AIDL__'

# All recognized delimiters.
DELIMS = (
    DELIM_JAVA,
    DELIM_RES,
    DELIM_ASSETS,
    DELIM_AIDL,
)

# Destination directory (inside the output tar) for each group kind.
DESTS = {
    DELIM_JAVA: 'src',
    DELIM_RES: 'res',
    DELIM_ASSETS: 'assets',
    DELIM_AIDL: 'aidl',
}
+
+
def parse_args():
    """CLI: input archives/lists, the output .asrc path and a work dir."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--asrcs', nargs='*')
    arg_parser.add_argument('--input', nargs='*')
    arg_parser.add_argument('--jsrcs', nargs='*')
    arg_parser.add_argument('--output', required=True)
    arg_parser.add_argument('--work', required=True)

    return arg_parser.parse_args()
+
+
def main():
    """Merge java sources, resources, assets and aidl inputs into one
    .asrc tar archive.

    --input is a flat list where delimiter tokens (DELIMS) introduce groups
    of the form [DELIM, prefix_dir, files...]; --jsrcs archives are unpacked
    under <work>/src and added as java groups; --asrcs archives are unpacked
    into <work> and re-added verbatim.
    """
    args = parse_args()

    files = []
    parts = []

    if args.input and len(args.input) > 0:
        for x in args.input:
            if x in DELIMS:
                # A delimiter may only follow a complete group.
                assert(len(parts) == 0 or len(parts[-1]) > 1)
                parts.append([x])
            else:
                assert(len(parts) > 0)
                parts[-1].append(x)
        assert(len(parts[-1]) > 1)

    if args.jsrcs and len(args.jsrcs):
        src_dir = os.path.join(args.work, DESTS[DELIM_JAVA])
        os.makedirs(src_dir)

        for jsrc in filter(lambda x: x.endswith('.jsrc'), args.jsrcs):
            with tarfile.open(jsrc, 'r') as tar:
                names = tar.getnames()
                if names and len(names) > 0:
                    parts.append([DELIM_JAVA, src_dir])
                    # BUGFIX: itertools.imap exists only in Python 2 and
                    # raised AttributeError under Python 3; a generator
                    # expression behaves identically on both.
                    parts[-1].extend(os.path.join(src_dir, x) for x in names)
                    tar.extractall(path=src_dir)

    if args.asrcs and len(args.asrcs):
        for asrc in filter(lambda x: x.endswith('.asrc') and os.path.exists(x), args.asrcs):
            with tarfile.open(asrc, 'r') as tar:
                files.extend(tar.getnames())
                tar.extractall(path=args.work)

    with tarfile.open(args.output, 'w') as out:
        # Grouped inputs keep their path relative to the group prefix under
        # the kind-specific destination directory.
        for part in parts:
            dest = DESTS[part[0]]
            prefix = part[1]
            for f in part[2:]:
                out.add(f, arcname=os.path.join(dest, os.path.relpath(f, prefix)))

        # Entries from pre-built .asrc archives are re-added unchanged.
        for f in files:
            out.add(os.path.join(args.work, f), arcname=f)
+
+
if __name__ == '__main__':
    # Script entry point.
    main()
diff --git a/build/scripts/link_dyn_lib.py b/build/scripts/link_dyn_lib.py
new file mode 100644
index 0000000000..00215dfcae
--- /dev/null
+++ b/build/scripts/link_dyn_lib.py
@@ -0,0 +1,353 @@
+import sys
+import os
+import subprocess
+import tempfile
+import collections
+import optparse
+import pipes
+
+from process_whole_archive_option import ProcessWholeArchiveOption
+
+
def shlex_join(cmd):
    """Render *cmd* as a single shell-escaped string (shlex.join backport)."""
    return ' '.join(map(pipes.quote, cmd))
+
+
def parse_export_file(p):
    """Yield parsed records from an .exports file at path *p*.

    Each non-empty line without a '#' is one of:
      'linux_version <ver>'  -> {'linux_version': ver}
      '<lang> <symbol>'      -> {'lang': lang, 'sym': symbol}
      '<symbol>'             -> {'lang': 'C', 'sym': symbol}
    Anything else raises.
    """
    with open(p, 'r') as export_file:
        for raw_line in export_file:
            line = raw_line.strip()
            # lines containing '#' anywhere are treated as comments
            if not line or '#' in line:
                continue
            tokens = line.split()
            if len(tokens) == 2 and tokens[0] == 'linux_version':
                yield {'linux_version': tokens[1]}
            elif len(tokens) == 2:
                yield {'lang': tokens[0], 'sym': tokens[1]}
            elif len(tokens) == 1:
                yield {'lang': 'C', 'sym': tokens[0]}
            else:
                raise Exception('unsupported exports line: ' + line)
+
+
def to_c(sym):
    """Expand a '::'-separated C++ name into mangled-name glob patterns.

    Returns one pattern per known Itanium-mangling prefix (namespace,
    typeinfo, VTT, vtable, const methods).  A trailing '*' component
    produces an open-ended glob; otherwise the name is terminated with 'E*'.
    """
    mangle_prefixes = [  # demangle prefixes for c++ symbols
        '_ZN',   # namespace
        '_ZTIN', # typeinfo for
        '_ZTSN', # typeinfo name for
        '_ZTTN', # VTT for
        '_ZTVN', # vtable for
        '_ZNK',  # const methods
    ]
    components = sym.split('::')
    encoded = ''
    for idx, component in enumerate(components):
        if component == '*':
            encoded += '*'
            # a wildcard is only allowed as the last component
            if idx != len(components) - 1:
                raise Exception('Unsupported format: ' + sym)
            break
        if '*' in component and len(component) > 1:
            raise Exception('Unsupported format, cannot guess length of symbol: ' + component)
        encoded += str(len(component)) + component
    if encoded[-1] != '*':
        encoded += 'E*'
    return [prefix + encoded for prefix in mangle_prefixes]
+
+
def fix_darwin_param(ex):
    """Yield Mach-O linker -exported_symbol flags for parsed export records."""
    for record in ex:
        if record.get('linux_version'):
            # version nodes are a GNU ld concept; meaningless on darwin
            continue
        lang = record['lang']
        if lang == 'C':
            yield '-Wl,-exported_symbol,_' + record['sym']
        elif lang == 'C++':
            for mangled in to_c(record['sym']):
                yield '-Wl,-exported_symbol,_' + mangled
        else:
            raise Exception('unsupported lang: ' + lang)
+
+
def fix_gnu_param(arch, ex):
    """Build a GNU ld version script from parsed export records.

    Returns the linker flag(s) referencing the generated script file;
    ANDROID additionally gets --export-dynamic.
    """
    symbols_by_lang = collections.defaultdict(list)
    version = None
    for item in ex:
        if item.get('linux_version'):
            if version:
                raise Exception('More than one linux_version defined')
            version = item.get('linux_version')
        elif item['lang'] == 'C++':
            # C++ names are expanded into C-level mangled-name globs
            symbols_by_lang['C'].extend(to_c(item['sym']))
        else:
            symbols_by_lang[item['lang']].append(item['sym'])

    with tempfile.NamedTemporaryFile(mode='wt', delete=False) as script:
        if version:
            script.write('{} {{\nglobal:\n'.format(version))
        else:
            script.write('{\nglobal:\n')

        for lang, syms in symbols_by_lang.items():
            script.write(' extern "' + lang + '" {\n')
            for sym in syms:
                script.write(' ' + sym + ';\n')
            script.write(' };\n')

        script.write('local: *;\n};\n')

    flags = ['-Wl,--version-script=' + script.name]
    if arch == 'ANDROID':
        flags.append('-Wl,--export-dynamic')
    return flags
+
+
def fix_windows_param(ex):
    """Convert parsed export records into an MSVC linker /DEF: argument.

    Writes a module-definition (.def) file listing every C symbol from *ex*
    and returns the linker flag referencing it.  Non-C records are ignored
    (C++ mangling is compiler-specific on Windows).
    """
    # Open in text mode: NamedTemporaryFile defaults to binary, and writing
    # str to a binary-mode file raises TypeError under Python 3.
    with tempfile.NamedTemporaryFile(mode='wt', delete=False) as def_file:
        exports = []
        for item in ex:
            if item.get('lang') == 'C':
                exports.append(item.get('sym'))
        def_file.write('EXPORTS\n')
        for export in exports:
            def_file.write(' {}\n'.format(export))
    return ['/DEF:{}'.format(def_file.name)]
+
+
# glibc link flags that must be dropped when linking against musl, which
# bundles these facilities into libc itself.
MUSL_LIBS = '-lc', '-lcrypt', '-ldl', '-lm', '-lpthread', '-lrt', '-lutil'

# Mapping of static CUDA/TensorRT library flags to their dynamic
# counterparts; entries mapping to '' have no dynamic variant and are
# replaced by an empty argument.
CUDA_LIBRARIES = {
    '-lcublas_static': '-lcublas',
    '-lcublasLt_static': '-lcublasLt',
    '-lcudart_static': '-lcudart',
    '-lcudnn_static': '-lcudnn',
    '-lcufft_static_nocallback': '-lcufft',
    '-lcurand_static': '-lcurand',
    '-lcusolver_static': '-lcusolver',
    '-lcusparse_static': '-lcusparse',
    '-lmyelin_compiler_static': '-lmyelin',
    '-lmyelin_executor_static': '-lnvcaffe_parser',
    '-lmyelin_pattern_library_static': '',
    '-lmyelin_pattern_runtime_static': '',
    '-lnvinfer_static': '-lnvinfer',
    '-lnvinfer_plugin_static': '-lnvinfer_plugin',
    '-lnvonnxparser_static': '-lnvonnxparser',
    '-lnvparsers_static': '-lnvparsers'
}
+
+
+def fix_cmd(arch, c):
+ if arch == 'WINDOWS':
+ prefix = '/DEF:'
+ f = fix_windows_param
+ else:
+ prefix = '-Wl,--version-script='
+ if arch in ('DARWIN', 'IOS', 'IOSSIM'):
+ f = fix_darwin_param
+ else:
+ f = lambda x: fix_gnu_param(arch, x)
+
+ def do_fix(p):
+ if p.startswith(prefix) and p.endswith('.exports'):
+ fname = p[len(prefix):]
+
+ return list(f(list(parse_export_file(fname))))
+
+ if p.endswith('.supp'):
+ return []
+
+ if p.endswith('.pkg.fake'):
+ return []
+
+ return [p]
+
+ return sum((do_fix(x) for x in c), [])
+
+
def fix_cmd_for_musl(cmd):
    """Drop glibc link flags that musl provides built into libc."""
    return [flag for flag in cmd if flag not in MUSL_LIBS]
+
+
def fix_cmd_for_dynamic_cuda(cmd):
    """Replace static CUDA library flags with their dynamic counterparts."""
    return [CUDA_LIBRARIES.get(flag, flag) for flag in cmd]
+
+
def fix_blas_resolving(cmd):
    """Drop cblas archives from *cmd* when Intel MKL is also linked.

    MKL ships precompiled (it cannot be rebuilt with sanitizer
    instrumentation), so sanitizer builds substitute cblas for it.  If both
    appear in one link we assume a non-sanitized build and remove cblas to
    avoid duplicate symbol definitions.
    """
    mkl_linked = any(a.startswith('contrib/libs') and a.endswith('mkl-lp64.a') for a in cmd)
    if mkl_linked:
        return [a for a in cmd if not a.endswith('libcontrib-libs-cblas.a')]
    return cmd
+
+
def parse_args():
    """Parse wrapper options; the leftover argv is the raw linker command."""
    parser = optparse.OptionParser()
    # everything after the first positional argument belongs to the linker
    parser.disable_interspersed_args()
    for flag_opt in ('--musl', '--dynamic-cuda'):
        parser.add_option(flag_opt, action='store_true')
    for list_opt in ('--whole-archive-peers', '--whole-archive-libs'):
        parser.add_option(list_opt, action='append')
    for str_opt in ('--arch', '--target', '--soname', '--fix-elf',
                    '--linker-output', '--custom-step', '--python'):
        parser.add_option(str_opt)
    return parser.parse_args()
+
+
if __name__ == '__main__':
    # NOTE(review): the `print >>sys.stderr` statements below are Python 2
    # syntax; this script cannot run under Python 3 as-is — confirm the
    # interpreter it is invoked with.
    opts, args = parse_args()

    assert opts.arch
    assert opts.target

    # Rewrite the raw linker command: resolve mkl/cblas conflicts, expand
    # .exports files into platform-specific export flags.
    cmd = fix_blas_resolving(args)
    cmd = fix_cmd(opts.arch, cmd)

    if opts.musl:
        cmd = fix_cmd_for_musl(cmd)
    if opts.dynamic_cuda:
        cmd = fix_cmd_for_dynamic_cuda(cmd)

    cmd = ProcessWholeArchiveOption(opts.arch, opts.whole_archive_peers, opts.whole_archive_libs).construct_cmd(cmd)

    # Optional user hook run before linking; receives the final command.
    if opts.custom_step:
        assert opts.python
        subprocess.check_call([opts.python] + [opts.custom_step] + cmd)

    if opts.linker_output:
        stdout = open(opts.linker_output, 'w')
    else:
        stdout = sys.stdout

    proc = subprocess.Popen(cmd, shell=False, stderr=sys.stderr, stdout=stdout)
    proc.communicate()

    if proc.returncode:
        print >>sys.stderr, 'linker has failed with retcode:', proc.returncode
        print >>sys.stderr, 'linker command:', shlex_join(cmd)
        sys.exit(proc.returncode)

    # Optional post-processing of the produced binary.
    if opts.fix_elf:
        cmd = [opts.fix_elf, opts.target]
        proc = subprocess.Popen(cmd, shell=False, stderr=sys.stderr, stdout=sys.stdout)
        proc.communicate()

        if proc.returncode:
            print >>sys.stderr, 'fix_elf has failed with retcode:', proc.returncode
            print >>sys.stderr, 'fix_elf command:', shlex_join(cmd)
            sys.exit(proc.returncode)

    # Expose the library under its soname as a hard link next to the target.
    if opts.soname and opts.soname != opts.target:
        if os.path.exists(opts.soname):
            os.unlink(opts.soname)
        os.link(opts.target, opts.soname)
+
+
+# -----------------Test---------------- #
def write_temp_file(content):
    """Write *content* to 'test.exports' in the yatest output dir; return its path."""
    import yatest.common as yc
    path = yc.output_path('test.exports')
    with open(path, 'w') as out:
        out.write(content)
    return path
+
+
def test_fix_cmd_darwin():
    """fix_cmd on DARWIN must expand a C++ .exports file into one
    -exported_symbol glob per mangling prefix."""
    export_file_content = """
C++ geobase5::details::lookup_impl::*
C++ geobase5::hardcoded_service
"""
    filename = write_temp_file(export_file_content)
    args = ['-Wl,--version-script={}'.format(filename)]
    assert fix_cmd('DARWIN', args) == [
        '-Wl,-exported_symbol,__ZN8geobase57details11lookup_impl*',
        '-Wl,-exported_symbol,__ZTIN8geobase57details11lookup_impl*',
        '-Wl,-exported_symbol,__ZTSN8geobase57details11lookup_impl*',
        '-Wl,-exported_symbol,__ZTTN8geobase57details11lookup_impl*',
        '-Wl,-exported_symbol,__ZTVN8geobase57details11lookup_impl*',
        '-Wl,-exported_symbol,__ZNK8geobase57details11lookup_impl*',
        '-Wl,-exported_symbol,__ZN8geobase517hardcoded_serviceE*',
        '-Wl,-exported_symbol,__ZTIN8geobase517hardcoded_serviceE*',
        '-Wl,-exported_symbol,__ZTSN8geobase517hardcoded_serviceE*',
        '-Wl,-exported_symbol,__ZTTN8geobase517hardcoded_serviceE*',
        '-Wl,-exported_symbol,__ZTVN8geobase517hardcoded_serviceE*',
        '-Wl,-exported_symbol,__ZNK8geobase517hardcoded_serviceE*',
    ]
+
+
def run_fix_gnu_param(export_file_content):
    """Run fix_gnu_param over parsed *export_file_content* and return the
    text of the generated version script."""
    exports_path = write_temp_file(export_file_content)
    flag = fix_gnu_param('LINUX', list(parse_export_file(exports_path)))[0]
    script_path = flag[len('-Wl,--version-script='):]
    with open(script_path) as script:
        return script.read()
+
+
def test_fix_gnu_param():
    """Version script without a linux_version gets an anonymous node."""
    export_file_content = """
C++ geobase5::details::lookup_impl::*
C getFactoryMap
"""
    assert run_fix_gnu_param(export_file_content) == """{
global:
 extern "C" {
 _ZN8geobase57details11lookup_impl*;
 _ZTIN8geobase57details11lookup_impl*;
 _ZTSN8geobase57details11lookup_impl*;
 _ZTTN8geobase57details11lookup_impl*;
 _ZTVN8geobase57details11lookup_impl*;
 _ZNK8geobase57details11lookup_impl*;
 getFactoryMap;
 };
local: *;
};
"""
+
+
def test_fix_gnu_param_with_linux_version():
    """A linux_version record names the version node of the script."""
    export_file_content = """
C++ geobase5::details::lookup_impl::*
linux_version ver1.0
C getFactoryMap
"""
    assert run_fix_gnu_param(export_file_content) == """ver1.0 {
global:
 extern "C" {
 _ZN8geobase57details11lookup_impl*;
 _ZTIN8geobase57details11lookup_impl*;
 _ZTSN8geobase57details11lookup_impl*;
 _ZTTN8geobase57details11lookup_impl*;
 _ZTVN8geobase57details11lookup_impl*;
 _ZNK8geobase57details11lookup_impl*;
 getFactoryMap;
 };
local: *;
};
"""
diff --git a/build/scripts/link_exe.py b/build/scripts/link_exe.py
new file mode 100644
index 0000000000..eec8b20e1d
--- /dev/null
+++ b/build/scripts/link_exe.py
@@ -0,0 +1,186 @@
+import sys
+import subprocess
+import optparse
+
+from process_whole_archive_option import ProcessWholeArchiveOption
+
+
def get_leaks_suppressions(cmd):
    """Split *cmd* into (.supp suppression files, remaining arguments)."""
    supp = [arg for arg in cmd if arg.endswith(".supp")]
    newcmd = [arg for arg in cmd if not arg.endswith(".supp")]
    return supp, newcmd
+
+
# glibc link flags that must be dropped when linking against musl, which
# bundles these facilities into libc itself.
MUSL_LIBS = '-lc', '-lcrypt', '-ldl', '-lm', '-lpthread', '-lrt', '-lutil'


# Mapping of static CUDA/TensorRT library flags to their dynamic
# counterparts; entries mapping to '' have no dynamic variant and are
# replaced by an empty argument.
CUDA_LIBRARIES = {
    '-lcublas_static': '-lcublas',
    '-lcublasLt_static': '-lcublasLt',
    '-lcudart_static': '-lcudart',
    '-lcudnn_static': '-lcudnn',
    '-lcufft_static_nocallback': '-lcufft',
    '-lcurand_static': '-lcurand',
    '-lcusolver_static': '-lcusolver',
    '-lcusparse_static': '-lcusparse',
    '-lmyelin_compiler_static': '-lmyelin',
    '-lmyelin_executor_static': '-lnvcaffe_parser',
    '-lmyelin_pattern_library_static': '',
    '-lmyelin_pattern_runtime_static': '',
    '-lnvinfer_static': '-lnvinfer',
    '-lnvinfer_plugin_static': '-lnvinfer_plugin',
    '-lnvonnxparser_static': '-lnvonnxparser',
    '-lnvparsers_static': '-lnvparsers'
}
+
+
def remove_excessive_flags(cmd):
    """Drop .ios.interface and .pkg.fake placeholder inputs from the link command."""
    return [flag for flag in cmd if not flag.endswith(('.ios.interface', '.pkg.fake'))]
+
+
def fix_sanitize_flag(cmd):
    """
    Remove -fsanitize=<kind> flags whose runtime archive is already linked
    explicitly (linux targets only), and move those runtime archives out of
    the repeatedly-searched linker group, wrapped in --whole-archive.
    """
    for arg in cmd:
        # Non-linux targets keep the toolchain-provided sanitizer runtimes.
        if arg.startswith('--target') and 'linux' not in arg.lower():
            return cmd

    clang_rt = 'contrib/libs/clang14-rt/lib/'
    runtime_by_flag = {
        '-fsanitize=address': clang_rt + 'asan',
        '-fsanitize=memory': clang_rt + 'msan',
        '-fsanitize=leak': clang_rt + 'lsan',
        '-fsanitize=undefined': clang_rt + 'ubsan',
        '-fsanitize=thread': clang_rt + 'tsan',
    }

    runtime_libs = []
    remainder = []
    for arg in cmd:
        if arg.startswith('-fsanitize-coverage='):
            # explicit runtimes are linked below; stop clang adding its own
            remainder.append('-fno-sanitize-link-runtime')
        matched_runtime = runtime_by_flag.get(arg)
        if matched_runtime is not None and any(other.startswith(matched_runtime) for other in cmd):
            # the runtime is linked explicitly -> the -fsanitize flag is redundant
            continue
        if any(arg.startswith(prefix) for prefix in runtime_by_flag.values()):
            runtime_libs.append(arg)
            continue
        remainder.append(arg)

    # re-insert the runtime archives just before the linker group
    rebuilt = []
    for arg in remainder:
        if arg == '-Wl,--start-group':
            rebuilt += ['-Wl,--whole-archive'] + runtime_libs + ['-Wl,--no-whole-archive']
        rebuilt.append(arg)

    return rebuilt
+
+
def fix_cmd_for_musl(cmd):
    """Drop glibc link flags that musl provides built into libc."""
    return [flag for flag in cmd if flag not in MUSL_LIBS]
+
+
def fix_cmd_for_dynamic_cuda(cmd):
    """Replace static CUDA library flags with their dynamic counterparts."""
    return [CUDA_LIBRARIES.get(flag, flag) for flag in cmd]
+
+
def gen_default_suppressions(inputs, output, source_root):
    """Generate a C++ source providing sanitizer default-suppression hooks.

    Each input is a '<sanitizer>.supp' file (path relative to *source_root*)
    whose non-comment lines are collected per sanitizer; *output* receives
    one `__<sanitizer>_default_suppressions()` function per sanitizer.
    """
    import collections
    import os

    supp_map = collections.defaultdict(set)
    for filename in inputs:
        # suppression files are named <sanitizer>.supp (e.g. lsan.supp)
        sanitizer = os.path.basename(filename).split('.', 1)[0]
        with open(os.path.join(source_root, filename)) as src:
            for line in src:
                line = line.strip()
                if not line or line.startswith('#'):
                    continue
                supp_map[sanitizer].add(line)

    # Open in text mode: the original "wb" mode raises TypeError on str
    # writes under Python 3.  Sort for deterministic output.
    with open(output, "w") as dst:
        for supp_type, supps in sorted(supp_map.items()):
            dst.write('extern "C" const char *__%s_default_suppressions() {\n' % supp_type)
            dst.write(' return "{}";\n'.format('\\n'.join(sorted(supps))))
            dst.write('}\n')
+
+
def fix_blas_resolving(cmd):
    """Drop cblas archives from *cmd* when Intel MKL is also linked.

    MKL ships precompiled (it cannot be rebuilt with sanitizer
    instrumentation), so sanitizer builds substitute cblas for it.  If both
    appear in one link we assume a non-sanitized build and remove cblas to
    avoid duplicate symbol definitions.
    """
    mkl_linked = any(a.startswith('contrib/libs') and a.endswith('mkl-lp64.a') for a in cmd)
    if mkl_linked:
        return [a for a in cmd if not a.endswith('libcontrib-libs-cblas.a')]
    return cmd
+
+
def parse_args():
    """Parse wrapper options; the leftover argv is the raw link command."""
    parser = optparse.OptionParser()
    # everything after the first positional argument belongs to the linker
    parser.disable_interspersed_args()
    for flag_opt in ('--musl', '--dynamic-cuda'):
        parser.add_option(flag_opt, action='store_true')
    for list_opt in ('--whole-archive-peers', '--whole-archive-libs'):
        parser.add_option(list_opt, action='append')
    for str_opt in ('--custom-step', '--python', '--source-root',
                    '--arch', '--linker-output'):
        parser.add_option(str_opt)
    return parser.parse_args()
+
+
if __name__ == '__main__':
    opts, args = parse_args()

    # Rewrite the raw link command step by step.
    cmd = fix_blas_resolving(args)
    cmd = remove_excessive_flags(cmd)
    if opts.musl:
        cmd = fix_cmd_for_musl(cmd)

    cmd = fix_sanitize_flag(cmd)

    if opts.dynamic_cuda:
        cmd = fix_cmd_for_dynamic_cuda(cmd)
    cmd = ProcessWholeArchiveOption(opts.arch, opts.whole_archive_peers, opts.whole_archive_libs).construct_cmd(cmd)

    # NOTE(review): the custom step receives the unprocessed `args`, while
    # link_dyn_lib.py passes the rewritten `cmd` — confirm this asymmetry
    # is intentional.
    if opts.custom_step:
        assert opts.python
        subprocess.check_call([opts.python] + [opts.custom_step] + args)

    # Compile collected sanitizer suppressions into the binary via a
    # generated default_suppressions.cpp.
    supp, cmd = get_leaks_suppressions(cmd)
    if supp:
        src_file = "default_suppressions.cpp"
        gen_default_suppressions(supp, src_file, opts.source_root)
        cmd += [src_file]

    if opts.linker_output:
        stdout = open(opts.linker_output, 'w')
    else:
        stdout = sys.stdout

    rc = subprocess.call(cmd, shell=False, stderr=sys.stderr, stdout=stdout)
    sys.exit(rc)
diff --git a/build/scripts/link_fat_obj.py b/build/scripts/link_fat_obj.py
new file mode 100644
index 0000000000..45df247d10
--- /dev/null
+++ b/build/scripts/link_fat_obj.py
@@ -0,0 +1,97 @@
+import argparse
+import subprocess
+import sys
+
+from process_whole_archive_option import ProcessWholeArchiveOption
+
# Marker prefix that switches argument grouping below (e.g. '-Ya,input').
YA_ARG_PREFIX = '-Ya,'


def get_args():
    """Parse link_fat_obj arguments.

    argv is split into named groups by '-Ya,<name>' markers; everything
    before the first marker forms the 'default' group, which is fed to
    argparse.  A few known flags always land in 'default' regardless of
    the currently active group.
    Returns (parsed default options, {group name: [args]}).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--obj')
    parser.add_argument('--globals-lib')
    parser.add_argument('--lib', required=True)
    parser.add_argument('--arch', required=True)
    parser.add_argument('--build-root', default=None)
    parser.add_argument('--with-own-obj', action='store_true', default=False)
    parser.add_argument('--with-global-srcs', action='store_true', default=False)

    groups = {}
    current_group = groups.setdefault('default', [])
    # NOTE(review): value-taking options like --globals-lib have their value
    # appended to the *current* group, not 'default' — presumably these
    # flags only ever appear before the first -Ya marker.
    default_flags = ('--with-own-obj', '--globals-lib', '--with-global-srcs')
    for arg in sys.argv[1:]:
        if arg in default_flags:
            groups['default'].append(arg)
        elif arg.startswith(YA_ARG_PREFIX):
            current_group = groups.setdefault(arg[len(YA_ARG_PREFIX):], [])
        else:
            current_group.append(arg)

    return parser.parse_args(groups['default']), groups
+
+
def strip_suppression_files(srcs):
    """Filter out sanitizer .supp files — they are not linker inputs."""
    result = []
    for src in srcs:
        if not src.endswith('.supp'):
            result.append(src)
    return result
+
+
def strip_forceload_prefix(srcs):
    """Remove a leading '-Wl,-force_load,' from each entry, if present."""
    prefix = '-Wl,-force_load,'
    stripped = []
    for lib in srcs:
        if lib.startswith(prefix):
            stripped.append(lib[len(prefix):])
        else:
            stripped.append(lib)
    return stripped
+
+
def main():
    """Build a 'fat object': a single relocatable object linked from all
    inputs plus an archive of peer libraries, using the -Ya,-grouped
    tool/file lists provided on the command line."""
    args, groups = get_args()

    # Inputs
    # NOTE(review): missing -Ya,input / -Ya,global_srcs / -Ya,peers /
    # -Ya,linker / -Ya,archiver groups raise KeyError here — presumably the
    # build system always supplies them.
    auto_input = groups['input']

    # Outputs
    lib_output = args.lib
    obj_output = args.obj

    # Dependencies
    global_srcs = groups['global_srcs']
    global_srcs = strip_suppression_files(global_srcs)
    global_srcs = ProcessWholeArchiveOption(args.arch).construct_cmd(global_srcs)
    global_srcs = strip_forceload_prefix(global_srcs)
    peers = groups['peers']

    # Tools
    linker = groups['linker']
    archiver = groups['archiver']

    # Partial link (-Wl,-r) of all objects into one relocatable object.
    do_link = linker + ['-o', obj_output, '-Wl,-r', '-nodefaultlibs', '-nostartfiles'] + global_srcs + auto_input
    do_archive = archiver + [lib_output] + peers
    do_globals = None
    if args.globals_lib:
        do_globals = archiver + [args.globals_lib] + auto_input + global_srcs
    if args.with_own_obj:
        do_archive += auto_input
    if args.with_global_srcs:
        do_archive += global_srcs

    def call(c):
        # Run the command in build_root, streaming output; return exit code.
        proc = subprocess.Popen(c, shell=False, stderr=sys.stderr, stdout=sys.stdout, cwd=args.build_root)
        proc.communicate()
        return proc.returncode

    if obj_output:
        link_res = call(do_link)
        if link_res:
            sys.exit(link_res)

    if do_globals:
        glob_res = call(do_globals)
        if glob_res:
            sys.exit(glob_res)

    sys.exit(call(do_archive))


if __name__ == '__main__':
    main()
diff --git a/build/scripts/link_jsrc.py b/build/scripts/link_jsrc.py
new file mode 100644
index 0000000000..feae72fe4e
--- /dev/null
+++ b/build/scripts/link_jsrc.py
@@ -0,0 +1,27 @@
+import argparse
+import tarfile
+
+
def parse_args():
    """Parse options: --input is a list of .jsrc archives, --output the merged tar."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--output', required=True)
    parser.add_argument('--input', nargs='*')
    return parser.parse_args()
+
+
def main():
    """Merge every .jsrc tar from --input into a single --output tar."""
    args = parse_args()

    with tarfile.open(args.output, 'w') as merged:
        for archive_path in (p for p in args.input if p.endswith('.jsrc')):
            with tarfile.open(archive_path, 'r') as archive:
                for member in archive.getmembers():
                    if not member.name:
                        continue  # skip anonymous entries
                    if member.isdir():
                        merged.addfile(member)
                    else:
                        merged.addfile(member, archive.extractfile(member))


if __name__ == '__main__':
    main()
diff --git a/build/scripts/link_lib.py b/build/scripts/link_lib.py
new file mode 100644
index 0000000000..e73c02027a
--- /dev/null
+++ b/build/scripts/link_lib.py
@@ -0,0 +1,101 @@
+import sys
+import subprocess
+import tempfile
+import os
+import shutil
+
+
class Opts(object):
    """Positional-argument holder for link_lib.py.

    Layout: archiver, arch_type, llvm_ar_format, build_root, plugin,
    output, then the object files / archives to pack.
    """

    def __init__(self, args):
        self.archiver = args[0]
        self.arch_type = args[1]
        self.llvm_ar_format = args[2]
        self.build_root = args[3]
        self.plugin = args[4]
        self.output = args[5]
        auto_input = args[6:]

        self.need_modify = False
        self.extra_args = []

        if self.arch_type.endswith('_AR'):
            if self.arch_type == 'GNU_AR':
                self.create_flags = ['rcs']
                self.modify_flags = ['-M']
            elif self.arch_type == 'LLVM_AR':
                self.create_flags = ['rcs', '--format=%s' % self.llvm_ar_format]
                self.modify_flags = ['-M']
            # archives among the inputs force the MRI-script ("modify") path
            self.need_modify = any(item.endswith('.a') for item in auto_input)
            if self.need_modify:
                self.objs = [x for x in auto_input if x.endswith('.o')]
                self.libs = [x for x in auto_input if x.endswith('.a')]
            else:
                self.objs = auto_input
                self.libs = []
            self.output_opts = [self.output]
        elif self.arch_type == 'LIBTOOL':
            self.create_flags = ['-static']
            self.objs = auto_input
            self.libs = []
            self.output_opts = ['-o', self.output]
        elif self.arch_type == 'LIB':
            self.create_flags = []
            # MSVC lib.exe: slash-prefixed entries are options, the rest objects
            self.extra_args = [x for x in auto_input if x.startswith('/')]
            self.objs = [x for x in auto_input if not x.startswith('/')]
            self.libs = []
            self.output_opts = ['/OUT:' + self.output]

        self.plugin_flags = ['--plugin', self.plugin] if self.plugin != 'None' else []
+
+
def get_opts(args):
    """Build an Opts holder from the raw positional argument list."""
    return Opts(args)
+
+
+if __name__ == "__main__":
+ opts = get_opts(sys.argv[1:])
+
+ # There is a bug in llvm-ar. Some files with size slightly greater 2^32
+ # still have GNU format instead of GNU64 and cause link problems.
+ # Workaround just lowers llvm-ar's GNU64 threshold to 2^31.
+ if opts.arch_type == 'LLVM_AR':
+ os.environ['SYM64_THRESHOLD'] = '31'
+
+ def call():
+ try:
+ p = subprocess.Popen(cmd, stdin=stdin, cwd=opts.build_root)
+ rc = p.wait()
+ return rc
+ except OSError as e:
+ raise Exception('while running %s: %s' % (' '.join(cmd), e))
+
+ try:
+ os.unlink(opts.output)
+ except OSError:
+ pass
+
+ if not opts.need_modify:
+ cmd = [opts.archiver] + opts.create_flags + opts.plugin_flags + opts.extra_args + opts.output_opts + opts.objs
+ stdin = None
+ exit_code = call()
+ elif len(opts.objs) == 0 and len(opts.libs) == 1:
+ shutil.copy(opts.libs[0], opts.output)
+ exit_code = 0
+ else:
+ temp = tempfile.NamedTemporaryFile(dir=os.path.dirname(opts.output), delete=False)
+
+ with open(temp.name, 'w') as tmp:
+ tmp.write('CREATE {0}\n'.format(opts.output))
+ for lib in opts.libs:
+ tmp.write('ADDLIB {0}\n'.format(lib))
+ for obj in opts.objs:
+ tmp.write('ADDMOD {0}\n'.format(obj))
+ tmp.write('SAVE\n')
+ tmp.write('END\n')
+ cmd = [opts.archiver] + opts.modify_flags + opts.plugin_flags
+ stdin = open(temp.name)
+ exit_code = call()
+ os.remove(temp.name)
+
+ if exit_code != 0:
+ raise Exception('{0} returned non-zero exit code {1}. Stop.'.format(' '.join(cmd), exit_code))
diff --git a/build/scripts/list.py b/build/scripts/list.py
new file mode 100644
index 0000000000..7c3b2ae695
--- /dev/null
+++ b/build/scripts/list.py
@@ -0,0 +1,4 @@
+import sys
+
+if __name__ == "__main__":
+ print(' '.join(sys.argv[1:]))
diff --git a/build/scripts/llvm_opt_wrapper.py b/build/scripts/llvm_opt_wrapper.py
new file mode 100644
index 0000000000..38ca3004af
--- /dev/null
+++ b/build/scripts/llvm_opt_wrapper.py
@@ -0,0 +1,18 @@
+import subprocess
+import sys
+
+
def fix(s):
    """Restore commas in 'internalize' pass options.

    ymake always splits command-line arguments on commas, so commas are
    smuggled through as '#' and converted back here.
    """
    return s.replace('#', ',') if 'internalize' in s else s
+
+
if __name__ == '__main__':
    # argv[1] is the llvm opt binary; it and all following args are passed
    # through after restoring commas in 'internalize' options.
    path = sys.argv[1]
    args = [fix(s) for s in [path] + sys.argv[2:]]

    rc = subprocess.call(args, shell=False, stderr=sys.stderr, stdout=sys.stdout)
    sys.exit(rc)
diff --git a/build/scripts/make_container.py b/build/scripts/make_container.py
new file mode 100644
index 0000000000..a485baffdd
--- /dev/null
+++ b/build/scripts/make_container.py
@@ -0,0 +1,94 @@
+import os
+import shutil
+import stat
+import struct
+import subprocess
+import sys
+
+import container # 1
+
+
def main(output_path, entry_path, input_paths, squashfs_path):
    """Build a self-extracting container: the entry script, followed by a
    squashfs image of all layers, the unsquashfs binary itself, and an
    8-byte little-endian footer holding the appended payload size."""
    output_tmp_path = output_path + '.tmp'
    shutil.copy2(entry_path, output_tmp_path)
    st = os.stat(output_tmp_path)
    os.chmod(output_tmp_path, st.st_mode | stat.S_IWUSR)  # ensure we can append

    layer_paths = []
    other_paths = []
    for input_path in input_paths:
        (layer_paths if input_path.endswith('.container_layer') else other_paths).append(input_path)

    if len(other_paths) == 0:
        raise Exception('No program in container dependencies')

    if len(other_paths) > 1:
        raise Exception('Multiple non-layer inputs')

    program_path = other_paths[0]
    program_container_path = os.path.basename(program_path)

    # NOTE(review): 'entry', 'program_layer' and 'container_data' are
    # created in the current working directory and never cleaned up; a
    # second run in the same cwd fails on os.symlink — presumably the build
    # system always provides a fresh cwd.
    os.symlink(program_container_path, 'entry')
    add_cmd = [ os.path.join(squashfs_path, 'mksquashfs') ]
    add_cmd.extend([program_path, 'entry', 'program_layer'])
    # NOTE(review): the return code of mksquashfs is not checked.
    subprocess.run(add_cmd)

    layer_paths.append('program_layer')

    container.join_layers(layer_paths, 'container_data', squashfs_path)

    size = 0
    block_size = 1024 * 1024

    with open(output_tmp_path, 'ab') as output:
        # Append the squashfs image...
        with open('container_data', 'rb') as input_:
            while True:
                data = input_.read(block_size)
                output.write(data)
                size += len(data)

                if len(data) < block_size:
                    break

        # ...then the unsquashfs binary so the container is self-contained.
        with open(os.path.join(squashfs_path, 'unsquashfs'), 'rb') as input_:
            while True:
                data = input_.read(block_size)
                output.write(data)
                size += len(data)

                if len(data) == 0:
                    break

        # Footer: total size of everything appended above, as <Q (uint64 LE).
        output.write(struct.pack('<Q', size))

    os.rename(output_tmp_path, output_path)
+
+
def entry():
    """CLI wrapper: split inputs into layers and the single '_container_entry'
    script, then delegate to main()."""
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-o', '--output', required=True)
    parser.add_argument('-s', '--squashfs-path', required=True)
    parser.add_argument('input', nargs='*')
    args = parser.parse_args()

    entry_paths = []
    input_paths = []
    for path in args.input:
        if os.path.basename(path) == '_container_entry':
            entry_paths.append(path)
        else:
            input_paths.append(path)

    # exactly one entry script must be present among the inputs
    if len(entry_paths) != 1:
        raise Exception('Could not select container entry from {}'.format(entry_paths))

    return main(args.output, entry_paths[0], input_paths, args.squashfs_path)


if __name__ == '__main__':
    sys.exit(entry())
diff --git a/build/scripts/make_container_layer.py b/build/scripts/make_container_layer.py
new file mode 100644
index 0000000000..4f61f5a2e5
--- /dev/null
+++ b/build/scripts/make_container_layer.py
@@ -0,0 +1,24 @@
+import sys
+
+import container # 1
+
+
class UserError(Exception):
    """Error caused by invalid user input rather than an internal failure."""
    pass
+
+
def entry():
    """CLI wrapper around container.join_layers: merge the input layers
    into --output using the squashfs tools at --squashfs-path."""
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-o', '--output', required=True)
    parser.add_argument('-s', '--squashfs-path', required=True)
    parser.add_argument('input', nargs='*')
    opts = parser.parse_args()

    return container.join_layers(opts.input, opts.output, opts.squashfs_path)


if __name__ == '__main__':
    sys.exit(entry())
diff --git a/build/scripts/make_java_classpath_file.py b/build/scripts/make_java_classpath_file.py
new file mode 100644
index 0000000000..c70a7876d7
--- /dev/null
+++ b/build/scripts/make_java_classpath_file.py
@@ -0,0 +1,26 @@
+import os
+import sys
+
+import process_command_files as pcf
+
+
def make_cp_file(args):
    """Read newline-separated classpath entries from args[0] and write them
    os.pathsep-joined to args[1]."""
    source, destination = args[0], args[1]
    with open(source) as src:
        entries = [line.strip() for line in src if line.strip()]
    with open(destination, 'w') as dst:
        dst.write(os.pathsep.join(entries))
+
def make_cp_file_from_args(args):
    """Write args[1:] os.pathsep-joined into the file named by args[0]."""
    with open(args[0], 'w') as dst:
        dst.write(os.pathsep.join(args[1:]))
+
+
if __name__ == '__main__':
    # NOTE(review): sys.argv[1] must exist; pcf.get_args presumably expands
    # command files before the mode check — verify against callers.
    args = pcf.get_args(sys.argv[1:])
    if sys.argv[1] != '--from-args':
        make_cp_file(args)
    else:
        make_cp_file_from_args(args[1:])
diff --git a/build/scripts/make_java_srclists.py b/build/scripts/make_java_srclists.py
new file mode 100644
index 0000000000..65174bafd7
--- /dev/null
+++ b/build/scripts/make_java_srclists.py
@@ -0,0 +1,128 @@
+import os
+import sys
+import argparse
+
+import process_command_files as pcf
+import java_pack_to_file as jcov
+
+
def writelines(f, rng):
    """Write every item of *rng* to file object *f*, one per line."""
    for item in rng:
        f.write(item + '\n')
+
+
def add_rel_src_to_coverage(coverage, src, source_root):
    """Append *src* relative to *source_root* to *coverage*, skipping paths
    that lie outside the root."""
    rel_path = os.path.relpath(src, source_root)
    if not rel_path.startswith('..' + os.path.sep):
        coverage.append(rel_path)
+
+
def main():
    """Split the source list of a Java module into per-language src-list
    files (java/kotlin/groovy), resource/jsources list files and an
    optional coverage list.

    remaining_args is a flat stream of file names interleaved with control
    tokens: '--srcdir <dir>' sets the base for relative resource paths,
    '--resources <list-file>' / '--jsources <list-file>' flush the current
    group and start collecting into a new one.
    """
    args = pcf.get_args(sys.argv[1:])
    parser = argparse.ArgumentParser()
    parser.add_argument('--moddir')
    parser.add_argument('--java')
    parser.add_argument('--groovy')
    parser.add_argument('--kotlin')
    parser.add_argument('--coverage')
    parser.add_argument('--source-root')
    args, remaining_args = parser.parse_known_args(args)

    java = []
    kotlin = []
    groovy = []
    coverage = []

    cur_resources_list_file = None
    cur_jsources_list_file = None
    cur_srcdir = None
    cur_resources = []
    cur_jsources = []

    # Parser states: what kind of value the next argument is expected to be.
    FILE_ARG = 1
    RESOURCES_DIR_ARG = 2
    SRCDIR_ARG = 3
    JSOURCES_DIR_ARG = 4

    next_arg=FILE_ARG

    for src in remaining_args:
        if next_arg == RESOURCES_DIR_ARG:
            assert cur_resources_list_file is None
            cur_resources_list_file = src
            next_arg = FILE_ARG
            continue
        elif next_arg == JSOURCES_DIR_ARG:
            assert cur_jsources_list_file is None
            cur_jsources_list_file = src
            next_arg = FILE_ARG
            continue
        elif next_arg == SRCDIR_ARG:
            assert cur_srcdir is None
            cur_srcdir = src if os.path.isabs(src) else os.path.join(args.moddir, src)
            next_arg = FILE_ARG
            continue

        if src.endswith(".java"):
            # .java sources feed both the javac and the kotlinc source lists
            java.append(src)
            kotlin.append(src)
            if args.coverage and args.source_root:
                add_rel_src_to_coverage(coverage, src, args.source_root)
        elif args.kotlin and src.endswith(".kt"):
            kotlin.append(src)
            if args.coverage and args.source_root:
                add_rel_src_to_coverage(coverage, src, args.source_root)
        elif args.groovy and src.endswith(".groovy"):
            groovy.append(src)
        else:
            if src == '--resources':
                # flush the previous resources group, if any
                if cur_resources_list_file is not None:
                    with open(cur_resources_list_file, 'w') as f:
                        writelines(f, cur_resources)
                cur_resources_list_file = None
                cur_srcdir = None
                cur_resources = []
                next_arg = RESOURCES_DIR_ARG
                continue
            if src == '--jsources':
                # flush the previous jsources group, if any
                if cur_jsources_list_file is not None:
                    with open(cur_jsources_list_file, 'w') as f:
                        writelines(f, cur_jsources)
                cur_jsources_list_file = None
                cur_jsources = []
                next_arg = JSOURCES_DIR_ARG
                continue
            elif src == '--srcdir':
                next_arg = SRCDIR_ARG
                continue
            else:
                # plain resource file: stored relative to the current srcdir
                assert cur_srcdir is not None and cur_resources_list_file is not None
                cur_resources.append(os.path.relpath(src, cur_srcdir))

                if cur_jsources_list_file is not None:
                    assert cur_srcdir is not None
                    cur_jsources.append(os.path.relpath(src, cur_srcdir))

    # flush the trailing groups
    if cur_resources_list_file is not None:
        with open(cur_resources_list_file, 'w') as f:
            writelines(f, cur_resources)
    if cur_jsources_list_file is not None:
        with open(cur_jsources_list_file, 'w') as f:
            writelines(f, cur_jsources)

    if args.java:
        with open(args.java, 'w') as f:
            writelines(f, java)
    if args.kotlin:
        with open(args.kotlin, 'w') as f:
            writelines(f, kotlin)
    if args.groovy:
        with open(args.groovy, 'w') as f:
            writelines(f, groovy)
    if args.coverage:
        jcov.write_coverage_sources(args.coverage, args.source_root, coverage)

    return 0


if __name__ == '__main__':
    sys.exit(main())
diff --git a/build/scripts/make_manifest_from_bf.py b/build/scripts/make_manifest_from_bf.py
new file mode 100644
index 0000000000..bfea3ba3de
--- /dev/null
+++ b/build/scripts/make_manifest_from_bf.py
@@ -0,0 +1,28 @@
+import sys
+import zipfile
+import os
+import re
+
+
def prepare_path(path):
    """Convert an absolute path to a 'file:/' manifest URL; keep relative paths as-is."""
    return ('file:/' + path.lstrip('/')) if os.path.isabs(path) else path


def main(args):
    """Write a manifest jar whose Class-Path lists the entries of the
    build file args[0]; args[1] is the output jar path."""
    bf, mf = args[0], args[1]
    out_dir = os.path.dirname(mf)
    # Guard against mf being a bare file name: os.makedirs('') raises.
    if out_dir and not os.path.exists(out_dir):
        os.makedirs(out_dir)
    with open(bf) as f:
        class_path = f.read().strip()
    class_path = ' '.join(map(prepare_path, class_path.split('\n')))
    with zipfile.ZipFile(mf, 'w') as zf:
        # The manifest format limits line length, so the Class-Path value is
        # split into 60-character continuation chunks.
        lines = []
        while class_path:
            lines.append(class_path[:60])
            class_path = class_path[60:]
        if lines:
            zf.writestr('META-INF/MANIFEST.MF', 'Manifest-Version: 1.0\nClass-Path: \n ' + '\n '.join(lines) + ' \n\n')


if __name__ == '__main__':
    main(sys.argv[1:])
diff --git a/build/scripts/mangle_typeinfo_names.py b/build/scripts/mangle_typeinfo_names.py
new file mode 100755
index 0000000000..8f30a46552
--- /dev/null
+++ b/build/scripts/mangle_typeinfo_names.py
@@ -0,0 +1,317 @@
+#!/usr/bin/env python
+
+import base64
+import hashlib
+import io
+import os
+import struct
+import subprocess
+import sys
+from collections import namedtuple
+
+
+"""
+ELF-64 Object File Format: https://uclibc.org/docs/elf-64-gen.pdf
+ELF-32: https://uclibc.org/docs/elf.pdf
+"""
+
+
+MANGLED_HASH_SIZE = 15
+
+# len(base64(sha1(name)[:MANGLED_HASH_SIZE]) + '\x00')
+MANGLED_NAME_SIZE = 21
+
+
+ArObject = namedtuple('ArObject', ['header', 'data'])
+
+ElfSection = namedtuple('ElfSection', ['header_offset', 'name', 'data_offset', 'size', 'link', 'entry_size'])
+
+
+def find(it, pred):
+ return next(iter(filter(pred, it)), None)
+
+
+def mangle_name(name_bytes):
+ sha1 = hashlib.sha1()
+ sha1.update(name_bytes)
+ dgst = sha1.digest()
+ return base64.b64encode(dgst[:MANGLED_HASH_SIZE])
+
+
+def unpack(format, buffer, offset=0):
+ return struct.unpack(format, buffer[offset : offset + struct.calcsize(format)])
+
+
+def unpack_section_header(buffer, offset, elf64):
+ # read sh_name, sh_offset, sh_size, sh_link, sh_entsize from section headers (Elf64_Shdr/Elf32_Shdr):
+ #
+ # typedef struct
+ # {
+ # Elf64_Word sh_name; /* Section name */
+ # Elf64_Word sh_type; /* Section type */
+ # Elf64_Xword sh_flags; /* Section attributes */
+ # Elf64_Addr sh_addr; /* Virtual address in memory */
+ # Elf64_Off sh_offset; /* Offset in file */
+ # Elf64_Xword sh_size; /* Size of section */
+ # Elf64_Word sh_link; /* Link to other section */
+ # Elf64_Word sh_info; /* Miscellaneous information */
+ # Elf64_Xword sh_addralign; /* Address alignment boundary */
+ # Elf64_Xword sh_entsize; /* Size of entries, if section has table */
+ # } Elf64_Shdr;
+ #
+ # typedef struct {
+ # Elf32_Word sh_name;
+ # Elf32_Word sh_type;
+ # Elf32_Word sh_flags;
+ # Elf32_Addr sh_addr;
+ # Elf32_Off sh_offset;
+ # Elf32_Word sh_size;
+ # Elf32_Word sh_link;
+ # Elf32_Word sh_info;
+ # Elf32_Word sh_addralign;
+ # Elf32_Word sh_entsize;
+ # } Elf32_Shdr;
+
+ section_header_format = '< L 20x Q Q L 12x Q' if elf64 else '< L 12x L L L 8x L'
+ return ElfSection(offset, *unpack(section_header_format, buffer, offset))
+
+
+def read_elf_sections(elf_data, elf64):
+ # read e_shoff, e_shentsize, e_shnum, e_shstrndx from elf header (Elf64_Ehdr/Elf32_Ehdr):
+ #
+ # typedef struct
+ # {
+ # unsigned char e_ident[16]; /* ELF identification */
+ # Elf64_Half e_type; /* Object file type */
+ # Elf64_Half e_machine; /* Machine type */
+ # Elf64_Word e_version; /* Object file version */
+ # Elf64_Addr e_entry; /* Entry point address */
+ # Elf64_Off e_phoff; /* Program header offset */
+ # Elf64_Off e_shoff; /* Section header offset */
+ # Elf64_Word e_flags; /* Processor-specific flags */
+ # Elf64_Half e_ehsize; /* ELF header size */
+ # Elf64_Half e_phentsize; /* Size of program header entry */
+ # Elf64_Half e_phnum; /* Number of program header entries */
+ # Elf64_Half e_shentsize; /* Size of section header entry */
+ # Elf64_Half e_shnum; /* Number of section header entries */
+ # Elf64_Half e_shstrndx; /* Section name string table index */
+ # } Elf64_Ehdr;
+ #
+ # #define EI_NIDENT 16
+ #
+ # typedef struct {
+ # unsigned char e_ident[EI_NIDENT];
+ # Elf32_Half e_type;
+ # Elf32_Half e_machine;
+ # Elf32_Word e_version;
+ # Elf32_Addr e_entry;
+ # Elf32_Off e_phoff;
+ # Elf32_Off e_shoff;
+ # Elf32_Word e_flags;
+ # Elf32_Half e_ehsize;
+ # Elf32_Half e_phentsize;
+ # Elf32_Half e_phnum;
+ # Elf32_Half e_shentsize;
+ # Elf32_Half e_shnum;
+ # Elf32_Half e_shstrndx;
+ # } Elf32_Ehdr;
+
+ section_header_offset, section_header_entry_size, section_header_entries_number,\
+ section_name_string_table_index = unpack('< Q 10x 3H', elf_data, 40) if elf64 else unpack('< L 10x 3H', elf_data, 32)
+
+ # https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.sheader.html
+ # If the number of sections is greater than or equal to SHN_LORESERVE (0xff00), e_shnum has the value SHN_UNDEF (0)
+ # and the actual number of section header table entries is contained in the sh_size field of the section header
+ # at index 0 (otherwise, the sh_size member of the initial entry contains 0).
+ if section_header_entries_number == 0:
+ section_header_entries_number = unpack_section_header(elf_data, section_header_offset, elf64).size
+
+ sections = [unpack_section_header(elf_data, section_header_offset + i * section_header_entry_size, elf64)
+ for i in range(section_header_entries_number)]
+
+ # section names data
+ section_names_section = sections[section_name_string_table_index]
+ section_names_data = elf_data[section_names_section.data_offset : section_names_section.data_offset + section_names_section.size]
+
+ # read section names
+ for i, section in enumerate(sections):
+ sections[i] = section._replace(
+ name=section_names_data[section.name : section_names_data.find(b'\x00', section.name)].decode())
+
+ return sections
+
+
+def mangle_elf_typeinfo_names(elf_data, elf64, sections):
+ symbol_sizes = {}
+
+ for sect_i, section in enumerate(sections):
+ if not section.name.startswith('.rodata._ZTS') or section.size <= MANGLED_NAME_SIZE:
+ continue
+
+ typeinfo_name = elf_data[section.data_offset : section.data_offset + section.size]
+ mangled = mangle_name(typeinfo_name.rstrip(b'\x00')) + b'\x00'
+ if len(mangled) >= len(typeinfo_name):
+ continue
+
+ # patch section data
+ elf_data[section.data_offset : section.data_offset + len(mangled)] = mangled
+ # patch section size (sh_size in Elf64_Shdr/Elf32_Shdr)
+ if elf64:
+ elf_data[section.header_offset + 32 : section.header_offset + 40] = struct.pack('< Q', len(mangled))
+ else:
+ elf_data[section.header_offset + 20 : section.header_offset + 24] = struct.pack('< L', len(mangled))
+
+ symbol_sizes[section.name[len('.rodata.'):]] = len(mangled)
+
+ return symbol_sizes
+
+
+def patch_elf_symbol_sizes(elf_data, elf64, sections, symbol_sizes):
+ symtab = find(sections, lambda s: s.name == '.symtab')
+ if not symtab:
+ return
+
+    for sym_i in range(symtab.size // symtab.entry_size):
+ symtab_entry_offset = symtab.data_offset + symtab.entry_size * sym_i
+ symtab_entry = elf_data[symtab_entry_offset : symtab_entry_offset + symtab.entry_size]
+
+ # unpack symbol name offset in symbols name section (st_name) from Elf64_Sym/Elf32_Sym:
+ #
+ # typedef struct
+ # {
+ # Elf64_Word st_name; /* Symbol name */
+ # unsigned char st_info; /* Type and Binding attributes */
+ # unsigned char st_other; /* Reserved */
+ # Elf64_Half st_shndx; /* Section table index */
+ # Elf64_Addr st_value; /* Symbol value */
+ # Elf64_Xword st_size; /* Size of object (e.g., common) */
+ # } Elf64_Sym;
+ #
+ # typedef struct {
+ # Elf32_Word st_name;
+ # Elf32_Addr st_value;
+ # Elf32_Word st_size;
+ # unsigned char st_info;
+ # unsigned char st_other;
+ # Elf32_Half st_shndx;
+ # } Elf32_Sym;
+ symbol_name_offset = unpack('< L', symtab_entry)[0]
+
+ # symbol name offset from start of elf file
+ global_name_offset = sections[symtab.link].data_offset + symbol_name_offset
+
+ name = elf_data[global_name_offset : elf_data.find(b'\x00', global_name_offset)].decode()
+ symbol_size = symbol_sizes.get(name)
+ if symbol_size:
+ # patch st_size in Elf64_Sym/Elf32_Sym
+ if elf64:
+ elf_data[symtab_entry_offset + 16 : symtab_entry_offset + 24] = struct.pack('< Q', symbol_size)
+ else:
+ elf_data[symtab_entry_offset + 8 : symtab_entry_offset + 12] = struct.pack('< L', symbol_size)
+
+
+def mangle_elf(elf_data):
+ elf_data = bytearray(elf_data)
+
+ ei_mag, ei_class = unpack('4s B', elf_data)
+ assert ei_mag == b'\x7fELF'
+ if ei_class == 1: # ELFCLASS32
+ elf64 = False
+ elif ei_class == 2: # ELFCLASS64
+ elf64 = True
+ else:
+ raise Exception('unknown ei_class: ' + str(ei_class))
+
+ sections = read_elf_sections(elf_data, elf64)
+
+ symbol_sizes = mangle_elf_typeinfo_names(elf_data, elf64, sections)
+
+ if len(symbol_sizes) != 0:
+ patch_elf_symbol_sizes(elf_data, elf64, sections, symbol_sizes)
+
+ return elf_data
+
+
+def read_ar_object(ar):
+ # ar format: https://docs.oracle.com/cd/E36784_01/html/E36873/ar.h-3head.html
+ #
+ # #define ARFMAG "`\n" /* header trailer string */
+ #
+ # struct ar_hdr /* file member header */
+ # {
+ # char ar_name[16]; /* '/' terminated file member name */
+ # char ar_date[12]; /* file member date */
+ # char ar_uid[6] /* file member user identification */
+ # char ar_gid[6] /* file member group identification */
+ # char ar_mode[8] /* file member mode (octal) */
+ # char ar_size[10]; /* file member size */
+ # char ar_fmag[2]; /* header trailer string */
+ # };
+
+ header = ar.read(60)
+ if len(header) == 0:
+ return None
+ assert header[58:] == b'`\n'
+
+ size = int(bytes(header[48:58]).decode().rstrip(' '))
+ data = ar.read(size)
+ return ArObject(header, data)
+
+
+def is_elf_data(data):
+ return data[:4] == b'\x7fELF'
+
+
+def mangle_ar_impl(ar, out):
+ ar_magic = ar.read(8)
+ if ar_magic != b'!<arch>\n':
+ raise Exception('bad ar magic: {}'.format(ar_magic))
+
+ out.write(ar_magic)
+
+ string_table = None
+
+ while True:
+ obj = read_ar_object(ar)
+ if not obj:
+ break
+
+ data = mangle_elf(obj.data) if is_elf_data(obj.data) else obj.data
+
+ out.write(obj.header)
+ out.write(data)
+
+
+def mangle_ar(path):
+ out_path = path + '.mangled'
+ with open(path, 'rb') as ar:
+ try:
+ with open(out_path, 'wb') as out:
+ mangle_ar_impl(ar, out)
+ except:
+ os.unlink(out_path)
+ raise
+
+ os.rename(out_path, path)
+
+
+def main():
+ for arg in sys.argv[1:]:
+ if not ((arg.endswith('.o') or arg.endswith('.a')) and os.path.exists(arg)):
+ continue
+
+ if arg.endswith('.o'):
+ with open(arg, 'rb') as o:
+ data = o.read()
+ mangled = mangle_elf(data) if is_elf_data(data) else None
+
+ if mangled:
+ os.unlink(arg)
+ with open(arg, 'wb') as o:
+ o.write(mangled)
+ elif arg.endswith('.a'):
+ mangle_ar(arg)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/scripts/merge_coverage_data.py b/build/scripts/merge_coverage_data.py
new file mode 100644
index 0000000000..b7fa3c6a86
--- /dev/null
+++ b/build/scripts/merge_coverage_data.py
@@ -0,0 +1,32 @@
+import sys
+import tarfile
+import copy
+import os
+import uuid
+
+
+def main(args):
+ output_file, args = args[0], args[1:]
+    # heretic@: Splits args into files which can be merged (files) and files which must not be merged (expendables)
+ # expendables will be in output_file in form {name}{ordinal number of archive in args[]}.{extension}
+ try:
+ split_i = args.index('-no-merge')
+ except ValueError:
+ split_i = len(args)
+ files, expendables = args[:split_i], args[split_i + 1:]
+
+ with tarfile.open(output_file, 'w') as outf:
+ for x in files:
+ with tarfile.open(x) as tf:
+ for tarinfo in tf:
+ new_tarinfo = copy.deepcopy(tarinfo)
+ if new_tarinfo.name in expendables:
+ dirname, basename = os.path.split(new_tarinfo.name)
+ basename_parts = basename.split('.', 1)
+ new_basename = '.'.join([basename_parts[0] + str(uuid.uuid4())] + basename_parts[1:])
+ new_tarinfo.name = os.path.join(dirname, new_basename)
+ outf.addfile(new_tarinfo, tf.extractfile(tarinfo))
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/build/scripts/merge_files.py b/build/scripts/merge_files.py
new file mode 100644
index 0000000000..d42d6a2139
--- /dev/null
+++ b/build/scripts/merge_files.py
@@ -0,0 +1,8 @@
+import sys
+
+
+if __name__ == "__main__":
+ with open(sys.argv[1], "w") as f:
+ for appended in sys.argv[2:]:
+ with open(appended) as a:
+ f.write(a.read())
diff --git a/build/scripts/mkdir.py b/build/scripts/mkdir.py
new file mode 100755
index 0000000000..a326b29300
--- /dev/null
+++ b/build/scripts/mkdir.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+import os
+import sys
+
+
+def mkdir_p(directory):
+ if not os.path.exists(directory):
+ os.makedirs(directory)
+
+if __name__ == "__main__":
+ for directory in sys.argv[1:]:
+ mkdir_p(directory)
diff --git a/build/scripts/mkdocs_builder_wrapper.py b/build/scripts/mkdocs_builder_wrapper.py
new file mode 100644
index 0000000000..7a0df04190
--- /dev/null
+++ b/build/scripts/mkdocs_builder_wrapper.py
@@ -0,0 +1,36 @@
+from __future__ import unicode_literals
+import os
+import subprocess
+import sys
+
+
+def main():
+ cmd = []
+ build_root = sys.argv[1]
+ length = len(build_root)
+ is_dep = False
+ for arg in sys.argv[2:]:
+ if is_dep:
+ is_dep = False
+ if not arg.endswith('.tar.gz'):
+ continue
+ basename = os.path.basename(arg)
+ assert arg.startswith(build_root) and len(arg) > length + len(basename) and arg[length] in ('/', '\\')
+ cmd.extend([str('--dep'), str('{}:{}:{}'.format(build_root, os.path.dirname(arg[length+1:]), basename))])
+ elif arg == '--dep':
+ is_dep = True
+ else:
+ cmd.append(arg)
+ assert not is_dep
+ p = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = p.communicate()
+ if p.returncode:
+ if out:
+ sys.stderr.write('stdout:\n{}\n'.format(out.decode('utf-8')))
+ if err:
+ sys.stderr.write('stderr:\n{}\n'.format(err.decode('utf-8')))
+ sys.exit(p.returncode)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/scripts/mkver.py b/build/scripts/mkver.py
new file mode 100755
index 0000000000..321cdaade1
--- /dev/null
+++ b/build/scripts/mkver.py
@@ -0,0 +1,12 @@
+import sys
+
+if __name__ == '__main__':
+ with open(sys.argv[1], 'r') as f:
+ data = f.readline()
+
+ beg = data.find('(') + 1
+ end = data.find(')')
+ version = data[beg:end]
+
+ print '#pragma once'
+ print '#define DEBIAN_VERSION "%s"' % version
diff --git a/build/scripts/move.py b/build/scripts/move.py
new file mode 100644
index 0000000000..3f611fbc2e
--- /dev/null
+++ b/build/scripts/move.py
@@ -0,0 +1,15 @@
+import os
+import sys
+
+# /script/move.py <src-1> <tgt-1> <src-2> <tgt-2> ... <src-n> <tgt-n>
+# renames src-1 to tgt-1, src-2 to tgt-2, ..., src-n to tgt-n.
+
+
+def main():
+ assert len(sys.argv) % 2 == 1
+ for index in range(1, len(sys.argv), 2):
+ os.rename(sys.argv[index], sys.argv[index + 1])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/scripts/pack_ios.py b/build/scripts/pack_ios.py
new file mode 100644
index 0000000000..37c36d1f95
--- /dev/null
+++ b/build/scripts/pack_ios.py
@@ -0,0 +1,48 @@
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+import tarfile
+
+
+def just_do_it():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--binary", required=True, help="executable file")
+ parser.add_argument("--target", required=True, help="target archive path")
+ parser.add_argument("--temp-dir", required=True, help="temp dir")
+ parser.add_argument("peers", nargs='*')
+ args = parser.parse_args()
+ app_tar = [p for p in args.peers if p.endswith('.ios.interface')]
+ if not app_tar:
+ print >> sys.stderr, 'No one IOS_INTERFACE module found'
+ shutil.copyfile(args.binary, os.path.join(args.temp_dir, 'bin'))
+ if os.path.exists(args.target):
+ os.remove(args.target)
+ with tarfile.open(args.target, 'w') as tf:
+ tf.add(os.path.join(args.temp_dir, 'bin'), arcname=os.path.join(os.path.basename(args.binary) + '.app', 'bin'))
+ return
+ if len(app_tar) > 1:
+ app_tar = [p for p in args.peers if not p.endswith('.default.ios.interface')]
+ if len(app_tar) > 1:
+ print >> sys.stderr, 'Many IOS_INTERFACE modules found, {} will be used'.format(app_tar[-1])
+ app_tar = app_tar[-1]
+ with tarfile.open(app_tar) as tf:
+ tf.extractall(args.temp_dir)
+ tar_suffix = '.default.ios.interface' if app_tar.endswith('.default.ios.interface') else '.ios.interface'
+ app_unpacked_path = os.path.join(args.temp_dir, os.path.basename(app_tar)[:-len(tar_suffix)] + '.app')
+ if not os.path.exists(app_unpacked_path):
+ raise Exception('Bad IOS_INTERFACE resource: {}'.format(app_tar))
+ shutil.copyfile(args.binary, os.path.join(app_unpacked_path, 'bin'))
+ subprocess.check_call(['/usr/bin/codesign', '--force', '--sign', '-', app_unpacked_path])
+ if os.path.exists(args.target):
+ os.remove(args.target)
+ binary_origin_name = os.path.basename(args.binary)
+ while os.path.splitext(binary_origin_name)[1]:
+ binary_origin_name = os.path.splitext(binary_origin_name)[0]
+ with tarfile.open(args.target, 'w') as tf:
+ tf.add(app_unpacked_path, arcname=binary_origin_name + '.app', recursive=True)
+
+
+if __name__ == '__main__':
+ just_do_it()
diff --git a/build/scripts/pack_jcoverage_resources.py b/build/scripts/pack_jcoverage_resources.py
new file mode 100644
index 0000000000..f6e181067a
--- /dev/null
+++ b/build/scripts/pack_jcoverage_resources.py
@@ -0,0 +1,24 @@
+import sys
+import tarfile
+import os
+import subprocess
+
+
+def main(args):
+ output_file = args[0]
+ report_file = args[1]
+
+ res = subprocess.call(args[args.index('-end') + 1:])
+
+ if not os.path.exists(report_file):
+ print>>sys.stderr, 'Can\'t find jacoco exec file'
+ return res
+
+ with tarfile.open(output_file, 'w') as outf:
+ outf.add(report_file, arcname=os.path.basename(report_file))
+
+ return res
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/build/scripts/perl_wrapper.py b/build/scripts/perl_wrapper.py
new file mode 100644
index 0000000000..cb4027f1d3
--- /dev/null
+++ b/build/scripts/perl_wrapper.py
@@ -0,0 +1,24 @@
+import os
+import sys
+import shutil
+
+if __name__ == '__main__':
+ path = sys.argv[1]
+ to = sys.argv[-1]
+ fr = sys.argv[-2]
+ to_dir = os.path.dirname(to)
+
+ os.chdir(to_dir)
+
+ f1 = os.path.basename(fr)
+ fr_ = os.path.dirname(fr)
+ f2 = os.path.basename(fr_)
+ fr_ = os.path.dirname(fr_)
+
+ os.makedirs(f2)
+ shutil.copyfile(fr, os.path.join(f2, f1))
+
+ if path[0] != '/':
+ path = os.path.join(os.path.dirname(__file__), path)
+
+ os.execv(path, [path] + sys.argv[2:])
diff --git a/build/scripts/postprocess_go_fbs.py b/build/scripts/postprocess_go_fbs.py
new file mode 100644
index 0000000000..e6d5184a5e
--- /dev/null
+++ b/build/scripts/postprocess_go_fbs.py
@@ -0,0 +1,72 @@
+import argparse
+import re
+import os
+
+
+# very simple regexp to find go import statement in the source code
+# NOTE! only one-line comments are somehow considered
+IMPORT_DECL=re.compile(r'''
+ \bimport
+ (
+ \s+((\.|\w+)\s+)?"[^"]+" ( \s+//[^\n]* )?
+ | \s* \( \s* ( ( \s+ ((\.|\w+)\s+)? "[^"]+" )? ( \s* //[^\n]* )? )* \s* \)
+ )''', re.MULTILINE | re.DOTALL | re.VERBOSE)
+
+
+def parse_args():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--input-dir', required=True)
+ parser.add_argument('--map', nargs='*', default=None)
+
+ return parser.parse_args()
+
+
+def process_go_file(file_name, import_map):
+ content = ''
+ with open(file_name, 'r') as f:
+ content = f.read()
+
+ start = -1
+ end = -1
+ for it in IMPORT_DECL.finditer(content):
+ if start < 0:
+ start = it.start()
+ end = it.end()
+
+ if start < 0:
+ return
+
+ imports = content[start:end]
+ for namespace, path in import_map.items():
+ ns = namespace.split('.')
+ name = '__'.join(ns)
+ import_path = '/'.join(ns)
+ imports = imports.replace('{} "{}"'.format(name, import_path), '{} "a.yandex-team.ru/{}"'.format(name, path))
+
+ if imports != content[start:end]:
+ with open(file_name, 'w') as f:
+ f.write(content[:start])
+ f.write(imports)
+ f.write(content[end:])
+
+
+def main():
+ args = parse_args()
+
+ if not args.map:
+ return
+
+ raw_import_map = sorted(set(args.map))
+ import_map = dict(z.split('=', 1) for z in raw_import_map)
+ if len(raw_import_map) != len(import_map):
+ for k, v in (z.split('=', 1) for z in raw_import_map):
+ if v != import_map[k]:
+ raise Exception('import map [{}] contains different values for key [{}]: [{}] and [{}].'.format(args.map, k, v, import_map[k]))
+
+ for root, _, files in os.walk(args.input_dir):
+ for src in (f for f in files if f.endswith('.go')):
+ process_go_file(os.path.join(root, src), import_map)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/scripts/preprocess.py b/build/scripts/preprocess.py
new file mode 100644
index 0000000000..4657bef732
--- /dev/null
+++ b/build/scripts/preprocess.py
@@ -0,0 +1,48 @@
+import sys
+import os
+
+
+def load_file(p):
+ with open(p, 'r') as f:
+ return f.read()
+
+
+def step(base, data, hh):
+ def flt():
+ for l in data.split('\n'):
+ if l in hh:
+ pp = os.path.join(base, hh[l])
+
+ yield '\n\n' + load_file(pp) + '\n\n'
+
+ os.unlink(pp)
+ else:
+ yield l
+
+ return '\n'.join(flt())
+
+
+def subst_headers(path, headers):
+ hh = dict()
+
+ for h in headers:
+ hh['# include "' + h + '"'] = h
+
+ data = load_file(path)
+ prev = data
+
+ while True:
+ ret = step(os.path.dirname(path), prev, hh)
+
+ if ret == prev:
+ break
+
+ prev = ret
+
+ if data != prev:
+ with open(path, 'w') as f:
+ f.write(prev)
+
+
+if __name__ == '__main__':
+ subst_headers(sys.argv[1], ['stack.hh', 'position.hh', 'location.hh'])
diff --git a/build/scripts/process_command_files.pyc b/build/scripts/process_command_files.pyc
new file mode 100644
index 0000000000..7ea813740a
--- /dev/null
+++ b/build/scripts/process_command_files.pyc
Binary files differ
diff --git a/build/scripts/process_whole_archive_option.py b/build/scripts/process_whole_archive_option.py
new file mode 100644
index 0000000000..84d29869e9
--- /dev/null
+++ b/build/scripts/process_whole_archive_option.py
@@ -0,0 +1,183 @@
+import os
+
+import process_command_files as pcf
+
+
+class ProcessWholeArchiveOption():
+ def __init__(self, arch, peers=None, libs=None):
+ self.arch = arch.upper()
+ self.peers = { x : 0 for x in peers } if peers else None
+ self.libs = { x : 0 for x in libs } if libs else None
+ self.start_wa_marker = '--start-wa'
+ self.end_wa_marker = '--end-wa'
+
+ def _match_peer_lib(self, arg, ext):
+ key = None
+ if arg.endswith(ext):
+ key = os.path.dirname(arg)
+ return key if key and self.peers and key in self.peers else None
+
+ def _match_lib(self, arg):
+ return arg if self.libs and arg in self.libs else None
+
+ def _process_arg(self, arg, ext='.a'):
+ peer_key = self._match_peer_lib(arg, ext)
+ lib_key = self._match_lib(arg)
+ if peer_key:
+ self.peers[peer_key] += 1
+ if lib_key:
+ self.libs[lib_key] += 1
+ return peer_key if peer_key else lib_key
+
+ def _check_peers(self):
+ if self.peers:
+ for key, value in self.peers.items():
+ assert value > 0, '"{}" specified in WHOLE_ARCHIVE() macro is not used on link command'.format(key)
+
+ def _construct_cmd_apple(self, args):
+ force_load_flag = '-Wl,-force_load,'
+ is_inside_wa_markers = False
+
+ cmd = []
+ for arg in args:
+ if arg.startswith(force_load_flag):
+ cmd.append(arg)
+ elif arg == self.start_wa_marker:
+ is_inside_wa_markers = True
+ elif arg == self.end_wa_marker:
+ is_inside_wa_markers = False
+ elif is_inside_wa_markers:
+ cmd.append(force_load_flag + arg)
+ else:
+ key = self._process_arg(arg)
+ cmd.append(force_load_flag + arg if key else arg)
+
+ self._check_peers()
+
+ return cmd
+
+ def _construct_cmd_win(self, args):
+ whole_archive_prefix = '/WHOLEARCHIVE:'
+ is_inside_wa_markers = False
+
+ def add_prefix(arg, need_check_peers_and_libs):
+ key = self._process_arg(arg, '.lib') if need_check_peers_and_libs else arg
+ return whole_archive_prefix + arg if key else arg
+
+ def add_whole_archive_prefix(arg, need_check_peers_and_libs):
+ if not pcf.is_cmdfile_arg(arg):
+ return add_prefix(arg, need_check_peers_and_libs)
+
+ cmd_file_path = pcf.cmdfile_path(arg)
+ cf_args = pcf.read_from_command_file(cmd_file_path)
+ with open(cmd_file_path, 'w') as afile:
+ for cf_arg in cf_args:
+ afile.write(add_prefix(cf_arg, need_check_peers_and_libs) + "\n")
+ return arg
+
+ cmd = []
+ for arg in args:
+ if arg == self.start_wa_marker:
+ is_inside_wa_markers = True
+ elif arg == self.end_wa_marker:
+ is_inside_wa_markers = False
+ elif is_inside_wa_markers:
+ cmd.append(add_whole_archive_prefix(arg, False))
+ continue
+ elif self.peers or self.libs:
+ cmd.append(add_whole_archive_prefix(arg, True))
+ else:
+ cmd.append(arg)
+
+ self._check_peers()
+
+ return cmd
+
+ def _construct_cmd_linux(self, args):
+ whole_archive_flag = '-Wl,--whole-archive'
+ no_whole_archive_flag = '-Wl,--no-whole-archive'
+
+ def replace_markers(arg):
+ if arg == self.start_wa_marker:
+ return whole_archive_flag
+ if arg == self.end_wa_marker:
+ return no_whole_archive_flag
+ return arg
+
+ args = [replace_markers(arg) for arg in args]
+
+ cmd = []
+ is_inside_whole_archive = False
+ is_whole_archive = False
+ # We are trying not to create excessive sequences of consecutive flags
+ # -Wl,--no-whole-archive -Wl,--whole-archive ('externally' specified
+ # flags -Wl,--[no-]whole-archive are not taken for consideration in this
+ # optimization intentionally)
+ for arg in args:
+ if arg == whole_archive_flag:
+ is_inside_whole_archive = True
+ is_whole_archive = False
+ elif arg == no_whole_archive_flag:
+ is_inside_whole_archive = False
+ is_whole_archive = False
+ else:
+ key = self._process_arg(arg)
+ if not is_inside_whole_archive:
+ if key:
+ if not is_whole_archive:
+ cmd.append(whole_archive_flag)
+ is_whole_archive = True
+ elif is_whole_archive:
+ cmd.append(no_whole_archive_flag)
+ is_whole_archive = False
+
+ cmd.append(arg)
+
+ if is_whole_archive:
+ cmd.append(no_whole_archive_flag)
+
+ # There can be an empty sequence of archive files between
+ # -Wl, --whole-archive and -Wl, --no-whole-archive flags.
+ # As a result an unknown option error may occur, therefore to
+ # prevent this case we need to remove both flags from cmd.
+        # These flags affect only subsequent archive files.
+ if len(cmd) == 2:
+ return []
+
+ self._check_peers()
+
+ return cmd
+
+ def construct_cmd(self, args):
+ if self.arch in ('DARWIN', 'IOS', 'IOSSIM'):
+ return self._construct_cmd_apple(args)
+
+ if self.arch == 'WINDOWS':
+ return self._construct_cmd_win(args)
+
+ return self._construct_cmd_linux(args)
+
+
+def get_whole_archive_peers_and_libs(args):
+ remaining_args = []
+ peers = []
+ libs = []
+ peers_flag = '--whole-archive-peers'
+ libs_flag = '--whole-archive-libs'
+
+ next_is_peer = False
+ next_is_lib = False
+ for arg in args:
+ if arg == peers_flag:
+ next_is_peer = True
+ elif arg == libs_flag:
+ next_is_lib = True
+ elif next_is_peer:
+ peers.append(arg)
+ next_is_peer = False
+ elif next_is_lib:
+ libs.append(arg)
+ next_is_lib = False
+ else:
+ remaining_args.append(arg)
+ return remaining_args, peers, libs
diff --git a/build/scripts/process_whole_archive_option.pyc b/build/scripts/process_whole_archive_option.pyc
new file mode 100644
index 0000000000..318f50bc36
--- /dev/null
+++ b/build/scripts/process_whole_archive_option.pyc
Binary files differ
diff --git a/build/scripts/py_compile.py b/build/scripts/py_compile.py
new file mode 100755
index 0000000000..936dbe8816
--- /dev/null
+++ b/build/scripts/py_compile.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from __future__ import print_function, absolute_import, division
+
+import marshal
+import sys
+
+
+def main():
+ srcpathx, in_fname, out_fname = sys.argv[1:]
+ srcpath = srcpathx[:-1]
+
+ with open(in_fname, 'r') as in_file:
+ source = in_file.read()
+
+ code = compile(source, srcpath, 'exec', dont_inherit=True)
+
+ with open(out_fname, 'wb') as out_file:
+ marshal.dump(code, out_file)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/build/scripts/python_yndexer.py b/build/scripts/python_yndexer.py
new file mode 100644
index 0000000000..3180665387
--- /dev/null
+++ b/build/scripts/python_yndexer.py
@@ -0,0 +1,53 @@
+import os
+import sys
+import threading
+import subprocess
+
+
+def _try_to_kill(process):
+ try:
+ process.kill()
+ except Exception:
+ pass
+
+
+def touch(path):
+ if not os.path.exists(path):
+ with open(path, 'w') as _:
+ pass
+
+
+class Process(object):
+ def __init__(self, args):
+ self._process = subprocess.Popen(args)
+ self._event = threading.Event()
+ self._result = None
+ thread = threading.Thread(target=self._run)
+ thread.setDaemon(True)
+ thread.start()
+
+ def _run(self):
+ self._process.communicate()
+ self._result = self._process.returncode
+ self._event.set()
+
+ def wait(self, timeout):
+ self._event.wait(timeout=timeout)
+ _try_to_kill(self._process)
+ return self._result
+
+
+if __name__ == '__main__':
+ yndexer = sys.argv[1]
+ timeout = int(sys.argv[2])
+ output_file = sys.argv[3]
+ input_file = sys.argv[4]
+ partition_count = sys.argv[5]
+ partition_index = sys.argv[6]
+
+ process = Process([yndexer, '-f', input_file, '-y', output_file, '-c', partition_count, '-i', partition_index])
+ result = process.wait(timeout=timeout)
+
+ if result != 0:
+ print >> sys.stderr, 'Yndexing process finished with code', result
+ touch(output_file)
diff --git a/build/scripts/resolve_java_srcs.py b/build/scripts/resolve_java_srcs.py
new file mode 100644
index 0000000000..a2e6c20012
--- /dev/null
+++ b/build/scripts/resolve_java_srcs.py
@@ -0,0 +1,106 @@
+import os
+import argparse
+import re
+import sys
+
+
+def list_all_files(directory, prefix='/', hidden_files=False):
+ result = []
+ if os.path.exists(directory):
+ for i in os.listdir(directory):
+ abs_path = os.path.join(directory, i)
+            result += list_all_files(abs_path, prefix + i + '/', hidden_files) \
+ if os.path.isdir(abs_path) else ([prefix + i] if (hidden_files or not i.startswith('.')) else [])
+ return result
+
+
+def pattern_to_regexp(p):
+ return '^' + \
+ ('/' if not p.startswith('**') else '') + \
+ re.escape(p).replace(
+ r'\*\*\/', '[_DIR_]'
+ ).replace(
+ r'\*', '[_FILE_]'
+ ).replace(
+ '[_DIR_]', '(.*/)?'
+ ).replace(
+ '[_FILE_]', '([^/]*)'
+ ) + '$'
+
+
+def resolve_java_srcs(srcdir, include_patterns, exclude_patterns, all_resources, resolve_kotlin=False, resolve_groovy=False):
+ result = {'java': [], 'not_java': [], 'kotlin': [], 'groovy': []}
+ include_patterns_normal, include_patterns_hidden, exclude_patterns_normal, exclude_patterns_hidden = [], [], [], []
+ for vis, hid, patterns in ((include_patterns_normal, include_patterns_hidden, include_patterns), (exclude_patterns_normal, exclude_patterns_hidden, exclude_patterns),):
+ for pattern in patterns:
+ if (pattern if pattern.find('/') == -1 else pattern.rsplit('/', 1)[1]).startswith('.'):
+ hid.append(pattern)
+ else:
+ vis.append(pattern)
+ re_patterns = map(pattern_to_regexp, vis + hid)
+ if sys.platform in ('win32', 'darwin'):
+ re_patterns = [re.compile(i, re.IGNORECASE) for i in re_patterns]
+ else:
+ re_patterns = [re.compile(i) for i in re_patterns]
+ vis[:], hid[:] = re_patterns[:len(vis)], re_patterns[len(vis):]
+
+ for inc_patterns, exc_patterns, with_hidden_files in (
+ (include_patterns_normal, exclude_patterns_normal, False),
+ (include_patterns_hidden, exclude_patterns_hidden, True),
+ ):
+ for f in list_all_files(srcdir, hidden_files=with_hidden_files):
+ excluded = False
+
+ for exc_re in exc_patterns:
+ if exc_re.match(f):
+ excluded = True
+ break
+
+ if excluded:
+ continue
+
+ for inc_re in inc_patterns:
+ if inc_re.match(f):
+ s = os.path.normpath(f[1:])
+ if all_resources or not (f.endswith('.java') or f.endswith('.kt') or f.endswith('.groovy')):
+ result['not_java'].append(s)
+ elif f.endswith('.java'):
+ result['java'].append(os.path.join(srcdir, s))
+ elif f.endswith('.kt') and resolve_kotlin:
+ result['kotlin'].append(os.path.join(srcdir, s))
+ elif f.endswith('.groovy') and resolve_groovy:
+ result['groovy'].append(os.path.join(srcdir, s))
+ else:
+ result['not_java'].append(s)
+ break
+
+ return sorted(result['java']), sorted(result['not_java']), sorted(result['kotlin']), sorted(result['groovy'])
+
+
+def do_it(directory, sources_file, resources_file, kotlin_sources_file, groovy_sources_file, include_patterns, exclude_patterns, resolve_kotlin, resolve_groovy, append, all_resources):
+ j, r, k, g = resolve_java_srcs(directory, include_patterns, exclude_patterns, all_resources, resolve_kotlin, resolve_groovy)
+ mode = 'a' if append else 'w'
+ open(sources_file, mode).writelines(i + '\n' for i in j)
+ open(resources_file, mode).writelines(i + '\n' for i in r)
+ if kotlin_sources_file:
+ open(kotlin_sources_file, mode).writelines(i + '\n' for i in k + j)
+ if groovy_sources_file:
+ open(groovy_sources_file, mode).writelines(i + '\n' for i in g + j)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-d', '--directory', required=True)
+ parser.add_argument('-s', '--sources-file', required=True)
+ parser.add_argument('-r', '--resources-file', required=True)
+ parser.add_argument('-k', '--kotlin-sources-file', default=None)
+ parser.add_argument('-g', '--groovy-sources-file', default=None)
+ parser.add_argument('--append', action='store_true', default=False)
+ parser.add_argument('--all-resources', action='store_true', default=False)
+ parser.add_argument('--resolve-kotlin', action='store_true', default=False)
+ parser.add_argument('--resolve-groovy', action='store_true', default=False)
+ parser.add_argument('--include-patterns', nargs='*', default=[])
+ parser.add_argument('--exclude-patterns', nargs='*', default=[])
+ args = parser.parse_args()
+
+ do_it(**vars(args))
diff --git a/build/scripts/retry.py b/build/scripts/retry.py
new file mode 100644
index 0000000000..d14170bfec
--- /dev/null
+++ b/build/scripts/retry.py
@@ -0,0 +1,29 @@
+import time
+import functools
+
+
+# Partly copy-pasted from contrib/python/retry
+def retry_func(f, exceptions=Exception, tries=-1, delay=1, max_delay=None, backoff=1):
+ _tries, _delay = tries, delay
+ while _tries:
+ try:
+ return f()
+ except exceptions as e:
+ _tries -= 1
+ if not _tries:
+ raise
+
+ time.sleep(_delay)
+ _delay *= backoff
+
+ if max_delay is not None:
+ _delay = min(_delay, max_delay)
+
+
+def retry(**retry_kwargs):
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ return retry_func(lambda: func(*args, **kwargs), **retry_kwargs)
+ return wrapper
+ return decorator
diff --git a/build/scripts/rodata2asm.py b/build/scripts/rodata2asm.py
new file mode 100644
index 0000000000..555639499f
--- /dev/null
+++ b/build/scripts/rodata2asm.py
@@ -0,0 +1,31 @@
+import os
+import argparse
+
+
+def main():
+ parser = argparse.ArgumentParser(description='Convert rodata into asm source with embedded file content')
+    parser.add_argument('symbol', help='symbol name exported from generated file')
+ parser.add_argument('rodata', help='input .rodata file path')
+ parser.add_argument('asm', type=argparse.FileType('w', encoding='UTF-8'), help='destination .asm file path')
+ parser.add_argument('--elf', action='store_true')
+
+ args = parser.parse_args()
+
+ file_size = os.path.getsize(args.rodata)
+
+ args.asm.write('global ' + args.symbol + '\n')
+ args.asm.write('global ' + args.symbol + 'Size' + '\n')
+ args.asm.write('SECTION .rodata ALIGN=16\n')
+ args.asm.write(args.symbol + ':\nincbin "' + args.rodata + '"\n')
+ args.asm.write('align 4, db 0\n')
+ args.asm.write(args.symbol + 'Size:\ndd ' + str(file_size) + '\n')
+
+ if args.elf:
+ args.asm.write('size ' + args.symbol + ' ' + str(file_size) + '\n')
+ args.asm.write('size ' + args.symbol + 'Size 4\n')
+
+ args.asm.close()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/scripts/run_ios_simulator.py b/build/scripts/run_ios_simulator.py
new file mode 100644
index 0000000000..052c855b77
--- /dev/null
+++ b/build/scripts/run_ios_simulator.py
@@ -0,0 +1,79 @@
+import argparse
+import json
+import os
+import subprocess
+import sys
+
+
+def just_do_it():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--action", choices=["create", "spawn", "kill"])
+ parser.add_argument("--simctl", help="simctl binary path")
+ parser.add_argument("--profiles", help="profiles path")
+ parser.add_argument("--device-dir", help="devices directory")
+ parser.add_argument("--device-name", help="temp device name")
+ args, tail = parser.parse_known_args()
+ if args.action == 'create':
+ action_create(args.simctl, args.profiles, args.device_dir, args.device_name, tail)
+ elif args.action == "spawn":
+ action_spawn(args.simctl, args.profiles, args.device_dir, args.device_name, tail)
+ elif args.action == "kill":
+ action_kill(args.simctl, args.profiles, args.device_dir, args.device_name)
+
+
+def action_create(simctl, profiles, device_dir, name, args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--device-type", default="com.apple.CoreSimulator.SimDeviceType.iPhone-X")
+ parser.add_argument("--device-runtime", default="com.apple.CoreSimulator.SimRuntime.iOS-12-1")
+ args = parser.parse_args(args)
+ all_devices = list(get_all_devices(simctl, profiles, device_dir))
+ if filter(lambda x: x["name"] == name, all_devices):
+ raise Exception("Device named {} already exists".format(name))
+ subprocess.check_call([simctl, "--profiles", profiles, "--set", device_dir, "create", name, args.device_type, args.device_runtime])
+ created = filter(lambda x: x["name"] == name, get_all_devices(simctl, profiles, device_dir))
+ if not created:
+ raise Exception("Creation error: temp device named {} not found".format(name))
+ created = created[0]
+ if created["availability"] != "(available)":
+ raise Exception("Creation error: temp device {} status is {} ((available) expected)".format(name, created["availability"]))
+
+
+def action_spawn(simctl, profiles, device_dir, name, args):
+ device = filter(lambda x: x["name"] == name, get_all_devices(simctl, profiles, device_dir))
+ if not device:
+ raise Exception("Can't spawn process: device named {} not found".format(name))
+ if len(device) > 1:
+ raise Exception("Can't spawn process: too many devices named {} found".format(name))
+ device = device[0]
+ os.execv(simctl, [simctl, "--profiles", profiles, "--set", device_dir, "spawn", device["udid"]] + args)
+
+
+def action_kill(simctl, profiles, device_dir, name):
+ device = filter(lambda x: x["name"] == name, get_all_devices(simctl, profiles, device_dir))
+ if not device:
+ print >> sys.stderr, "Device named {} not found; do nothing".format(name)
+ return
+ if len(device) > 1:
+ raise Exception("Can't remove: too many devices named {}:\n{}".format(name, '\n'.join(i for i in device)))
+ device = device[0]
+ os.execv(simctl, [simctl, "--profiles", profiles, "--set", device_dir, "delete", device["udid"]])
+
+
+def get_all_devices(simctl, profiles, device_dir):
+ p = subprocess.Popen([simctl, "--profiles", profiles, "--set", device_dir, "list", "--json", "devices"], stdout=subprocess.PIPE)
+ out, _ = p.communicate()
+ rc = p.wait()
+ if rc:
+ raise Exception("Devices list command return code is {}\nstdout:\n{}".format(rc, out))
+ raw_object = json.loads(out)
+ if "devices" not in raw_object:
+ raise Exception("Devices not found in\n{}".format(json.dumps(raw_object)))
+ raw_object = raw_object["devices"]
+ for os_name, devices in raw_object.items():
+ for device in devices:
+ device["os_name"] = os_name
+ yield device
+
+
+if __name__ == '__main__':
+ just_do_it()
diff --git a/build/scripts/run_javac.py b/build/scripts/run_javac.py
new file mode 100644
index 0000000000..c35546e0fe
--- /dev/null
+++ b/build/scripts/run_javac.py
@@ -0,0 +1,122 @@
+import sys
+import subprocess
+import optparse
+import re
+
+
+def parse_args():
+ parser = optparse.OptionParser()
+ parser.disable_interspersed_args()
+ parser.add_option('--sources-list')
+ parser.add_option('--verbose', default=False, action='store_true')
+ parser.add_option('--remove-notes', default=False, action='store_true')
+ parser.add_option('--ignore-errors', default=False, action='store_true')
+ parser.add_option('--kotlin', default=False, action='store_true')
+ return parser.parse_args()
+
+
+COLORING = {
+ r'^(?P<path>.*):(?P<line>\d*): error: (?P<msg>.*)': lambda m: '[[unimp]]{path}[[rst]]:[[alt2]]{line}[[rst]]: [[c:light-red]]error[[rst]]: [[bad]]{msg}[[rst]]'.format(
+ path=m.group('path'),
+ line=m.group('line'),
+ msg=m.group('msg'),
+ ),
+ r'^(?P<path>.*):(?P<line>\d*): warning: (?P<msg>.*)': lambda m: '[[unimp]]{path}[[rst]]:[[alt2]]{line}[[rst]]: [[c:light-yellow]]warning[[rst]]: {msg}'.format(
+ path=m.group('path'),
+ line=m.group('line'),
+ msg=m.group('msg'),
+ ),
+ r'^warning: ': lambda m: '[[c:light-yellow]]warning[[rst]]: ',
+ r'^error: (?P<msg>.*)': lambda m: '[[c:light-red]]error[[rst]]: [[bad]]{msg}[[rst]]'.format(msg=m.group('msg')),
+ r'^Note: ': lambda m: '[[c:light-cyan]]Note[[rst]]: ',
+}
+
+
+def colorize(err):
+ for regex, sub in COLORING.iteritems():
+ err = re.sub(regex, sub, err, flags=re.MULTILINE)
+ return err
+
+
+def remove_notes(err):
+ return '\n'.join([line for line in err.split('\n') if not line.startswith('Note:')])
+
+
+def find_javac(cmd):
+ if not cmd:
+ return None
+ if cmd[0].endswith('javac') or cmd[0].endswith('javac.exe'):
+ return cmd[0]
+ if len(cmd) > 2 and cmd[1].endswith('build_java_with_error_prone.py'):
+ for javas in ('java', 'javac'):
+ if cmd[2].endswith(javas) or cmd[2].endswith(javas + '.exe'):
+ return cmd[2]
+ return None
+
+
+# temporary, for jdk8/jdk9+ compatibility
+def fix_cmd(cmd):
+ if not cmd:
+ return cmd
+ javac = find_javac(cmd)
+ if not javac:
+ return cmd
+ p = subprocess.Popen([javac, '-version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = p.communicate()
+ out, err = out.strip(), err.strip()
+ for prefix in ('javac 1.8', 'java version "1.8'):
+ for raw_out in ((out or ''), (err or '')):
+ for line in raw_out.split('\n'):
+ if line.startswith(prefix):
+ res = []
+ i = 0
+ while i < len(cmd):
+ for option in ('--add-exports', '--add-modules'):
+ if cmd[i] == option:
+ i += 1
+ break
+ elif cmd[i].startswith(option + '='):
+ break
+ else:
+ res.append(cmd[i])
+ i += 1
+ return res
+ return cmd
+
+
+def main():
+ opts, cmd = parse_args()
+
+ with open(opts.sources_list) as f:
+ input_files = f.read().strip().split()
+
+ if opts.kotlin:
+ input_files = [i for i in input_files if i.endswith('.kt')]
+
+ if not input_files:
+ if opts.verbose:
+ sys.stderr.write('No files to compile, javac is not launched.\n')
+
+ else:
+ p = subprocess.Popen(fix_cmd(cmd), stderr=subprocess.PIPE)
+ _, err = p.communicate()
+ rc = p.wait()
+
+ if opts.remove_notes:
+ err = remove_notes(err)
+
+ try:
+ err = colorize(err)
+
+ except Exception:
+ pass
+
+ if opts.ignore_errors and rc:
+ sys.stderr.write('error: javac actually failed with exit code {}\n'.format(rc))
+ rc = 0
+ sys.stderr.write(err)
+ sys.exit(rc)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/scripts/run_junit.py b/build/scripts/run_junit.py
new file mode 100644
index 0000000000..089f149f72
--- /dev/null
+++ b/build/scripts/run_junit.py
@@ -0,0 +1,65 @@
+import os
+import sys
+
+SHUTDOWN_SIGNAL = 'SIGUSR1'
+
+
+class SignalInterruptionError(Exception):
+ pass
+
+
+def on_shutdown(s, f):
+ raise SignalInterruptionError()
+
+
+def main():
+ args = sys.argv[1:]
+
+ def execve():
+ os.execve(args[0], args, os.environ)
+
+ jar_binary = args[args.index('--jar-binary') + 1]
+ java_bin_dir = os.path.dirname(jar_binary)
+ jstack_binary = os.path.join(java_bin_dir, 'jstack')
+
+ if not os.path.exists(jstack_binary):
+ sys.stderr.write("jstack is missing: {}\n".format(jstack_binary))
+ execve()
+
+ import signal
+
+ signum = getattr(signal, SHUTDOWN_SIGNAL, None)
+
+ if signum is None:
+ execve()
+
+ import subprocess
+
+ proc = subprocess.Popen(args)
+ signal.signal(signum, on_shutdown)
+ timeout = False
+
+ try:
+ proc.wait()
+ except SignalInterruptionError:
+ sys.stderr.write("\nGot {} signal: going to shutdown junit\n".format(signum))
+ # Dump stack traces
+ subprocess.call([jstack_binary, str(proc.pid)], stdout=sys.stderr)
+ # Kill junit - for more info see DEVTOOLS-7636
+ os.kill(proc.pid, signal.SIGKILL)
+ proc.wait()
+ timeout = True
+
+ if proc.returncode:
+ sys.stderr.write('java exit code: {}\n'.format(proc.returncode))
+ if timeout:
+ # In case of timeout return specific exit code
+ # https://a.yandex-team.ru/arc/trunk/arcadia/devtools/ya/test/const/__init__.py?rev=r8578188#L301
+ proc.returncode = 10
+ sys.stderr.write('java exit code changed to {}\n'.format(proc.returncode))
+
+ return proc.returncode
+
+
+if __name__ == '__main__':
+ exit(main())
diff --git a/build/scripts/run_llvm_dsymutil.py b/build/scripts/run_llvm_dsymutil.py
new file mode 100644
index 0000000000..4f43362ad9
--- /dev/null
+++ b/build/scripts/run_llvm_dsymutil.py
@@ -0,0 +1,11 @@
+import os
+import sys
+import subprocess
+
+
+if __name__ == '__main__':
+ with open(os.devnull, 'w') as fnull:
+ p = subprocess.Popen(sys.argv[1:], shell=False, stderr=fnull, stdout=sys.stdout)
+
+ p.communicate()
+ sys.exit(p.returncode)
diff --git a/build/scripts/run_msvc_wine.py b/build/scripts/run_msvc_wine.py
new file mode 100644
index 0000000000..eb5ed3eba5
--- /dev/null
+++ b/build/scripts/run_msvc_wine.py
@@ -0,0 +1,586 @@
+import sys
+import os
+import re
+import subprocess
+import signal
+import time
+import json
+import argparse
+import errno
+
+import process_command_files as pcf
+import process_whole_archive_option as pwa
+
+
+procs = []
+build_kekeke = 45
+
+
+def stringize(s):
+ return s.encode('utf-8') if isinstance(s, unicode) else s
+
+
+def run_subprocess(*args, **kwargs):
+ if 'env' in kwargs:
+ kwargs['env'] = {stringize(k): stringize(v) for k, v in kwargs['env'].iteritems()}
+
+ p = subprocess.Popen(*args, **kwargs)
+
+ procs.append(p)
+
+ return p
+
+
+def run_subprocess_with_timeout(timeout, args):
+ attempts_remaining = 5
+ delay = 1
+ p = None
+ while True:
+ try:
+ p = run_subprocess(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = p.communicate(timeout=timeout)
+ return p, stdout, stderr
+ except subprocess.TimeoutExpired as e:
+ print >>sys.stderr, 'timeout running {0}, error {1}, delay {2} seconds'.format(args, str(e), delay)
+ if p is not None:
+ try:
+ p.kill()
+ p.wait(timeout=1)
+ except Exception:
+ pass
+ attempts_remaining -= 1
+ if attempts_remaining == 0:
+ raise
+ time.sleep(delay)
+ delay = min(2 * delay, 4)
+
+
+def terminate_slaves():
+ for p in procs:
+ try:
+ p.terminate()
+ except Exception:
+ pass
+
+
+def sig_term(sig, fr):
+ terminate_slaves()
+ sys.exit(sig)
+
+
+def subst_path(l):
+ if len(l) > 3:
+ if l[:3].lower() in ('z:\\', 'z:/'):
+ return l[2:].replace('\\', '/')
+
+ return l
+
+
+def call_wine_cmd_once(wine, cmd, env, mode):
+ p = run_subprocess(wine + cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, close_fds=True, shell=False)
+
+ output = find_cmd_out(cmd)
+ error = None
+ if output is not None and os.path.exists(output):
+ try:
+ os.remove(output)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ error = e
+ except Exception as e:
+ error = e
+
+ if error is not None:
+ print >> sys.stderr, 'Output {} already exists and we have failed to remove it: {}'.format(output, error)
+
+ # print >>sys.stderr, cmd, env, wine
+
+ stdout_and_stderr, _ = p.communicate()
+
+ return_code = p.returncode
+ if not stdout_and_stderr:
+ if return_code != 0:
+ raise Exception('wine did something strange')
+
+ return return_code
+ elif ' : fatal error ' in stdout_and_stderr:
+ return_code = 1
+ elif ' : error ' in stdout_and_stderr:
+ return_code = 2
+
+ lines = [x.strip() for x in stdout_and_stderr.split('\n')]
+
+ prefixes = [
+ 'Microsoft (R)',
+ 'Copyright (C)',
+ 'Application tried to create a window',
+ 'The graphics driver is missing',
+ 'Could not load wine-gecko',
+ 'wine: configuration in',
+ 'wine: created the configuration directory',
+ 'libpng warning:'
+ ]
+
+ suffixes = [
+ '.c',
+ '.cxx',
+ '.cc',
+ '.cpp',
+ '.masm',
+ ]
+
+ substrs = [
+ 'Creating library Z:',
+ 'err:heap',
+ 'err:menubuilder:',
+ 'err:msvcrt',
+ 'err:ole:',
+ 'err:wincodecs:',
+ 'err:winediag:',
+ ]
+
+ def good_line(l):
+ for x in prefixes:
+ if l.startswith(x):
+ return False
+
+ for x in suffixes:
+ if l.endswith(x):
+ return False
+
+ for x in substrs:
+ if x in l:
+ return False
+
+ return True
+
+ def filter_lines():
+ for l in lines:
+ if good_line(l):
+ yield subst_path(l.strip())
+
+ stdout_and_stderr = '\n'.join(filter_lines()).strip()
+
+ if stdout_and_stderr:
+ print >>sys.stderr, stdout_and_stderr
+
+ return return_code
+
+
+def prepare_vc(fr, to):
+ for p in os.listdir(fr):
+ fr_p = os.path.join(fr, p)
+ to_p = os.path.join(to, p)
+
+ if not os.path.exists(to_p):
+ print >>sys.stderr, 'install %s -> %s' % (fr_p, to_p)
+
+ os.link(fr_p, to_p)
+
+
+def run_slave():
+ args = json.loads(sys.argv[3])
+ wine = sys.argv[1]
+
+ signal.signal(signal.SIGTERM, sig_term)
+
+ if args.get('tout', None):
+ signal.signal(signal.SIGALRM, sig_term)
+ signal.alarm(args['tout'])
+
+ tout = 0.1
+
+ while True:
+ try:
+ return call_wine_cmd_once([wine], args['cmd'], args['env'], args['mode'])
+ except Exception as e:
+ print >>sys.stderr, '%s, will retry in %s' % (str(e), tout)
+
+ time.sleep(tout)
+ tout = min(2 * tout, 4)
+
+
+def find_cmd_out(args):
+ for arg in args:
+ if arg.startswith('/Fo'):
+ return arg[3:]
+
+ if arg.startswith('/OUT:'):
+ return arg[5:]
+
+
+def calc_zero_cnt(data):
+ zero_cnt = 0
+
+ for ch in data:
+ if ch == chr(0):
+ zero_cnt += 1
+
+ return zero_cnt
+
+
+def is_good_file(p):
+ if not os.path.isfile(p):
+ return False
+
+ if os.path.getsize(p) < 300:
+ return False
+
+ asm_pattern = re.compile(r'asm(\.\w+)?\.obj$')
+ if asm_pattern.search(p):
+ pass
+ elif p.endswith('.obj'):
+ with open(p, 'rb') as f:
+ prefix = f.read(200)
+
+ if ord(prefix[0]) != 0:
+ return False
+
+ if ord(prefix[1]) != 0:
+ return False
+
+ if ord(prefix[2]) != 0xFF:
+ return False
+
+ if ord(prefix[3]) != 0xFF:
+ return False
+
+ if calc_zero_cnt(prefix) > 195:
+ return False
+
+ f.seek(-100, os.SEEK_END)
+ last = f.read(100)
+
+ if calc_zero_cnt(last) > 95:
+ return False
+
+ if last[-1] != chr(0):
+ return False
+ elif p.endswith('.lib'):
+ with open(p, 'rb') as f:
+ if f.read(7) != '!<arch>':
+ return False
+
+ return True
+
+
+RED = '\x1b[31;1m'
+GRAY = '\x1b[30;1m'
+RST = '\x1b[0m'
+MGT = '\x1b[35m'
+YEL = '\x1b[33m'
+GRN = '\x1b[32m'
+CYA = '\x1b[36m'
+
+
+def colorize_strings(l):
+ p = l.find("'")
+
+ if p >= 0:
+ yield l[:p]
+
+ l = l[p + 1:]
+
+ p = l.find("'")
+
+ if p >= 0:
+ yield CYA + "'" + subst_path(l[:p]) + "'" + RST
+
+ for x in colorize_strings(l[p + 1:]):
+ yield x
+ else:
+ yield "'" + l
+ else:
+ yield l
+
+
+def colorize_line(l):
+ lll = l
+
+ try:
+ parts = []
+
+ if l.startswith('(compiler file'):
+ return ''.join(colorize_strings(l))
+
+ if l.startswith('/'):
+ p = l.find('(')
+ parts.append(GRAY + l[:p] + RST)
+ l = l[p:]
+
+ if l and l.startswith('('):
+ p = l.find(')')
+ parts.append(':' + MGT + l[1:p] + RST)
+ l = l[p + 1:]
+
+ if l:
+ if l.startswith(' : '):
+ l = l[1:]
+
+ if l.startswith(': error'):
+ parts.append(': ' + RED + 'error' + RST)
+ l = l[7:]
+ elif l.startswith(': warning'):
+ parts.append(': ' + YEL + 'warning' + RST)
+ l = l[9:]
+ elif l.startswith(': note'):
+ parts.append(': ' + GRN + 'note' + RST)
+ l = l[6:]
+ elif l.startswith('fatal error'):
+ parts.append(RED + 'fatal error' + RST)
+ l = l[11:]
+
+ if l:
+ parts.extend(colorize_strings(l))
+
+ return ''.join(parts)
+ except Exception:
+ return lll
+
+
+def colorize(out):
+ return '\n'.join(colorize_line(l) for l in out.split('\n'))
+
+
+def trim_path(path, winepath):
+ p1, p1_stdout, p1_stderr = run_subprocess_with_timeout(60, [winepath, '-w', path])
+ win_path = p1_stdout.strip()
+
+ if p1.returncode != 0 or not win_path:
+ # Fall back to only winepath -s
+ win_path = path
+
+ p2, p2_stdout, p2_stderr = run_subprocess_with_timeout(60, [winepath, '-s', win_path])
+ short_path = p2_stdout.strip()
+
+ check_path = short_path
+ if check_path.startswith(('Z:', 'z:')):
+ check_path = check_path[2:]
+
+ if not check_path[1:].startswith((path[1:4], path[1:4].upper())):
+ raise Exception(
+ 'Cannot trim path {}; 1st winepath exit code: {}, stdout:\n{}\n stderr:\n{}\n 2nd winepath exit code: {}, stdout:\n{}\n stderr:\n{}'.format(
+ path, p1.returncode, p1_stdout, p1_stderr, p2.returncode, p2_stdout, p2_stderr
+ ))
+
+ return short_path
+
+
+def downsize_path(path, short_names):
+ flag = ''
+ if path.startswith('/Fo'):
+ flag = '/Fo'
+ path = path[3:]
+
+ for full_name, short_name in short_names.items():
+ if path.startswith(full_name):
+ path = path.replace(full_name, short_name)
+
+ return flag + path
+
+
+def make_full_path_arg(arg, bld_root, short_root):
+ if arg[0] != '/' and len(os.path.join(bld_root, arg)) > 250:
+ return os.path.join(short_root, arg)
+ return arg
+
+
+def fix_path(p):
+ topdirs = ['/%s/' % d for d in os.listdir('/')]
+
+ def abs_path_start(path, pos):
+ if pos < 0:
+ return False
+ return pos == 0 or path[pos - 1] == ':'
+
+ pp = None
+ for pr in topdirs:
+ pp2 = p.find(pr)
+ if abs_path_start(p, pp2) and (pp is None or pp > pp2):
+ pp = pp2
+ if pp is not None:
+ return p[:pp] + 'Z:' + p[pp:].replace('/', '\\')
+ if p.startswith('/Fo'):
+ return '/Fo' + p[3:].replace('/', '\\')
+ return p
+
+
+def process_free_args(args, wine, bld_root, mode):
+ whole_archive_prefix = '/WHOLEARCHIVE:'
+ short_names = {}
+ winepath = os.path.join(os.path.dirname(wine), 'winepath')
+ short_names[bld_root] = trim_path(bld_root, winepath)
+ # Slow for no benefit.
+ # arc_root = args.arcadia_root
+ # short_names[arc_root] = trim_path(arc_root, winepath)
+
+ free_args, wa_peers, wa_libs = pwa.get_whole_archive_peers_and_libs(pcf.skip_markers(args))
+
+ process_link = lambda x: make_full_path_arg(x, bld_root, short_names[bld_root]) if mode in ('link', 'lib') else x
+
+ def process_arg(arg):
+ with_wa_prefix = arg.startswith(whole_archive_prefix)
+ prefix = whole_archive_prefix if with_wa_prefix else ''
+ without_prefix_arg = arg[len(prefix):]
+ return prefix + fix_path(process_link(downsize_path(without_prefix_arg, short_names)))
+
+ result = []
+ for arg in free_args:
+ if pcf.is_cmdfile_arg(arg):
+ cmd_file_path = pcf.cmdfile_path(arg)
+ cf_args = pcf.read_from_command_file(cmd_file_path)
+ with open(cmd_file_path, 'w') as afile:
+ for cf_arg in cf_args:
+ afile.write(process_arg(cf_arg) + "\n")
+ result.append(arg)
+ else:
+ result.append(process_arg(arg))
+ return pwa.ProcessWholeArchiveOption('WINDOWS', wa_peers, wa_libs).construct_cmd(result)
+
+
+def run_main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('wine', action='store')
+ parser.add_argument('-v', action='store', dest='version', default='120')
+ parser.add_argument('-I', action='append', dest='incl_paths')
+ parser.add_argument('mode', action='store')
+ parser.add_argument('arcadia_root', action='store')
+ parser.add_argument('arcadia_build_root', action='store')
+ parser.add_argument('binary', action='store')
+ parser.add_argument('free_args', nargs=argparse.REMAINDER)
+ # By now just unpack. Ideally we should fix path and pack arguments back into command file
+ args = parser.parse_args()
+
+ wine = args.wine
+ mode = args.mode
+ binary = args.binary
+ version = args.version
+ incl_paths = args.incl_paths
+ bld_root = args.arcadia_build_root
+ free_args = args.free_args
+
+ wine_dir = os.path.dirname(os.path.dirname(wine))
+ bin_dir = os.path.dirname(binary)
+ tc_dir = os.path.dirname(os.path.dirname(os.path.dirname(bin_dir)))
+ if not incl_paths:
+ incl_paths = [tc_dir + '/VC/include', tc_dir + '/include']
+
+ cmd_out = find_cmd_out(free_args)
+
+ env = os.environ.copy()
+
+ env.pop('DISPLAY', None)
+
+ env['WINEDLLOVERRIDES'] = 'msvcr{}=n'.format(version)
+ env['WINEDEBUG'] = 'fixme-all'
+ env['INCLUDE'] = ';'.join(fix_path(p) for p in incl_paths)
+ env['VSINSTALLDIR'] = fix_path(tc_dir)
+ env['VCINSTALLDIR'] = fix_path(tc_dir + '/VC')
+ env['WindowsSdkDir'] = fix_path(tc_dir)
+ env['LIBPATH'] = fix_path(tc_dir + '/VC/lib/amd64')
+ env['LIB'] = fix_path(tc_dir + '/VC/lib/amd64')
+ env['LD_LIBRARY_PATH'] = ':'.join(wine_dir + d for d in ['/lib', '/lib64', '/lib64/wine'])
+
+ cmd = [binary] + process_free_args(free_args, wine, bld_root, mode)
+
+ for x in ('/NOLOGO', '/nologo', '/FD'):
+ try:
+ cmd.remove(x)
+ except ValueError:
+ pass
+
+ def run_process(sleep, tout):
+ if sleep:
+ time.sleep(sleep)
+
+ args = {
+ 'cmd': cmd,
+ 'env': env,
+ 'mode': mode,
+ 'tout': tout
+ }
+
+ slave_cmd = [sys.executable, sys.argv[0], wine, 'slave', json.dumps(args)]
+ p = run_subprocess(slave_cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=False)
+ out, _ = p.communicate()
+ return p.wait(), out
+
+ def print_err_log(log):
+ if not log:
+ return
+ if mode == 'cxx':
+ log = colorize(log)
+ print >>sys.stderr, log
+
+ tout = 200
+
+ while True:
+ rc, out = run_process(0, tout)
+
+ if rc in (-signal.SIGALRM, signal.SIGALRM):
+ print_err_log(out)
+ print >>sys.stderr, '##append_tag##time out'
+ elif out and ' stack overflow ' in out:
+ print >>sys.stderr, '##append_tag##stack overflow'
+ elif out and 'recvmsg: Connection reset by peer' in out:
+ print >>sys.stderr, '##append_tag##wine gone'
+ elif out and 'D8037' in out:
+ print >>sys.stderr, '##append_tag##repair wine'
+
+ try:
+ os.unlink(os.path.join(os.environ['WINEPREFIX'], '.update-timestamp'))
+ except Exception as e:
+ print >>sys.stderr, e
+
+ else:
+ print_err_log(out)
+
+ # non-zero return code - bad, return it immediately
+ if rc:
+ print >>sys.stderr, '##win_cmd##' + ' '.join(cmd)
+ print >>sys.stderr, '##args##' + ' '.join(free_args)
+ return rc
+
+ # check for output existence(if we expect it!) and real length
+ if cmd_out:
+ if is_good_file(cmd_out):
+ return 0
+ else:
+ # retry!
+ print >>sys.stderr, '##append_tag##no output'
+ else:
+ return 0
+
+ tout *= 3
+
+
+def main():
+ prefix_suffix = os.environ.pop('WINEPREFIX_SUFFIX', None)
+ if prefix_suffix is not None:
+ prefix = os.environ.pop('WINEPREFIX', None)
+ if prefix is not None:
+ os.environ['WINEPREFIX'] = os.path.join(prefix, prefix_suffix)
+
+ # just in case
+ signal.alarm(2000)
+
+ if sys.argv[2] == 'slave':
+ func = run_slave
+ else:
+ func = run_main
+
+ try:
+ try:
+ sys.exit(func())
+ finally:
+ terminate_slaves()
+ except KeyboardInterrupt:
+ sys.exit(4)
+ except Exception as e:
+ print >>sys.stderr, str(e)
+
+ sys.exit(3)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/scripts/run_sonar.py b/build/scripts/run_sonar.py
new file mode 100644
index 0000000000..761cc34b78
--- /dev/null
+++ b/build/scripts/run_sonar.py
@@ -0,0 +1,121 @@
+import os
+import sys
+import zipfile
+import tarfile
+import subprocess as sp
+import optparse
+import shutil
+import xml.etree.ElementTree as et
+
+
+def parse_args():
+ parser = optparse.OptionParser()
+ parser.add_option(
+ '--classes-jar-path',
+ dest='classes_jar_paths',
+ action='append',
+ default=[],
+ )
+ parser.add_option('--sources-jar-path')
+ parser.add_option('--sonar-scanner-jar-path')
+ parser.add_option('--sonar-scanner-main-class')
+ parser.add_option('--java-coverage-merged-tar')
+ parser.add_option('--java-binary-path')
+ parser.add_option('--log-path')
+ parser.add_option('--gcov-report-path')
+ parser.add_option('--source-root')
+ parser.add_option('--java-args', action='append', default=[])
+ return parser.parse_args()
+
+
+def extract_zip_file(zip_file_path, dest_dir):
+ with zipfile.ZipFile(zip_file_path) as arch:
+ arch.extractall(dest_dir)
+
+
+def get_source_real_path(source_root, path):
+ parts = os.path.normpath(path).split(os.path.sep)
+ for i in xrange(len(parts)):
+ if os.path.exists(os.path.join(source_root, *parts[i:])):
+ return os.path.join(*parts[i:])
+ return None
+
+
+def collect_cpp_sources(report, source_root, destination):
+ sources = set()
+ with open(report) as f:
+ root = et.fromstring(f.read())
+ for f in root.findall('.//class[@filename]'):
+ real_filename = get_source_real_path(source_root, f.attrib['filename'])
+ if real_filename:
+ f.attrib['filename'] = real_filename
+ sources.add(real_filename)
+ with open(report, 'w') as f:
+ pref = '''<?xml version="1.0" ?>
+<!DOCTYPE coverage
+ SYSTEM 'http://cobertura.sourceforge.net/xml/coverage-03.dtd'>\n'''
+ f.write(pref + et.tostring(root, encoding='utf-8') + '\n\n')
+ for src in sources:
+ dst = os.path.join(destination, src)
+ src = os.path.join(source_root, src)
+ if os.path.isfile(src):
+ if not os.path.exists(os.path.dirname(dst)):
+ os.makedirs(os.path.dirname(dst))
+ os.link(src, dst)
+
+
+def main(opts, props_args):
+ sources_dir = os.path.abspath('src')
+ base_props_args = ['-Dsonar.sources=' + sources_dir]
+ os.mkdir(sources_dir)
+ if opts.sources_jar_path:
+ extract_zip_file(opts.sources_jar_path, sources_dir)
+ if opts.gcov_report_path:
+ collect_cpp_sources(opts.gcov_report_path, opts.source_root, sources_dir)
+ base_props_args += ['-Dsonar.projectBaseDir=' + sources_dir, '-Dsonar.cxx.coverage.reportPath=' + opts.gcov_report_path]
+
+ if opts.classes_jar_paths:
+ classes_dir = os.path.abspath('cls')
+ os.mkdir(classes_dir)
+
+ for classes_jar_path in opts.classes_jar_paths:
+ extract_zip_file(classes_jar_path, classes_dir)
+
+ base_props_args.append('-Dsonar.java.binaries=' + classes_dir)
+
+ if opts.java_coverage_merged_tar:
+ jacoco_report_path = os.path.abspath('jacoco.exec')
+ with open(jacoco_report_path, 'w') as dest:
+ with tarfile.open(opts.java_coverage_merged_tar) as tar:
+ for src in tar:
+ extracted = tar.extractfile(src)
+ if extracted is not None:
+ shutil.copyfileobj(extracted, dest)
+
+ base_props_args += [
+ '-Dsonar.core.codeCoveragePlugin=jacoco',
+ '-Dsonar.jacoco.reportPath=' + jacoco_report_path
+ ]
+ java_args = ['-{}'.format(i) for i in opts.java_args] + ['-Djava.net.preferIPv6Addresses=true', '-Djava.net.preferIPv4Addresses=false']
+
+ sonar_cmd = [
+ opts.java_binary_path,
+ ] + java_args + [
+ '-classpath',
+ opts.sonar_scanner_jar_path,
+ ] + base_props_args + props_args + [opts.sonar_scanner_main_class, '-X']
+
+ p = sp.Popen(sonar_cmd, stdout=sp.PIPE, stderr=sp.STDOUT)
+ out, _ = p.communicate()
+
+ sys.stderr.write(out)
+ with open(opts.log_path, 'a') as f:
+ f.write(out)
+
+ sys.exit(p.returncode)
+
+
+if __name__ == '__main__':
+ opts, args = parse_args()
+ props_args = ['-D' + arg for arg in args]
+ main(opts, props_args)
diff --git a/build/scripts/setup_java_tmpdir.py b/build/scripts/setup_java_tmpdir.py
new file mode 100644
index 0000000000..e478d4aa96
--- /dev/null
+++ b/build/scripts/setup_java_tmpdir.py
@@ -0,0 +1,40 @@
+import os
+import sys
+import platform
+import subprocess
+
+
+def fix_tmpdir(cmd):
+ if not cmd:
+ return cmd
+ java_id, option_name = None, None
+ for i, java in enumerate(cmd):
+ if java.endswith('java') or java.endswith('java.exe'):
+ java_id = i
+ option_name = '-Djava.io.tmpdir='
+ break
+ if java.endswith('javac') or java.endswith('javac.exe'):
+ java_id = i
+ option_name = '-J-Djava.io.tmpdir='
+ break
+ if java_id is None:
+ return cmd
+ for arg in cmd[java_id:]:
+ if arg.startswith(option_name):
+ return cmd
+ tmpdir = os.environ.get('TMPDIR') or os.environ.get('TEMPDIR')
+ if not tmpdir:
+ return cmd
+ return cmd[:java_id + 1] + ['{}{}'.format(option_name, tmpdir)] + cmd[java_id + 1:]
+
+
+def just_do_it():
+ args = fix_tmpdir(sys.argv[1:])
+ if platform.system() == 'Windows':
+ sys.exit(subprocess.Popen(args).wait())
+ else:
+ os.execv(args[0], args)
+
+
+if __name__ == '__main__':
+ just_do_it()
diff --git a/build/scripts/sky.py b/build/scripts/sky.py
new file mode 100644
index 0000000000..b703af7ed1
--- /dev/null
+++ b/build/scripts/sky.py
@@ -0,0 +1,43 @@
+import logging
+import os
+import subprocess
+
+import fetch_from
+
+
+class UnsupportedProtocolException(Exception):
+ pass
+
+
+def executable_path():
+ return "/usr/local/bin/sky"
+
+
+def is_avaliable():
+ if not os.path.exists(executable_path()):
+ return False
+ try:
+ subprocess.check_output([executable_path(), "--version"])
+ return True
+ except subprocess.CalledProcessError:
+ return False
+ except OSError:
+ return False
+
+
+def fetch(skynet_id, file_name, timeout=None):
+ if not is_avaliable():
+ raise UnsupportedProtocolException("Skynet is not available")
+
+ target_dir = os.path.abspath(fetch_from.uniq_string_generator())
+ os.mkdir(target_dir)
+
+ cmd_args = [executable_path(), "get", "-N", "Backbone", "--user", "--wait", "--dir", target_dir, skynet_id]
+ if timeout is not None:
+ cmd_args += ["--timeout", str(timeout)]
+
+ logging.info("Call skynet with args: %s", cmd_args)
+ stdout = subprocess.check_output(cmd_args).strip()
+ logging.debug("Skynet call with args %s is finished, result is %s", cmd_args, stdout)
+
+ return os.path.join(target_dir, file_name)
diff --git a/build/scripts/stderr2stdout.py b/build/scripts/stderr2stdout.py
new file mode 100644
index 0000000000..0e510da373
--- /dev/null
+++ b/build/scripts/stderr2stdout.py
@@ -0,0 +1,6 @@
+import subprocess
+import sys
+
+if __name__ == '__main__':
+ assert len(sys.argv) > 1
+ sys.exit(subprocess.Popen(sys.argv[1:], stderr=sys.stdout).wait())
diff --git a/build/scripts/stdout2stderr.py b/build/scripts/stdout2stderr.py
new file mode 100644
index 0000000000..d7861fdda3
--- /dev/null
+++ b/build/scripts/stdout2stderr.py
@@ -0,0 +1,6 @@
+import subprocess
+import sys
+
+if __name__ == '__main__':
+ assert len(sys.argv) > 1
+ sys.exit(subprocess.Popen(sys.argv[1:], stdout=sys.stderr).wait())
diff --git a/build/scripts/symlink.py b/build/scripts/symlink.py
new file mode 100755
index 0000000000..9e30a25065
--- /dev/null
+++ b/build/scripts/symlink.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+import sys
+import os
+import platform
+from subprocess import call
+
+
+def symlink():
+ if len(sys.argv) < 3:
+ print >>sys.stderr, "Usage: symlink.py <source> <target>"
+ sys.exit(1)
+
+ source = sys.argv[1]
+ target = sys.argv[2]
+
+ print("Making a symbolic link from {0} to {1}".format(source, target))
+
+ sysName = platform.system()
+ if sysName == "Windows": # and not os.path.exists(target)
+ if os.path.isdir(source):
+ call(["mklink", "/D", target, source], shell=True)
+ else:
+ call(["mklink", target, source], shell=True)
+ else:
+ call(["ln", "-f", "-s", "-n", source, target])
+
+if __name__ == '__main__':
+ symlink()
diff --git a/build/scripts/tar_directory.py b/build/scripts/tar_directory.py
new file mode 100644
index 0000000000..a91889fa22
--- /dev/null
+++ b/build/scripts/tar_directory.py
@@ -0,0 +1,45 @@
+import os
+import sys
+import tarfile
+
+
+def is_exe(fpath):
+    """Return True if *fpath* is an existing, executable regular file."""
+    return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+
+def main(args):
+    """Pack a directory into a tar archive, or extract one.
+
+    Usage: `tar_directory.py archive.tar directory [skip prefix]` to pack,
+    or `tar_directory.py archive.tar output_directory --extract` to unpack.
+    Prefers the system tar binary (replacing this process via execv) and
+    falls back to the tarfile module when none is found.
+    """
+    if len(args) < 2 or len(args) > 3:
+        raise Exception("Illegal usage: `tar_directory.py archive.tar directory [skip prefix]` or `tar_directory.py archive.tar output_directory --extract`")
+    tar, directory, prefix, extract = args[0], args[1], None, False
+    if len(args) == 3:
+        if args[2] == '--extract':
+            extract = True
+        else:
+            prefix = args[2]
+    for tar_exe in ('/usr/bin/tar', '/bin/tar'):
+        if not is_exe(tar_exe):
+            continue
+        if extract:
+            dest = os.path.abspath(directory)
+            if not os.path.exists(dest):
+                os.makedirs(dest)
+            # execv does not return: the system tar replaces this script.
+            os.execv(tar_exe, [tar_exe, '-xf', tar, '-C', dest])
+        else:
+            # With a prefix, archive entries are stored relative to it.
+            source = os.path.relpath(directory, prefix) if prefix else directory
+            os.execv(tar_exe, [tar_exe, '-cf', tar] + (['-C', prefix] if prefix else []) + [source])
+        break
+    else:
+        # Pure-python fallback for systems without a tar binary.
+        if extract:
+            dest = os.path.abspath(directory)
+            if not os.path.exists(dest):
+                os.makedirs(dest)
+            with tarfile.open(tar, 'r') as tar_file:
+                tar_file.extractall(dest)
+        else:
+            source = directory
+            with tarfile.open(tar, 'w') as out:
+                out.add(os.path.abspath(source), arcname=os.path.relpath(source, prefix) if prefix else source)
+
+
+if __name__ == '__main__':
+    main(sys.argv[1:])
diff --git a/build/scripts/tar_sources.py b/build/scripts/tar_sources.py
new file mode 100644
index 0000000000..d7e650e4ac
--- /dev/null
+++ b/build/scripts/tar_sources.py
@@ -0,0 +1,41 @@
+import argparse
+import os
+import tarfile
+
+
+def parse_args():
+    """Options: --input dir, --output tar path, optional --exts filter,
+    --flat (drop subdirectories) and --prefix (prepended inside archive)."""
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--exts', nargs='*', default=None)
+    parser.add_argument('--flat', action='store_true')
+    parser.add_argument('--input', required=True)
+    parser.add_argument('--output', required=True)
+    parser.add_argument('--prefix', default=None)
+
+    return parser.parse_args()
+
+
+def main():
+    """Collect matching files under --input and tar them into --output.
+
+    Compression is inferred from the output suffix: .tar.gz/.tgz -> gzip,
+    .bzip2 -> bz2, anything else -> uncompressed.
+    """
+    args = parse_args()
+
+    srcs = []
+    for root, _, files in os.walk(args.input):
+        for f in files:
+            # No --exts means "take every file".
+            if not args.exts or f.endswith(tuple(args.exts)):
+                srcs.append(os.path.join(root, f))
+
+    compression_mode = ''
+    if args.output.endswith(('.tar.gz', '.tgz')):
+        compression_mode = 'gz'
+    elif args.output.endswith('.bzip2'):
+        compression_mode = 'bz2'
+
+    with tarfile.open(args.output, 'w:{}'.format(compression_mode)) as out:
+        for f in srcs:
+            arcname = os.path.basename(f) if args.flat else os.path.relpath(f, args.input)
+            if args.prefix:
+                arcname = os.path.join(args.prefix, arcname)
+            out.add(f, arcname=arcname)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/build/scripts/tared_protoc.py b/build/scripts/tared_protoc.py
new file mode 100644
index 0000000000..7643e1dbfe
--- /dev/null
+++ b/build/scripts/tared_protoc.py
@@ -0,0 +1,31 @@
+import os
+import optparse
+import tarfile
+import contextlib
+import subprocess as sp
+
+
+def parse_args():
+    """Split our options (--tar-output, --protoc-out-dir) from the protoc
+    command that follows them."""
+    parser = optparse.OptionParser()
+    parser.disable_interspersed_args()
+    parser.add_option('--tar-output')
+    parser.add_option('--protoc-out-dir')
+    return parser.parse_args()
+
+
+def main():
+    """Run protoc, then tar its whole output directory into --tar-output."""
+    opts, args = parse_args()
+    assert opts.tar_output
+    assert opts.protoc_out_dir
+
+    if not os.path.exists(opts.protoc_out_dir):
+        os.makedirs(opts.protoc_out_dir)
+
+    sp.check_call(args)
+
+    # arcname='' stores entries relative to the archive root.
+    with contextlib.closing(tarfile.open(opts.tar_output, 'w')) as tf:
+        tf.add(opts.protoc_out_dir, arcname='')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/build/scripts/touch.py b/build/scripts/touch.py
new file mode 100755
index 0000000000..e01ba7f86b
--- /dev/null
+++ b/build/scripts/touch.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+
+import optparse
+import os
+import sys
+import time
+
+
+def main(argv):
+ parser = optparse.OptionParser(add_help_option=False)
+ parser.disable_interspersed_args()
+
+ parser.add_option('-?', '--help', dest='help',
+ action='store_true', default=None, help='print help')
+ parser.add_option('-t', dest='t', action='store', default=None)
+
+ opts, argv_rest = parser.parse_args(argv)
+ if getattr(opts, 'help', False):
+ parser.print_help()
+ return 0
+
+ tspec = opts.t
+ if tspec is None:
+ times = None
+ else:
+ head, sep, tail = tspec.partition('.')
+ if 8 > len(head):
+ raise Exception("time spec must follow format [[CC]YY]MMDDhhmm[.SS]: " + tspec + '; ' + head)
+ tfmt = ''
+ if 12 == len(head):
+ tfmt += '%Y'
+ elif 10 == len(head):
+ tfmt += '%y'
+ tfmt += '%m%d%H%M'
+ if 2 == len(tail):
+ tfmt += '.%S'
+ mtime = time.mktime(time.strptime(tspec, tfmt))
+ times = (mtime, mtime)
+
+ for file in argv_rest:
+ try:
+ os.utime(file, times)
+ except:
+ open(file, 'w').close()
+ if times is not None:
+ os.utime(file, times)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/build/scripts/unpacking_jtest_runner.py b/build/scripts/unpacking_jtest_runner.py
new file mode 100644
index 0000000000..9730dcd711
--- /dev/null
+++ b/build/scripts/unpacking_jtest_runner.py
@@ -0,0 +1,148 @@
+import io
+import json
+import optparse
+import os
+import sys
+import subprocess
+import time
+import zipfile
+import platform
+
+# This script changes test run classpath by unpacking tests.jar -> tests-dir. The goal
+# is to launch tests with the same classpath as maven does.
+
+
+def parse_args():
+    """Runner options; everything after them is the java command itself."""
+    parser = optparse.OptionParser()
+    parser.disable_interspersed_args()
+    parser.add_option('--trace-file')
+    parser.add_option('--jar-binary')
+    parser.add_option('--tests-jar-path')
+    parser.add_option('--classpath-option-type', choices=('manifest', 'command_file', 'list'), default='manifest')
+    return parser.parse_args()
+
+
+# temporary, for jdk8/jdk9+ compatibility
+def fix_cmd(cmd):
+    """Drop --add-exports/--add-modules options when running under JDK 8.
+
+    Those options only exist on JDK 9+; `java -version` output is checked
+    for the 1.8 banner.  NOTE(review): on Python 3, communicate() returns
+    bytes, so startswith() against a str would raise TypeError — this
+    helper appears to assume Python 2; confirm before running under py3.
+    """
+    if not cmd:
+        return cmd
+    java = cmd[0]
+    if not java.endswith('java') and not java.endswith('java.exe'):
+        return cmd
+    p = subprocess.Popen([java, '-version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    out, err = p.communicate()
+    out, err = out.strip(), err.strip()
+    if ((out or '').strip().startswith('java version "1.8') or (err or '').strip().startswith('java version "1.8')):
+        res = []
+        i = 0
+        while i < len(cmd):
+            # for/else: matched options are skipped (plus their value in the
+            # space-separated form); everything else is kept.
+            for option in ('--add-exports', '--add-modules'):
+                if cmd[i] == option:
+                    i += 1
+                    break
+                elif cmd[i].startswith(option + '='):
+                    break
+            else:
+                res.append(cmd[i])
+            i += 1
+        return res
+    return cmd
+
+
+def dump_event(etype, data, filename):
+ event = {
+ 'timestamp': time.time(),
+ 'value': data,
+ 'name': etype,
+ }
+
+ with io.open(filename, 'a', encoding='utf8') as afile:
+ afile.write(unicode(json.dumps(event) + '\n'))
+
+
+def dump_chunk_event(data, filename):
+ return dump_event('chunk-event', data, filename)
+
+
+def extract_jars(dest, archive):
+    """Unpack *archive* (a zip/jar) into the newly created *dest* dir."""
+    os.makedirs(dest)
+    with zipfile.ZipFile(archive) as zf:
+        zf.extractall(dest)
+
+
+def make_bfg_from_cp(class_path, out):
+    """Write a classpath-only jar whose manifest carries the class path.
+
+    Works around command-line length limits: entries are space-joined,
+    absolute paths become file:/ URLs, and the string is wrapped at 60
+    characters per manifest continuation-line rules.
+    """
+    class_path = ' '.join(
+        map(lambda path: ('file:/' + path.lstrip('/')) if os.path.isabs(path) else path, class_path)
+    )
+    with zipfile.ZipFile(out, 'w') as zf:
+        lines = []
+        while class_path:
+            lines.append(class_path[:60])
+            class_path = class_path[60:]
+        if lines:
+            zf.writestr('META-INF/MANIFEST.MF', 'Manifest-Version: 1.0\nClass-Path: \n ' + '\n '.join(lines) + ' \n\n')
+
+
+def make_command_file_from_cp(class_path, out):
+    """Write the class path as one os.pathsep-joined java @argfile."""
+    with open(out, 'w') as cp_file:
+        cp_file.write(os.pathsep.join(class_path))
+
+
+def main():
+    """Unpack tests.jar, patch the java -classpath to use the unpacked dir
+    (via manifest jar, @argfile or plain list, per --classpath-option-type),
+    record timing metrics, then hand control to java.
+    """
+    s = time.time()
+    opts, args = parse_args()
+
+    # unpack tests jar
+    try:
+        build_root = args[args.index('--build-root') + 1]
+        dest = os.path.join(build_root, 'test-classes')
+    except Exception:
+        # No --build-root in the java command: unpack next to the cwd.
+        build_root = ''
+        dest = os.path.abspath('test-classes')
+
+    extract_jars(dest, opts.tests_jar_path)
+
+    metrics = {
+        'suite_jtest_extract_jars_(seconds)': time.time() - s,
+    }
+
+    s = time.time()
+    # fix java classpath
+    cp_idx = args.index('-classpath')
+    if args[cp_idx + 1].startswith('@'):
+        # Classpath came as an @file: rebuild it in the requested form,
+        # replacing the tests jar with the unpacked directory.
+        real_name = args[cp_idx + 1][1:]
+        mf = os.path.join(os.path.dirname(real_name), 'fixed.bfg.jar')
+        with open(real_name) as origin:
+            class_path = [os.path.join(build_root, i.strip()) for i in origin]
+        if opts.tests_jar_path in class_path:
+            class_path.remove(opts.tests_jar_path)
+        if opts.classpath_option_type == 'manifest':
+            make_bfg_from_cp(class_path, mf)
+            mf = os.pathsep.join([dest, mf])
+        elif opts.classpath_option_type == 'command_file':
+            mf = os.path.splitext(mf)[0] + '.txt'
+            make_command_file_from_cp([dest] + class_path, mf)
+            mf = "@" + mf
+        elif opts.classpath_option_type == 'list':
+            mf = os.pathsep.join([dest] + class_path)
+        else:
+            raise Exception("Unexpected classpath option type: " + opts.classpath_option_type)
+        args = fix_cmd(args[:cp_idx + 1]) + [mf] + args[cp_idx + 2:]
+    else:
+        # Inline classpath: just swap the jar path for the unpacked dir.
+        args[cp_idx + 1] = args[cp_idx + 1].replace(opts.tests_jar_path, dest)
+        args = fix_cmd(args[:cp_idx]) + args[cp_idx:]
+
+    metrics['suite_jtest_fix_classpath_(seconds)'] = time.time() - s
+
+    if opts.trace_file:
+        dump_chunk_event({'metrics': metrics}, opts.trace_file)
+
+    # run java cmd
+    if platform.system() == 'Windows':
+        sys.exit(subprocess.Popen(args).wait())
+    else:
+        os.execv(args[0], args)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/build/scripts/with_coverage.py b/build/scripts/with_coverage.py
new file mode 100644
index 0000000000..d62435c3b8
--- /dev/null
+++ b/build/scripts/with_coverage.py
@@ -0,0 +1,40 @@
+# TODO prettyboy remove after ya-bin release
+
+import os
+import sys
+import subprocess
+import tarfile
+import random
+import shutil
+
+
+def mkdir_p(path):
+    """mkdir -p: ignore the error if *path* already exists."""
+    try:
+        os.makedirs(path)
+    except OSError:
+        pass
+
+
+def main(args):
+    """Run args[1:] with GCOV_PREFIX pointing at a scratch directory, then
+    pack the collected coverage data into a tar archive at args[0]."""
+    coverage_path = os.path.abspath(args[0])
+    # Random suffix keeps concurrent runs from sharing a scratch dir.
+    coverage_dir = coverage_path + '.' + str(random.getrandbits(64))
+
+    mkdir_p(coverage_dir)
+
+    env = os.environ.copy()
+    env['GCOV_PREFIX'] = coverage_dir
+
+    subprocess.check_call(args[1:], env=env)
+
+    arch_path = coverage_dir + '.archive'
+
+    with tarfile.open(arch_path, 'w:') as tar:
+        tar.add(coverage_dir, arcname='.')
+
+    # Write to a temp name, then rename: consumers never observe a
+    # half-written archive at the final path.
+    os.rename(arch_path, coverage_path)
+
+    shutil.rmtree(coverage_dir)
+
+
+if __name__ == '__main__':
+    main(sys.argv[1:])
diff --git a/build/scripts/with_crash_on_timeout.py b/build/scripts/with_crash_on_timeout.py
new file mode 100644
index 0000000000..bde864ed29
--- /dev/null
+++ b/build/scripts/with_crash_on_timeout.py
@@ -0,0 +1,22 @@
+# TODO prettyboy remove after ya-bin release
+
+import os
+import sys
+import subprocess
+import json
+
+
+def main(args):
+ meta_path = os.path.abspath(args[0])
+ timeout_code = int(args[1])
+ subprocess.check_call(args[2:])
+ with open(meta_path) as f:
+ meta_info = json.loads(f.read())
+ if meta_info["exit_code"] == timeout_code:
+ print >> sys.stderr, meta_info["project"], 'crashed by timeout, use --test-disable-timeout option'
+ return 1
+ return 0
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/build/scripts/with_kapt_args.py b/build/scripts/with_kapt_args.py
new file mode 100644
index 0000000000..eb7438a4c9
--- /dev/null
+++ b/build/scripts/with_kapt_args.py
@@ -0,0 +1,35 @@
+import sys
+import os
+import subprocess
+import platform
+import argparse
+import re
+
+
+def parse_args(args):
+    """Split kapt options (before '--') from the command to run (after)."""
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--ap-classpath', nargs='*', type=str, dest='classpath')
+    cmd_start = args.index('--')
+    return parser.parse_args(args[:cmd_start]), args[cmd_start+1:]
+
+
+def get_ap_classpath(directory):
+    """Find the single non-sources jar under *directory* and return the
+    kotlinc ('-P', plugin-option) pair pointing kapt3 at it."""
+    jar_re = re.compile(r'.*(?<!-sources)\.jar')
+    found_jars = [os.path.join(address, name) for address, dirs, files in os.walk(directory) for name in files if jar_re.match(name)]
+    if len(found_jars) != 1:
+        raise Exception("found %d JAR files in directory %s" % (len(found_jars), directory))
+    arg = 'plugin:org.jetbrains.kotlin.kapt3:apclasspath=' + found_jars[0]
+    return '-P', arg
+
+
+def create_extra_args(args):
+    """One -P apclasspath option pair per annotation-processor directory."""
+    cp_opts = [arg for d in args.classpath for arg in get_ap_classpath(d)]
+    return cp_opts
+
+if __name__ == '__main__':
+    args, cmd = parse_args(sys.argv[1:])
+    res = cmd + create_extra_args(args)
+    # No usable execv on Windows: spawn a child and forward its exit code.
+    if platform.system() == 'Windows':
+        sys.exit(subprocess.Popen(res).wait())
+    else:
+        os.execv(res[0], res)
diff --git a/build/scripts/with_pathsep_resolve.py b/build/scripts/with_pathsep_resolve.py
new file mode 100644
index 0000000000..37c8c598ae
--- /dev/null
+++ b/build/scripts/with_pathsep_resolve.py
@@ -0,0 +1,23 @@
+import sys
+import os
+import subprocess
+import platform
+
+
+def fix_args(args):
+    """Yield args unchanged, except that the argument following each
+    '--fix-path-sep' marker has '::' replaced with the native os.pathsep
+    (the marker itself is dropped)."""
+    just_replace_it = False
+    for arg in args:
+        if arg == '--fix-path-sep':
+            just_replace_it = True
+            continue
+        if just_replace_it:
+            arg = arg.replace('::', os.pathsep)
+            just_replace_it = False
+        yield arg
+
+if __name__ == '__main__':
+    res = list(fix_args(sys.argv[1:]))
+    # No usable execv on Windows: spawn a child and forward its exit code.
+    if platform.system() == 'Windows':
+        sys.exit(subprocess.Popen(res).wait())
+    else:
+        os.execv(res[0], res)
diff --git a/build/scripts/wrap_groovyc.py b/build/scripts/wrap_groovyc.py
new file mode 100644
index 0000000000..068b73fd87
--- /dev/null
+++ b/build/scripts/wrap_groovyc.py
@@ -0,0 +1,23 @@
+import platform
+import sys
+import os
+import subprocess
+
+
+def fix_windows(args):
+    """On Windows the groovyc launcher is a batch file: append '.bat' to a
+    .../bin/groovyc path, pass everything else through unchanged."""
+    for arg in args:
+        if os.path.basename(arg) == 'groovyc' and os.path.basename(os.path.dirname(arg)) == 'bin':
+            yield arg + '.bat'
+        else:
+            yield arg
+
+
+if __name__ == '__main__':
+    # argv: [jdk_path, groovyc command...]; run with JAVA_HOME pointing at
+    # the given JDK.
+    env = os.environ.copy()
+    jdk = sys.argv[1]
+    env['JAVA_HOME'] = jdk
+    args = sys.argv[2:]
+    if platform.system() == 'Windows':
+        sys.exit(subprocess.Popen(list(fix_windows(args)), env=env).wait())
+    else:
+        os.execve(args[0], args, env)
diff --git a/build/scripts/wrapcc.py b/build/scripts/wrapcc.py
new file mode 100644
index 0000000000..88a9e6a4fc
--- /dev/null
+++ b/build/scripts/wrapcc.py
@@ -0,0 +1,45 @@
+from __future__ import print_function
+
+import os
+import sys
+import time
+import subprocess
+
+
+def need_retry(text):
+    """Heuristic: output containing 'Stack dump' marks a compiler crash."""
+    return 'Stack dump' in text
+
+
+def retry_inf(cmd):
+    """Endlessly run *cmd*, yielding (combined_output, error_or_None)."""
+    while True:
+        try:
+            yield subprocess.check_output(cmd, stderr=subprocess.STDOUT), None
+        except subprocess.CalledProcessError as e:
+            yield e.output, e
+
+
+def retry(cmd):
+    """Run *cmd*, retrying up to 6 attempts on compiler crashes; re-raises
+    the CalledProcessError on a genuine (non-crash) failure.
+
+    NOTE(review): under Python 3 check_output returns bytes, so both
+    sys.stderr.write(out) and the 'Stack dump' substring test would fail;
+    this looks written for Python 2 — confirm before porting.
+    """
+    for n, (out, err) in enumerate(retry_inf(cmd)):
+        if out:
+            sys.stderr.write(out)
+
+        if n > 5:
+            raise Exception('all retries failed')
+        elif need_retry(out):
+            # Linear backoff before retrying a crashed compiler.
+            time.sleep(1 + n)
+        elif err:
+            raise err
+        else:
+            return
+
+
+if __name__ == '__main__':
+    cmd = sys.argv[1:]
+
+    # Only compile steps ('-c') get the retry treatment; anything else
+    # simply replaces this process.
+    if '-c' in cmd:
+        try:
+            retry(cmd)
+        except subprocess.CalledProcessError as e:
+            sys.exit(e.returncode)
+    else:
+        os.execv(cmd[0], cmd)
diff --git a/build/scripts/wrapper.py b/build/scripts/wrapper.py
new file mode 100644
index 0000000000..1e9d7955a5
--- /dev/null
+++ b/build/scripts/wrapper.py
@@ -0,0 +1,11 @@
+import os
+import sys
+
+
+if __name__ == '__main__':
+    # Run the wrapped program, resolving a relative path against this
+    # script's own directory, and replace the current process with it.
+    path = sys.argv[1]
+
+    if path[0] != '/':
+        path = os.path.join(os.path.dirname(__file__), path)
+
+    os.execv(path, [path] + sys.argv[2:])
diff --git a/build/scripts/write_file_size.py b/build/scripts/write_file_size.py
new file mode 100644
index 0000000000..880fb90644
--- /dev/null
+++ b/build/scripts/write_file_size.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+import sys
+import os.path
+
+if __name__ == '__main__':
+ output = sys.argv[1]
+ size_sum = 0
+ for filename in sys.argv[2:]:
+ if os.path.exists(filename):
+ size_sum += os.path.getsize(filename)
+ else:
+ sys.stderr.write('write_file_size.py: {0}: No such file or directory\n'.format(filename))
+ sys.exit(1)
+ with open(output, 'w') as f:
+ f.write(str(size_sum))
diff --git a/build/scripts/writer.py b/build/scripts/writer.py
new file mode 100644
index 0000000000..21bb3006e5
--- /dev/null
+++ b/build/scripts/writer.py
@@ -0,0 +1,40 @@
+import sys
+import argparse
+
+import process_command_files as pcf
+
+
+def parse_args():
+ args = pcf.get_args(sys.argv[1:])
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-f', '--file', dest='file_path')
+ parser.add_argument('-a', '--append', action='store_true', default=False)
+ parser.add_argument('-Q', '--quote', action='store_true', default=False)
+ parser.add_argument('-s', '--addspace', action='store_true', default=False)
+ parser.add_argument('-c', '--content', action='append', dest='content')
+ parser.add_argument('-m', '--content-multiple', nargs='*', dest='content')
+ parser.add_argument('-P', '--path-list', action='store_true', default=False)
+ return parser.parse_args(args)
+
+
+def smart_shell_quote(v):
+ if v is None:
+ return None
+ if ' ' in v or '"' in v or "'" in v:
+ return "\"{0}\"".format(v.replace('"', '\\"'))
+ return v
+
+if __name__ == '__main__':
+ args = parse_args()
+ open_type = 'a' if args.append else 'w'
+
+ content = args.content
+ if args.quote:
+ content = [smart_shell_quote(ln) for ln in content] if content is not None else None
+ content = '\n'.join(content)
+
+ with open(args.file_path, open_type) as f:
+ if args.addspace:
+ f.write(' ')
+ if content is not None:
+ f.write(content)
diff --git a/build/scripts/xargs.py b/build/scripts/xargs.py
new file mode 100644
index 0000000000..5d68929ecc
--- /dev/null
+++ b/build/scripts/xargs.py
@@ -0,0 +1,18 @@
+import sys
+import os
+import subprocess
+
+if __name__ == '__main__':
+ pos = sys.argv.index('--')
+ fname = sys.argv[pos + 1]
+ cmd = sys.argv[pos + 2:]
+
+ with open(fname, 'r') as f:
+ args = [x.strip() for x in f]
+
+ os.remove(fname)
+
+ p = subprocess.Popen(cmd + args, shell=False, stderr=sys.stderr, stdout=sys.stdout)
+ p.communicate()
+
+ sys.exit(p.returncode)
diff --git a/build/scripts/ya.make b/build/scripts/ya.make
new file mode 100644
index 0000000000..837f0e0494
--- /dev/null
+++ b/build/scripts/ya.make
@@ -0,0 +1,110 @@
+OWNER(g:ymake)
+
+PY23_TEST()
+
+# NOTE(review): this change set also adds stderr2stdout.py, with_kapt_args.py
+# and wrapcc.py, which are not listed in TEST_SRCS below — confirm whether
+# their omission is intentional.
+IF (PY2)
+    TEST_SRCS(
+        build_dll_and_java.py
+        build_java_codenav_index.py
+        build_java_with_error_prone.py
+        build_java_with_error_prone2.py
+        build_mn.py
+        build_pln_header.py
+        cat.py
+        cgo1_wrapper.py
+        check_config_h.py
+        collect_java_srcs.py
+        compile_cuda.py
+        compile_java.py
+        compile_jsrc.py
+        compile_pysrc.py
+        configure_file.py
+        copy_docs_files.py
+        copy_docs_files_to_dir.py
+        copy_files_to_dir.py
+        copy_to_dir.py
+        coverage-info.py
+        cpp_flatc_wrapper.py
+        create_jcoverage_report.py
+        extract_asrc.py
+        extract_docs.py
+        extract_jacoco_report.py
+        f2c.py
+        fail_module_cmd.py
+        fetch_from.py
+        fetch_from_external.py
+        fetch_from_mds.py
+        fetch_from_npm.py
+        fetch_from_sandbox.py
+        fetch_resource.py
+        filter_zip.py
+        find_and_tar.py
+        fix_msvc_output.py
+        fs_tools.py
+        gen_aar_gradle_script.py
+        gen_java_codenav_entry.py
+        gen_java_codenav_protobuf.py
+        gen_mx_table.py
+        gen_py3_reg.py
+        gen_py_reg.py
+        gen_test_apk_gradle_script.py
+        gen_ub.py
+        generate_pom.py
+        go_proto_wrapper.py
+        go_tool.py
+        ios_wrapper.py
+        java_pack_to_file.py
+        link_asrc.py
+        link_dyn_lib.py
+        link_exe.py
+        link_fat_obj.py
+        link_lib.py
+        llvm_opt_wrapper.py
+        merge_coverage_data.py
+        merge_files.py
+        mkdir.py
+        mkdocs_builder_wrapper.py
+        mkver.py
+        pack_ios.py
+        pack_jcoverage_resources.py
+        perl_wrapper.py
+        postprocess_go_fbs.py
+        preprocess.py
+        py_compile.py
+        run_ios_simulator.py
+        run_javac.py
+        run_junit.py
+        run_llvm_dsymutil.py
+        run_msvc_wine.py
+        run_tool.py
+        sky.py
+        stdout2stderr.py
+        symlink.py
+        tar_directory.py
+        tar_sources.py
+        tared_protoc.py
+        touch.py
+        unpacking_jtest_runner.py
+        vcs_info.py
+        with_coverage.py
+        with_crash_on_timeout.py
+        with_pathsep_resolve.py
+        wrap_groovyc.py
+        wrapper.py
+        writer.py
+        write_file_size.py
+        xargs.py
+        yield_line.py
+        yndexer.py
+    )
+ELSEIF(PY3)
+    TEST_SRCS(
+        build_info_gen.py
+    )
+ENDIF()
+
+PEERDIR(
+    ydb/library/yql/public/udf
+)
+
+END()
diff --git a/build/scripts/yield_line.py b/build/scripts/yield_line.py
new file mode 100644
index 0000000000..c7087e521e
--- /dev/null
+++ b/build/scripts/yield_line.py
@@ -0,0 +1,8 @@
+import sys
+
+
+if __name__ == '__main__':
+    # Usage: yield_line.py ... -- FILE WORD...; appends the words, space
+    # separated, as one line to FILE.
+    pos = sys.argv.index('--')
+
+    with open(sys.argv[pos + 1], 'a') as f:
+        f.write(' '.join(sys.argv[pos + 2:]) + '\n')
diff --git a/build/scripts/yndexer.py b/build/scripts/yndexer.py
new file mode 100644
index 0000000000..a38e28ba99
--- /dev/null
+++ b/build/scripts/yndexer.py
@@ -0,0 +1,79 @@
+import sys
+import subprocess
+import threading
+import os
+import re
+
+
+rx_resource_dir = re.compile(r'libraries: =([^:]*)')
+
+
+def _try_to_kill(process):
+    """Best-effort kill: the process may already have exited."""
+    try:
+        process.kill()
+    except Exception:
+        pass
+
+
+def touch(path):
+    """Create an empty file at *path* if nothing exists there yet."""
+    if not os.path.exists(path):
+        with open(path, 'w'):
+            pass
+
+
+class Process(object):
+    """Popen wrapper that waits with a timeout, then kills the child.
+
+    A daemon thread drives communicate(); wait() blocks on an Event so the
+    caller can bound wall-clock time regardless of child behaviour.
+    """
+    def __init__(self, args):
+        self._process = subprocess.Popen(args)
+        self._event = threading.Event()
+        self._result = None
+        thread = threading.Thread(target=self._run)
+        thread.setDaemon(True)
+        thread.start()
+
+    def _run(self):
+        self._process.communicate()
+        self._result = self._process.returncode
+        self._event.set()
+
+    def wait(self, timeout):
+        # Returns the exit code, or None when the timeout expired (the
+        # child is killed either way; killing an exited process is a no-op).
+        self._event.wait(timeout=timeout)
+        _try_to_kill(self._process)
+        return self._result
+
+
+if __name__ == '__main__':
+ args = sys.argv
+
+ yndexer = args[1]
+ timeout = int(args[2])
+ arc_root = args[3]
+ build_root = args[4]
+ input_file = args[5]
+ output_file = args[-1]
+ tail_args = args[6:-1]
+
+ subprocess.check_call(tail_args)
+
+ clang = tail_args[0]
+ out = subprocess.check_output([clang, '-print-search-dirs'])
+ resource_dir = rx_resource_dir.search(out).group(1)
+
+ yndexer_args = [
+ yndexer, input_file,
+ '-pb2',
+ '-i', 'arc::{}'.format(arc_root),
+ '-i', 'build::{}'.format(build_root),
+ '-i', '.IGNORE::/',
+ '-o', os.path.dirname(output_file),
+ '-n', os.path.basename(output_file).rsplit('.ydx.pb2', 1)[0],
+ '--'
+ ] + tail_args + [
+ '-resource-dir', resource_dir,
+ ]
+
+ process = Process(yndexer_args)
+ result = process.wait(timeout=timeout)
+
+ if result != 0:
+ print >> sys.stderr, 'Yndexing process finished with code', result
+ touch(output_file)