author    monster <monster@ydb.tech>  2022-07-07 14:41:37 +0300
committer monster <monster@ydb.tech>  2022-07-07 14:41:37 +0300
commit    06e5c21a835c0e923506c4ff27929f34e00761c2 (patch)
tree      75efcbc6854ef9bd476eb8bf00cc5c900da436a2 /build/scripts
parent    03f024c4412e3aa613bb543cf1660176320ba8f4 (diff)
download  ydb-06e5c21a835c0e923506c4ff27929f34e00761c2.tar.gz
fix ya.make
Diffstat (limited to 'build/scripts')
-rw-r--r--  build/scripts/_check_compiler.cpp | 1
-rw-r--r--  build/scripts/_fake_src.cpp | 2
-rw-r--r--  build/scripts/append_file.py | 9
-rw-r--r--  build/scripts/autotar_gendirs.py | 70
-rwxr-xr-x  build/scripts/build_catboost.py | 71
-rw-r--r--  build/scripts/build_dll_and_java.py | 47
-rw-r--r--  build/scripts/build_java_codenav_index.py | 49
-rw-r--r--  build/scripts/build_java_with_error_prone.py | 36
-rw-r--r--  build/scripts/build_java_with_error_prone2.py | 87
-rwxr-xr-x  build/scripts/build_mn.py | 330
-rwxr-xr-x  build/scripts/build_pln_header.py | 35
-rwxr-xr-x  build/scripts/cat.py | 15
-rw-r--r--  build/scripts/cgo1_wrapper.py | 45
-rw-r--r--  build/scripts/clang_tidy.py | 172
-rw-r--r--  build/scripts/clang_tidy_arch.py | 33
-rw-r--r--  build/scripts/collect_java_srcs.py | 51
-rw-r--r--  build/scripts/compile_cuda.py | 159
-rw-r--r--  build/scripts/compile_jsrc.py | 24
-rw-r--r--  build/scripts/compile_pysrc.py | 101
-rw-r--r--  build/scripts/copy_docs_files.py | 76
-rw-r--r--  build/scripts/copy_files_to_dir.py | 59
-rw-r--r--  build/scripts/copy_to_dir.py | 75
-rw-r--r--  build/scripts/coverage-info.py | 282
-rw-r--r--  build/scripts/cpp_flatc_wrapper.py | 31
-rw-r--r--  build/scripts/create_jcoverage_report.py | 112
-rw-r--r--  build/scripts/custom_link_green_mysql.py | 97
-rw-r--r--  build/scripts/decimal_md5.py | 79
-rw-r--r--  build/scripts/error.py | 77
-rw-r--r--  build/scripts/export_script_gen.py | 7
-rw-r--r--  build/scripts/extract_asrc.py | 23
-rw-r--r--  build/scripts/extract_jacoco_report.py | 29
-rw-r--r--  build/scripts/f2c.py | 58
-rw-r--r--  build/scripts/fail_module_cmd.py | 7
-rw-r--r--  build/scripts/fetch_from_archive.py | 36
-rw-r--r--  build/scripts/fetch_from_external.py | 60
-rw-r--r--  build/scripts/fetch_from_mds.py | 50
-rw-r--r--  build/scripts/fetch_from_npm.py | 104
-rw-r--r--  build/scripts/fetch_resource.py | 43
-rw-r--r--  build/scripts/filter_zip.py | 71
-rw-r--r--  build/scripts/find_and_tar.py | 22
-rw-r--r--  build/scripts/find_time_trace.py | 17
-rw-r--r--  build/scripts/fix_java_command_file_cp.py | 34
-rw-r--r--  build/scripts/fix_msvc_output.py | 43
-rw-r--r--  build/scripts/gen_aar_gradle_script.py | 378
-rw-r--r--  build/scripts/gen_java_codenav_entry.py | 57
-rw-r--r--  build/scripts/gen_java_codenav_protobuf.py | 22
-rw-r--r--  build/scripts/gen_mx_table.py | 75
-rw-r--r--  build/scripts/gen_swiftc_output_map.py | 15
-rw-r--r--  build/scripts/gen_tasklet_reg.py | 51
-rw-r--r--  build/scripts/gen_test_apk_gradle_script.py | 193
-rw-r--r--  build/scripts/gen_ub.py | 86
-rw-r--r--  build/scripts/gen_yql_python_udf.py | 55
-rw-r--r--  build/scripts/generate_mf.py | 113
-rw-r--r--  build/scripts/generate_pom.py | 275
-rw-r--r--  build/scripts/go_fake_include/go_asm.h | 0
-rw-r--r--  build/scripts/go_proto_wrapper.py | 82
-rw-r--r--  build/scripts/go_tool.py | 873
-rw-r--r--  build/scripts/ios_wrapper.py | 180
-rw-r--r--  build/scripts/java_pack_to_file.py | 43
-rw-r--r--  build/scripts/jni_swig.py | 46
-rw-r--r--  build/scripts/link_asrc.py | 84
-rw-r--r--  build/scripts/link_fat_obj.py | 91
-rw-r--r--  build/scripts/make_java_classpath_file.py | 26
-rw-r--r--  build/scripts/make_java_srclists.py | 128
-rw-r--r--  build/scripts/make_manifest_from_bf.py | 28
-rw-r--r--  build/scripts/merge_coverage_data.py | 32
-rwxr-xr-x  build/scripts/mkdir.py | 12
-rw-r--r--  build/scripts/mkdocs_builder_wrapper.py | 35
-rwxr-xr-x  build/scripts/mkver.py | 12
-rw-r--r--  build/scripts/move.py | 15
-rw-r--r--  build/scripts/pack_ios.py | 48
-rw-r--r--  build/scripts/pack_jcoverage_resources.py | 24
-rw-r--r--  build/scripts/perl_wrapper.py | 24
-rw-r--r--  build/scripts/postprocess_go_fbs.py | 72
-rw-r--r--  build/scripts/process_whole_archive_option.py | 176
-rw-r--r--  build/scripts/python_yndexer.py | 53
-rw-r--r--  build/scripts/resolve_java_srcs.py | 106
-rw-r--r--  build/scripts/retry.py | 29
-rw-r--r--  build/scripts/rodata2cpp.py | 34
-rw-r--r--  build/scripts/run_ios_simulator.py | 79
-rw-r--r--  build/scripts/run_javac.py | 122
-rw-r--r--  build/scripts/run_junit.py | 65
-rw-r--r--  build/scripts/run_msvc_wine.py | 584
-rw-r--r--  build/scripts/run_sonar.py | 121
-rw-r--r--  build/scripts/setup_java_tmpdir.py | 40
-rw-r--r--  build/scripts/sky.py | 43
-rw-r--r--  build/scripts/stderr2stdout.py | 6
-rwxr-xr-x  build/scripts/symlink.py | 29
-rw-r--r--  build/scripts/tar_directory.py | 45
-rw-r--r--  build/scripts/unpacking_jtest_runner.py | 148
-rw-r--r--  build/scripts/with_coverage.py | 40
-rw-r--r--  build/scripts/with_crash_on_timeout.py | 22
-rw-r--r--  build/scripts/with_pathsep_resolve.py | 23
-rw-r--r--  build/scripts/wrap_groovyc.py | 23
-rw-r--r--  build/scripts/wrapper.py | 11
-rw-r--r--  build/scripts/writer.py | 40
-rw-r--r--  build/scripts/yndexer.py | 79
97 files changed, 2 insertions, 7920 deletions
diff --git a/build/scripts/_check_compiler.cpp b/build/scripts/_check_compiler.cpp
deleted file mode 100644
index 53c5fdf179..0000000000
--- a/build/scripts/_check_compiler.cpp
+++ /dev/null
@@ -1 +0,0 @@
-#include <stdio.h>
diff --git a/build/scripts/_fake_src.cpp b/build/scripts/_fake_src.cpp
deleted file mode 100644
index 139597f9cb..0000000000
--- a/build/scripts/_fake_src.cpp
+++ /dev/null
@@ -1,2 +0,0 @@
-
-
diff --git a/build/scripts/append_file.py b/build/scripts/append_file.py
deleted file mode 100644
index 6b5d53bc71..0000000000
--- a/build/scripts/append_file.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import sys
-
-
-if __name__ == "__main__":
-
- file_path = sys.argv[1]
- with open(file_path, "a") as f:
- for text in sys.argv[2:]:
- print >>f, text
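
For reference, the deleted helper simply appended each trailing argument as its own line to the file named by the first argument, using the Python 2 print statement. A minimal Python 3 sketch of the same behavior:

import sys

if __name__ == "__main__":
    file_path = sys.argv[1]
    # Append every remaining argument as its own line.
    with open(file_path, "a") as f:
        for text in sys.argv[2:]:
            print(text, file=f)
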
diff --git a/build/scripts/autotar_gendirs.py b/build/scripts/autotar_gendirs.py
deleted file mode 100644
index a1228108aa..0000000000
--- a/build/scripts/autotar_gendirs.py
+++ /dev/null
@@ -1,70 +0,0 @@
-from __future__ import print_function
-
-import os
-import sys
-import argparse
-import tarfile
-import subprocess
-
-
-def is_exe(fpath):
- return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
-
-
-def pack_dir(dir_path, dest_path):
- dir_path = os.path.abspath(dir_path)
- for tar_exe in ('/usr/bin/tar', '/bin/tar'):
- if is_exe(tar_exe):
- subprocess.check_call([tar_exe, '-cf', dest_path, '-C', os.path.dirname(dir_path), os.path.basename(dir_path)])
- break
- else:
- with tarfile.open(dest_path, 'w') as out:
- out.add(dir_path, arcname=os.path.basename(dir_path))
-
-
-def unpack_dir(tared_dir, dest_path):
- tared_dir = os.path.abspath(tared_dir)
- if not os.path.exists(dest_path):
- os.makedirs(dest_path)
- for tar_exe in ('/usr/bin/tar', '/bin/tar'):
- if is_exe(tar_exe):
- subprocess.check_call([tar_exe, '-xf', tared_dir, '-C', dest_path])
- break
- else:
- with tarfile.open(tared_dir, 'r') as tar_file:
- tar_file.extractall(dest_path)
-
-
-# Must only be used to pack directories in build root
-# Must silently accept empty list of dirs and do nothing in such case (workaround for ymake.core.conf limitations)
-def main(args):
- parser = argparse.ArgumentParser()
- parser.add_argument('--pack', action='store_true', default=False)
- parser.add_argument('--unpack', action='store_true', default=False)
- parser.add_argument('--ext')
- parser.add_argument('--outs', nargs='*', default=[])
- parser.add_argument('dirs', nargs='*')
- args = parser.parse_args(args)
-
- if args.pack:
- if len(args.dirs) != len(args.outs):
- print("Number and oder of dirs to pack must match to the number and order of outs", file=sys.stderr)
- return 1
- for dir, dest in zip(args.dirs, args.outs):
- pack_dir(dir, dest)
- elif args.unpack:
- for tared_dir in args.dirs:
- if not tared_dir.endswith(args.ext):
- print("Requested to unpack '{}' which do not have required extension '{}'".format(tared_dir, args.ext), file=sys.stderr)
- return 1
- dest = os.path.dirname(tared_dir)
- unpack_dir(tared_dir, dest)
- else:
- print("Neither --pack nor --unpack specified. Don't know what to do.", file=sys.stderr)
- return 1
-
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
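
Note the for/else idiom in pack_dir and unpack_dir above: the else branch runs only when the loop finishes without break, i.e. when no executable system tar was found and the portable tarfile fallback must be used. A self-contained illustration:

import os

# Candidate paths taken from the script; used here only to demonstrate for/else.
for exe in ('/usr/bin/tar', '/bin/tar'):
    if os.path.isfile(exe) and os.access(exe, os.X_OK):
        print('using system tar at', exe)
        break
else:
    print('no executable system tar found, falling back to the tarfile module')
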
diff --git a/build/scripts/build_catboost.py b/build/scripts/build_catboost.py
deleted file mode 100755
index 78334fc5f7..0000000000
--- a/build/scripts/build_catboost.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import sys
-import os
-import shutil
-import re
-import subprocess
-
-def get_value(val):
- dct = val.split('=', 1)
- if len(dct) > 1:
- return dct[1]
- return ''
-
-
-class BuildCbBase(object):
- def run(self, cbmodel, cbname, cb_cpp_path):
-
- data_prefix = "CB_External_"
- data = data_prefix + cbname
- datasize = data + "Size"
-
- cbtype = "const NCatboostCalcer::TCatboostCalcer"
- cbload = "(ReadModel({0}, {1}, EModelType::CatboostBinary))".format(data, datasize)
-
- cb_cpp_tmp_path = cb_cpp_path + ".tmp"
- cb_cpp_tmp = open(cb_cpp_tmp_path, 'w')
-
- cb_cpp_tmp.write("#include <kernel/catboost/catboost_calcer.h>\n")
-
- ro_data_path = os.path.dirname(cb_cpp_path) + "/" + data_prefix + cbname + ".rodata"
- cb_cpp_tmp.write("namespace{\n")
- cb_cpp_tmp.write(" extern \"C\" {\n")
- cb_cpp_tmp.write(" extern const unsigned char {1}{0}[];\n".format(cbname, data_prefix))
- cb_cpp_tmp.write(" extern const ui32 {1}{0}Size;\n".format(cbname, data_prefix))
- cb_cpp_tmp.write(" }\n")
- cb_cpp_tmp.write("}\n")
- archiverCall = subprocess.Popen([self.archiver, "-q", "-p", "-o", ro_data_path, cbmodel], stdout=None, stderr=subprocess.PIPE)
- archiverCall.wait()
- cb_cpp_tmp.write("extern {0} {1};\n".format(cbtype, cbname))
- cb_cpp_tmp.write("{0} {1}{2};".format(cbtype, cbname, cbload))
- cb_cpp_tmp.close()
- shutil.move(cb_cpp_tmp_path, cb_cpp_path)
-
-class BuildCb(BuildCbBase):
- def run(self, argv):
- if len(argv) < 5:
- print >>sys.stderr, "BuildCb.Run(<ARCADIA_ROOT> <archiver> <mninfo> <mnname> <cppOutput> [params...])"
- sys.exit(1)
-
- self.SrcRoot = argv[0]
- self.archiver = argv[1]
- cbmodel = argv[2]
- cbname = argv[3]
- cb_cpp_path = argv[4]
-
- super(BuildCb, self).run(cbmodel, cbname, cb_cpp_path)
-
-
-def build_cb_f(argv):
- build_cb = BuildCb()
- build_cb.run(argv)
-
-
-if __name__ == '__main__':
- if len(sys.argv) < 2:
- print >>sys.stderr, "Usage: build_cb.py <funcName> <args...>"
- sys.exit(1)
-
- if (sys.argv[2:]):
- globals()[sys.argv[1]](sys.argv[2:])
- else:
- globals()[sys.argv[1]]()
diff --git a/build/scripts/build_dll_and_java.py b/build/scripts/build_dll_and_java.py
deleted file mode 100644
index b9d8aff4df..0000000000
--- a/build/scripts/build_dll_and_java.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import os
-import subprocess
-import sys
-
-
-def just_do_it(argv):
- delim = argv[0]
- args = []
- for item in argv:
- if item == delim:
- args.append([])
- else:
- args[-1].append(item)
- dll_cmd, java_cmd, inputs, dll_out, java_out, jsrs_out, roots = args
- dll_out, java_out, jsrs_out, build_root, source_root = dll_out[0], java_out[0], jsrs_out[0], roots[0], roots[1]
- for inp in inputs:
- origin_inp = inp
- if os.path.isabs(inp):
- if os.path.relpath(inp, build_root).startswith('..'):
- inp = os.path.relpath(inp, source_root)
- else:
- inp = os.path.relpath(inp, build_root)
- ext = os.path.splitext(inp)[1]
- if ext in ('.o', '.obj'):
- if os.path.join(build_root, inp) in java_cmd:
- inp = os.path.join(build_root, inp)
- if sys.platform == 'win32':
- inp = inp.replace('\\', '/')
- if inp not in java_cmd:
- inp = build_root + '/' + inp
- java_cmd.remove(inp)
- if ext in ('.java', '.jsrc'):
- if origin_inp in dll_cmd:
- inp = origin_inp
- elif os.path.join(build_root, inp) in dll_cmd:
- inp = os.path.join(build_root, inp)
- if sys.platform == 'win32':
- inp = inp.replace('\\', '/')
- dll_cmd.remove(inp)
- java_cmd.insert(java_cmd.index(dll_out), java_out)
- java_cmd.remove(dll_out)
- subprocess.check_call(java_cmd)
- subprocess.check_call(dll_cmd)
-
-
-if __name__ == '__main__':
- just_do_it(sys.argv[1:])
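
just_do_it relies on a delimiter trick: argv[0] is the delimiter itself, so the first iteration opens the first group and every later occurrence of the delimiter starts a new one. A small worked example of that splitting (the command names are illustrative):

def split_by_delim(argv):
    delim = argv[0]  # the delimiter is passed as the first argument
    groups = []
    for item in argv:
        if item == delim:
            groups.append([])
        else:
            groups[-1].append(item)
    return groups

print(split_by_delim(['::', 'gcc', '-c', '::', 'javac', '::', 'a.java']))
# -> [['gcc', '-c'], ['javac'], ['a.java']]
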
diff --git a/build/scripts/build_java_codenav_index.py b/build/scripts/build_java_codenav_index.py
deleted file mode 100644
index d7ac4f3213..0000000000
--- a/build/scripts/build_java_codenav_index.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import sys
-import re
-import os
-import subprocess
-
-FAKE_ARCADIA_ROOT = 'fake_arcadia_root'
-FAKE_BUILD_ROOT = 'fake_build_root'
-
-
-def modify_sources_file(origin, target, source_roots_map):
- def _cut_source_root(src):
- for pref, fake_root in source_roots_map.items():
- if src.startswith(pref):
- return os.path.join(fake_root, os.path.relpath(src, pref))
- return src
-
- with open(origin) as o:
- srcs = [i for line in o for i in re.split('\\s+', line) if i]
- new_srcs = map(_cut_source_root, srcs)
- with open(target, 'w') as t:
- t.write(' '.join(new_srcs))
-
-
-def just_do_it(argv):
- corpus_name, build_root, arcadia_root, sources_file, javac_tail_cmd = argv[0], argv[1], argv[2], argv[3], argv[4:]
- fake_arcadia_root = os.path.join(build_root, FAKE_ARCADIA_ROOT)
- fake_build_root = os.path.join(build_root, FAKE_BUILD_ROOT)
- fake_source_roots = {
- arcadia_root: fake_arcadia_root,
- build_root: fake_build_root,
- }
- modify_sources_file(sources_file, os.path.join(os.path.dirname(sources_file), '_' + os.path.basename(sources_file)), fake_source_roots)
- kindex_data_root = '{}/kindex'.format(os.path.join(build_root, os.path.dirname(corpus_name)))
- if not os.path.exists(kindex_data_root):
- os.makedirs(kindex_data_root)
- env = os.environ.copy()
- env['KYTHE_ROOT_DIRECTORY'] = build_root
- env['KYTHE_OUTPUT_DIRECTORY'] = kindex_data_root
- env['KYTHE_CORPUS'] = os.path.relpath(corpus_name, build_root)
- os.symlink(arcadia_root, fake_arcadia_root)
- os.symlink(build_root, fake_build_root)
- try:
- subprocess.check_call(javac_tail_cmd, env=env)
- finally:
- os.unlink(fake_arcadia_root)
- os.unlink(fake_build_root)
-
-if __name__ == '__main__':
- just_do_it(sys.argv[1:])
diff --git a/build/scripts/build_java_with_error_prone.py b/build/scripts/build_java_with_error_prone.py
deleted file mode 100644
index 910443552e..0000000000
--- a/build/scripts/build_java_with_error_prone.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import sys
-import os
-
-ERROR_PRONE_FLAGS = [
- '-Xep:FunctionalInterfaceMethodChanged:WARN',
- '-Xep:ReturnValueIgnored:WARN',
-]
-
-JAVA10_EXPORTS = [
- '--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED'
-]
-
-
-def just_do_it(argv):
- java, error_prone_tool, javac_cmd = argv[0], argv[1], argv[2:]
- if java.endswith('javac') or java.endswith('javac.exe'):
- for f in javac_cmd:
- if f.startswith('-Xep'):
- ERROR_PRONE_FLAGS.append(f)
- for f in ERROR_PRONE_FLAGS:
- if f in javac_cmd:
- javac_cmd.remove(f)
- os.execv(java, [java] + JAVA10_EXPORTS + ['-processorpath', error_prone_tool, '-XDcompilePolicy=byfile'] + [(' '.join(['-Xplugin:ErrorProne'] + ERROR_PRONE_FLAGS))] + javac_cmd)
- else:
- os.execv(java, [java, '-Xbootclasspath/p:' + error_prone_tool, 'com.google.errorprone.ErrorProneCompiler'] + ERROR_PRONE_FLAGS + javac_cmd)
-
-
-if __name__ == '__main__':
- just_do_it(sys.argv[1:])
diff --git a/build/scripts/build_java_with_error_prone2.py b/build/scripts/build_java_with_error_prone2.py
deleted file mode 100644
index ddf1ccbfc1..0000000000
--- a/build/scripts/build_java_with_error_prone2.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import sys
-import os
-import re
-import subprocess
-import platform
-
-
-ERROR_PRONE_FLAGS = [
- '-Xep:FunctionalInterfaceMethodChanged:WARN',
- '-Xep:ReturnValueIgnored:WARN',
-]
-
-JAVA10_EXPORTS = [
- '--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED',
- '--add-exports=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED',
-]
-
-
-def get_java_version(exe):
- p = subprocess.Popen([exe, '-version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = p.communicate()
- for line in (out or '').strip().split("\n") + (err or '').strip().split("\n"):
- m = re.match(r'java version "(.+)"', line)
- if m:
- parts = m.groups()[0].split(".")
- return parts[1] if parts[0] == "1" else parts[0]
- m = re.match(r'openjdk version "(\d+).*"', line)
- if m:
- parts = m.groups()[0].split(".")
- return parts[0]
- return None
-
-
-def get_classpath(cmd):
- for i, part in enumerate(cmd):
- if part == '-classpath':
- i += 1
- if i < len(cmd):
- return cmd[i]
- else:
- return None
- return None
-
-
-def parse_args(argv):
- parsed = []
- for i in range(len(argv)):
- if not argv[i].startswith('-'):
- parsed.append(argv[i])
- if len(parsed) >= 3:
- break
- return parsed + [argv[i + 1:]]
-
-
-def just_do_it(argv):
- java, javac, error_prone_tool, javac_cmd = parse_args(argv)
- ver = get_java_version(java)
- if not ver:
- raise Exception("Can't determine java version")
- if int(ver) >= 10:
- for f in javac_cmd:
- if f.startswith('-Xep'):
- ERROR_PRONE_FLAGS.append(f)
- for f in ERROR_PRONE_FLAGS:
- if f in javac_cmd:
- javac_cmd.remove(f)
- if '-processor' in javac_cmd:
- classpath = get_classpath(javac_cmd)
- if classpath:
- error_prone_tool = error_prone_tool + os.pathsep + classpath
- cmd = [javac] + JAVA10_EXPORTS + ['-processorpath', error_prone_tool, '-XDcompilePolicy=byfile'] + [(' '.join(['-Xplugin:ErrorProne'] + ERROR_PRONE_FLAGS))] + javac_cmd
- else:
- cmd = [java, '-Xbootclasspath/p:' + error_prone_tool, 'com.google.errorprone.ErrorProneCompiler'] + ERROR_PRONE_FLAGS + javac_cmd
- if platform.system() == 'Windows':
- sys.exit(subprocess.Popen(cmd).wait())
- else:
- os.execv(cmd[0], cmd)
-
-
-if __name__ == '__main__':
- just_do_it(sys.argv[1:])
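
get_java_version above normalizes both banner styles: legacy 'java version "1.8.0_292"' maps to 8, modern 'openjdk version "11.0.2" ...' maps to 11. A quick standalone check of the two regexes (the banner lines are illustrative; real ones come from `java -version` on stderr):

import re

for line in ('java version "1.8.0_292"', 'openjdk version "11.0.2" 2019-01-15'):
    m = re.match(r'java version "(.+)"', line)
    if m:
        parts = m.groups()[0].split(".")
        print(parts[1] if parts[0] == "1" else parts[0])  # prints 8
        continue
    m = re.match(r'openjdk version "(\d+).*"', line)
    if m:
        print(m.groups()[0])  # prints 11
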
diff --git a/build/scripts/build_mn.py b/build/scripts/build_mn.py
deleted file mode 100755
index 5bb03c247c..0000000000
--- a/build/scripts/build_mn.py
+++ /dev/null
@@ -1,330 +0,0 @@
-#!/usr/bin/env python
-# Ymake MatrixNet support
-
-import sys
-import os
-import shutil
-import re
-import subprocess
-
-
-def get_value(val):
- dct = val.split('=', 1)
- if len(dct) > 1:
- return dct[1]
- return ''
-
-
-class BuildMnBase(object):
- def Run(self, mninfo, mnname, mnrankingSuffix, mncppPath, check=False, ptr=False, multi=False):
- self.mninfo = mninfo
- self.mnname = mnname
- self.mnrankingSuffix = mnrankingSuffix
- self.mncppPath = mncppPath
- self.check = check
- self.ptr = ptr
- self.multi = multi
- dataprefix = "MN_External_"
- mninfoName = os.path.basename(self.mninfo)
- data = dataprefix + mnname
- datasize = data + "Size"
-
- if self.multi:
- if self.ptr:
- mntype = "const NMatrixnet::TMnMultiCategPtr"
- mnload = "(new NMatrixnet::TMnMultiCateg( {1}, {2}, \"{0}\"))".format(mninfoName, data, datasize)
- else:
- mntype = "const NMatrixnet::TMnMultiCateg"
- mnload = "({1}, {2}, \"{0}\")".format(mninfoName, data, datasize)
- else:
- if self.ptr:
- mntype = "const NMatrixnet::TMnSsePtr"
- mnload = "(new NMatrixnet::TMnSseInfo({1}, {2}, \"{0}\"))".format(mninfoName, data, datasize)
- else:
- mntype = "const NMatrixnet::TMnSseInfo"
- mnload = "({1}, {2}, \"{0}\")".format(mninfoName, data, datasize)
-
- if self.check:
- self.CheckMn()
-
- mncpptmpPath = self.mncppPath + ".tmp"
- mncpptmp = open(mncpptmpPath, 'w')
-
- if self.multi:
- mncpptmp.write("#include <kernel/matrixnet/mn_multi_categ.h>\n")
- else:
- mncpptmp.write("#include <kernel/matrixnet/mn_sse.h>\n")
-
- rodatapath = os.path.dirname(self.mncppPath) + "/" + dataprefix + self.mnname + ".rodata"
- mncpptmp.write("namespace{\n")
- mncpptmp.write(" extern \"C\" {\n")
- mncpptmp.write(" extern const unsigned char {1}{0}[];\n".format(self.mnname, dataprefix))
- mncpptmp.write(" extern const ui32 {1}{0}Size;\n".format(self.mnname, dataprefix))
- mncpptmp.write(" }\n")
- mncpptmp.write("}\n")
- archiverCall = subprocess.Popen([self.archiver, "-q", "-p", "-o", rodatapath, self.mninfo], stdout=None, stderr=subprocess.PIPE)
- archiverCall.wait()
- mncpptmp.write("extern {0} {1};\n".format(mntype, self.mnname))
- mncpptmp.write("{0} {1}{2};".format(mntype, self.mnname, mnload))
- mncpptmp.close()
- shutil.move(mncpptmpPath, self.mncppPath)
-
- def CheckMn(self):
- if not self.fml_unused_tool:
- print >>sys.stderr, "fml_unused_tool undefined!"
- failed_msg = "fml_unused_tool failed: {0} -A {1} -e -r {2}".format(self.fml_unused_tool, self.SrcRoot, self.mninfo)
- assert not subprocess.call([self.fml_unused_tool, "-A", self.SrcRoot, "-e", "-r", self.mninfo]), failed_msg
-
-
-class BuildMn(BuildMnBase):
- def Run(self, argv):
- if len(argv) < 6:
- print >>sys.stderr, "BuildMn.Run(<ARCADIA_ROOT> <archiver> <mninfo> <mnname> <mnrankingSuffix> <cppOutput> [params...])"
- sys.exit(1)
-
- self.SrcRoot = argv[0]
- self.archiver = argv[1]
-
- mninfo = argv[2]
- mnname = argv[3]
- mnrankingSuffix = argv[4]
- mncppPath = argv[5]
- check = False
- ptr = False
- multi = False
- self.fml_unused_tool = ''
- for param in argv[6:]:
- if param == "CHECK":
- check = True
- elif param == "PTR":
- ptr = True
- elif param == "MULTI":
- multi = True
- elif param.startswith('fml_tool='):
- self.fml_unused_tool = get_value(param)
- else:
- print >>sys.stdout, "Unknown param: {0}".format(param)
- super(BuildMn, self).Run(mninfo, mnname, mnrankingSuffix, mncppPath, check=check, ptr=ptr, multi=multi)
-
-
-class BuildMns(BuildMnBase):
- def InitBase(self, listname, mnrankingSuffix):
- self.autogen = '// DO NOT EDIT THIS FILE DIRECTLY, AUTOGENERATED!\n'
- self.mnrankingSuffix = mnrankingSuffix
- self.mnlistname = listname + mnrankingSuffix
- self.mnlistelem = "const NMatrixnet::TMnSsePtr*"
- mnlisttype = "TMap< TString, {0} >".format(self.mnlistelem)
- self.mnlist = "const {0} {1}".format(mnlisttype, self.mnlistname)
-
- self.mnmultilistname = "{0}{1}Multi".format(listname, self.mnrankingSuffix)
- self.mnmultilistelem = "const NMatrixnet::TMnMultiCategPtr*"
- mnmultilisttype = "TMap< TString, {0} >".format(self.mnmultilistelem)
- self.mnmultilist = "const {0} {1}".format(mnmultilisttype, self.mnmultilistname)
-
- def InitForAll(self, argv):
- if len(argv) < 8:
- print >>sys.stderr, "BuildMns.InitForAll(<ARCADIA_ROOT> <BINDIR> <archiver> <listname> <mnranking_suffix> <hdrfile> <srcfile> <mninfos> [fml_tool=<fml_unused_tool> CHECK])"
- sys.exit(1)
-
- bmns_args = []
- self.check = False
- self.fml_unused_tool = ''
- for arg in argv:
- if arg == "CHECK":
- self.check = True
- elif arg.startswith('fml_tool='):
- self.fml_unused_tool = get_value(arg)
- else:
- bmns_args.append(arg)
-
- self.SrcRoot = bmns_args[0]
- self.BINDIR = bmns_args[1]
- self.archiver = bmns_args[2]
- self.listname = bmns_args[3]
- self.mnrankingSuffix = get_value(bmns_args[4])
- self.hdrfile = bmns_args[5]
- self.srcfile = bmns_args[6]
- self.mninfos = bmns_args[7:]
-
- self.InitBase(self.listname, self.mnrankingSuffix)
-
- def InitForHeader(self, argv):
- if len(argv) < 4:
- print >>sys.stderr, "BuildMns.InitForHeader(<listname> <rankingSuffix> <hdrfile> <mninfos...>)"
- sys.exit(1)
-
- self.listname = argv[0]
- self.mnrankingSuffix = get_value(argv[1])
- self.hdrfile = argv[2]
- self.mninfos = argv[3:]
-
- self.InitBase(self.listname, self.mnrankingSuffix)
-
- def InitForCpp(self, argv):
- if len(argv) < 5:
- print >>sys.stderr, "BuildMns.InitForCpp(<listname> <rankingSuffix> <hdrfile> <srcfile> <mninfos...>)"
- sys.exit(1)
-
- self.listname = argv[0]
- self.mnrankingSuffix = get_value(argv[1])
- self.hdrfile = argv[2]
- self.srcfile = argv[3]
- self.mninfos = argv[4:]
-
- self.InitBase(self.listname, self.mnrankingSuffix)
-
- def InitForFiles(self, argv):
- if len(argv) < 7:
- print >>sys.stderr, "BuildMns.InitForFiles(<ARCADIA_ROOT> <BINDIR> <archiver> <fml_unused_tool> <listname> <rankingSuffix> <mninfos...> [CHECK])"
- sys.exit(1)
-
- bmns_args = []
- self.check = False
- self.fml_unused_tool = ''
- for arg in argv:
- if arg == "CHECK":
- self.check = True
- elif arg.startswith('fml_tool='):
- self.fml_unused_tool = get_value(arg)
- else:
- bmns_args.append(arg)
-
- self.SrcRoot = bmns_args[0]
- self.BINDIR = bmns_args[1]
- self.archiver = bmns_args[2]
- self.listname = bmns_args[3]
- self.mnrankingSuffix = get_value(bmns_args[4])
- self.mninfos = bmns_args[5:]
-
- def BuildMnsHeader(self):
- if self.mninfos:
- self.mninfos = sorted(set(self.mninfos))
-
- tmpHdrPath = self.hdrfile + ".tmp"
- tmpHdrFile = open(tmpHdrPath, 'w')
-
- tmpHdrFile.write(self.autogen)
- tmpHdrFile.write("#include <kernel/matrixnet/mn_sse.h>\n")
- tmpHdrFile.write("#include <kernel/matrixnet/mn_multi_categ.h>\n\n")
- tmpHdrFile.write("extern {0};\n".format(self.mnlist))
- tmpHdrFile.write("extern {0};\n".format(self.mnmultilist))
-
- for item in self.mninfos:
- mnfilename = os.path.basename(item)
- mnfilename, ext = os.path.splitext(mnfilename)
-
- mnname = re.sub("[^-a-zA-Z0-9_]", "_", mnfilename)
-
- if ext == ".info":
- mnname = "staticMn{0}{1}Ptr".format(self.mnrankingSuffix, mnname)
- tmpHdrFile.write("extern const NMatrixnet::TMnSsePtr {0};\n".format(mnname))
- elif ext == ".mnmc":
- mnname = "staticMnMulti{0}{1}Ptr".format(self.mnrankingSuffix, mnname)
- tmpHdrFile.write("extern const NMatrixnet::TMnMultiCategPtr {0};\n".format(mnname))
-
- tmpHdrFile.close()
- shutil.move(tmpHdrPath, self.hdrfile)
-
- def BuildMnFiles(self):
- for item in self.mninfos:
- mnfilename = os.path.basename(item)
- mnfilename, ext = os.path.splitext(mnfilename)
-
- mnname = re.sub("[^-a-zA-Z0-9_]", "_", mnfilename)
-
- if ext == ".info":
- mnname = "staticMn{0}{1}Ptr".format(self.mnrankingSuffix, mnname)
- super(BuildMns, self).Run(item, mnname, self.mnrankingSuffix, self.BINDIR + "/mn.{0}.cpp".format(mnname), check=self.check, ptr=True, multi=False)
- elif ext == ".mnmc":
- mnname = "staticMnMulti{0}{1}Ptr".format(self.mnrankingSuffix, mnname)
- # BUILD_MN_PTR_MULTI
- super(BuildMns, self).Run(item, mnname, self.mnrankingSuffix, self.BINDIR + "/mnmulti.{0}.cpp".format(mnname), check=False, ptr=True, multi=True)
-
- def BuildMnsCpp(self):
- if self.mninfos:
- self.mninfos = sorted(set(self.mninfos))
-
- tmpSrcPath = self.srcfile + ".tmp"
- tmpSrcFile = open(tmpSrcPath, 'w')
- hdrrel = os.path.basename(self.hdrfile)
-
- mnnames = []
- mnmultinames = []
- for item in self.mninfos:
- mnfilename = os.path.basename(item)
- mnfilename, ext = os.path.splitext(mnfilename)
-
- if ext == ".info":
- mnnames.append(mnfilename)
- elif ext == ".mnmc":
- mnmultinames.append(mnfilename)
-
- tmpSrcFile.write(self.autogen)
- tmpSrcFile.write("#include \"{0}\"\n\n".format(hdrrel))
-
- if mnnames:
- mndata = self.mnlistname + "_data"
- tmpSrcFile.write("static const std::pair< TString, {0} > {1}[] = {{\n".format(self.mnlistelem, mndata))
- for item in mnnames:
- mnname = re.sub("[^-a-zA-Z0-9_]", "_", item)
- tmpSrcFile.write(" std::make_pair(TString(\"{0}\"), &staticMn{1}{2}Ptr),\n".format(item, self.mnrankingSuffix, mnname))
- tmpSrcFile.write("};\n")
- tmpSrcFile.write("{0}({1},{1} + sizeof({1}) / sizeof({1}[0]));\n\n".format(self.mnlist, mndata))
- else:
- tmpSrcFile.write("{0};\n\n".format(self.mnlist))
-
- if mnmultinames:
- mnmultidata = self.mnmultilistname + "_data"
- tmpSrcFile.write("static const std::pair< TString, {0} > {1}[] = {{\n".format(self.mnmultilistelem, mnmultidata))
- for item in mnmultinames:
- mnname = re.sub("[^-a-zA-Z0-9_]", "_", item)
- tmpSrcFile.write(" std::make_pair(TString(\"{0}\"), &staticMnMulti{1}{2}Ptr),\n".format(item, self.mnrankingSuffix, mnname))
- tmpSrcFile.write("};\n")
- tmpSrcFile.write("{0}({1},{1} + sizeof({1}) / sizeof({1}[0]));\n".format(self.mnmultilist, mnmultidata))
- else:
- tmpSrcFile.write("{0};\n".format(self.mnmultilist))
-
- tmpSrcFile.close()
- shutil.move(tmpSrcPath, self.srcfile)
-
-
-def BuildMnsAllF(argv):
- bldMns = BuildMns()
- bldMns.InitForAll(argv)
- bldMns.BuildMnsCpp()
- bldMns.BuildMnsHeader()
- bldMns.BuildMnFiles()
-
-
-def BuildMnsCppF(argv):
- bldMns = BuildMns()
- bldMns.InitForCpp(argv)
- bldMns.BuildMnsCpp()
-
-
-def BuildMnsHeaderF(argv):
- bldMns = BuildMns()
- bldMns.InitForHeader(argv)
- bldMns.BuildMnsHeader()
-
-
-def BuildMnsFilesF(argv):
- bldMns = BuildMns()
- bldMns.InitForFiles(argv)
- bldMns.BuildMnFiles()
-
-
-def BuildMnF(argv):
- bldMn = BuildMn()
- bldMn.Run(argv)
-
-
-if __name__ == '__main__':
- if len(sys.argv) < 2:
- print >>sys.stderr, "Usage: build_mn.py <funcName> <args...>"
- sys.exit(1)
-
- if (sys.argv[2:]):
- globals()[sys.argv[1]](sys.argv[2:])
- else:
- globals()[sys.argv[1]]()
diff --git a/build/scripts/build_pln_header.py b/build/scripts/build_pln_header.py
deleted file mode 100755
index c73693f444..0000000000
--- a/build/scripts/build_pln_header.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import os
-
-
-def BuildPlnHeader():
- if len(sys.argv) < 2:
- print >>sys.stderr, "Usage: build_pln_header.py <absolute/path/to/OutFile>"
- sys.exit(1)
-
- print >>sys.stdout, "Build Pln Header..."
- outPath = sys.argv[1]
- tmpPath = outPath + '.tmp'
- tmpFile = open(tmpPath, 'w')
-
- tmpFile.write('#include <library/cpp/sse/sse.h>\n')
- tmpFile.write('#include <kernel/relevfml/relev_fml.h>\n')
- for path in sys.argv[2:]:
- name = os.path.basename(path).split(".")[0] # name without extensions
- tmpFile.write('\nextern SRelevanceFormula fml{0};\n'.format(name))
- tmpFile.write('float {0}(const float* f);\n'.format(name))
- tmpFile.write('void {0}SSE(const float* const* factors, float* result);\n'.format(name))
- tmpFile.close()
- try:
- os.remove(outPath)
- except:
- pass
- try:
- os.rename(tmpPath, outPath)
- except:
- print >>sys.stdout, 'Error: Failed to rename ' + tmpPath + ' to ' + outPath
-
-if __name__ == '__main__':
- BuildPlnHeader()
diff --git a/build/scripts/cat.py b/build/scripts/cat.py
deleted file mode 100755
index 0c3f73d96f..0000000000
--- a/build/scripts/cat.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python
-import sys
-from shutil import copyfileobj as copy
-import os.path
-
-if __name__ == '__main__':
- for filename in sys.argv[1:] or ["-"]:
- if filename == "-":
- copy(sys.stdin, sys.stdout)
- else:
- if os.path.exists(filename):
- with open(filename, 'rb') as file:
- copy(file, sys.stdout)
- else:
- sys.stderr.write('cat.py: {0}: No such file or directory\n'.format(filename))
diff --git a/build/scripts/cgo1_wrapper.py b/build/scripts/cgo1_wrapper.py
deleted file mode 100644
index 986082f7e9..0000000000
--- a/build/scripts/cgo1_wrapper.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import argparse
-import shutil
-import subprocess
-import sys
-
-
-CGO1_SUFFIX='.cgo1.go'
-
-
-def call(cmd, cwd, env=None):
- # sys.stderr.write('{}\n'.format(' '.join(cmd)))
- return subprocess.call(cmd, stdin=None, stderr=sys.stderr, stdout=sys.stdout, cwd=cwd, env=env)
-
-
-def process_file(source_root, source_prefix, build_root, build_prefix, src_path, comment_prefix):
- dst_path = '{}.tmp'.format(src_path)
- with open(src_path, 'r') as src_file, open(dst_path, 'w') as dst_file:
- for line in src_file:
- if line.startswith(comment_prefix):
- dst_file.write(line.replace(source_root, source_prefix).replace(build_root, build_prefix))
- else:
- dst_file.write(line)
- shutil.move(dst_path, src_path)
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
- parser.add_argument('--build-prefix', default='__ARCADIA_BUILD_ROOT_PREFIX__')
- parser.add_argument('--build-root', required=True)
- parser.add_argument('--cgo1-files', nargs='+', required=True)
- parser.add_argument('--cgo2-files', nargs='+', required=True)
- parser.add_argument('--source-prefix', default='__ARCADIA_SOURCE_ROOT_PREFIX__')
- parser.add_argument('--source-root', required=True)
- parser.add_argument('cgo1_cmd', nargs='*')
- args = parser.parse_args()
-
- exit_code = call(args.cgo1_cmd, args.source_root)
- if exit_code != 0:
- sys.exit(exit_code)
-
- for src_path in args.cgo1_files:
- process_file(args.source_root, args.source_prefix, args.build_root, args.build_prefix, src_path, '//')
-
- for src_path in args.cgo2_files:
- process_file(args.source_root, args.source_prefix, args.build_root, args.build_prefix, src_path, '#line')
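
process_file makes cgo output position-independent: absolute source and build roots inside line-directive comments are replaced with stable placeholders, so generated files stay byte-identical across checkouts. Roughly (the root path here is illustrative):

source_root = '/home/user/arcadia'
source_prefix = '__ARCADIA_SOURCE_ROOT_PREFIX__'
line = '//line /home/user/arcadia/contrib/foo.go:10\n'
if line.startswith('//'):  # cgo1 files use '//', cgo2 files use '#line'
    line = line.replace(source_root, source_prefix)
print(line, end='')
# //line __ARCADIA_SOURCE_ROOT_PREFIX__/contrib/foo.go:10
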
diff --git a/build/scripts/clang_tidy.py b/build/scripts/clang_tidy.py
deleted file mode 100644
index 982e9015cb..0000000000
--- a/build/scripts/clang_tidy.py
+++ /dev/null
@@ -1,172 +0,0 @@
-import argparse
-import json
-import os
-import re
-import shutil
-import sys
-
-import subprocess
-
-import yaml
-
-
-def setup_script(args):
- global tidy_config_validation
- sys.path.append(os.path.dirname(args.config_validation_script))
- import tidy_config_validation
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument("--testing-src", required=True)
- parser.add_argument("--clang-tidy-bin", required=True)
- parser.add_argument("--config-validation-script", required=True)
- parser.add_argument("--ymake-python", required=True)
- parser.add_argument("--tidy-json", required=True)
- parser.add_argument("--source-root", required=True)
- parser.add_argument("--build-root", required=True)
- parser.add_argument("--default-config-file", required=True)
- parser.add_argument("--project-config-file", required=True)
- parser.add_argument("--export-fixes", required=True)
- parser.add_argument("--checks", required=False, default="")
- parser.add_argument("--header-filter", required=False, default=None)
- return parser.parse_known_args()
-
-
-def generate_compilation_database(clang_cmd, source_root, filename, path):
- compile_database = [
- {
- "file": filename,
- "command": subprocess.list2cmdline(clang_cmd),
- "directory": source_root,
- }
- ]
- compilation_database_json = os.path.join(path, "compile_commands.json")
- with open(compilation_database_json, "w") as afile:
- json.dump(compile_database, afile)
- return compilation_database_json
-
-
-def load_profile(path):
- if os.path.exists(path):
- files = os.listdir(path)
- if len(files) == 1:
- with open(os.path.join(path, files[0])) as afile:
- return json.load(afile)["profile"]
- elif len(files) > 1:
- return {
- "error": "found several profile files: {}".format(files),
- }
- return {
- "error": "profile file is missing",
- }
-
-
-def load_fixes(path):
- if os.path.exists(path):
- with open(path, 'r') as afile:
- return afile.read()
- else:
- return ""
-
-
-def is_generated(testing_src, build_root):
- return testing_src.startswith(build_root)
-
-
-def generate_outputs(output_json):
- output_obj = os.path.splitext(output_json)[0] + ".o"
- open(output_obj, "w").close()
- open(output_json, "w").close()
-
-
-def filter_configs(result_config, filtered_config):
- with open(result_config, 'r') as afile:
- input_config = yaml.safe_load(afile)
- result_config = tidy_config_validation.filter_config(input_config)
- with open(filtered_config, 'w') as afile:
- yaml.safe_dump(result_config, afile)
-
-
-def main():
- args, clang_cmd = parse_args()
- setup_script(args)
- clang_tidy_bin = args.clang_tidy_bin
- output_json = args.tidy_json
- generate_outputs(output_json)
- if is_generated(args.testing_src, args.build_root):
- return
- if args.header_filter is None:
- # .pb.h files will be excluded because they are not in source_root
- header_filter = r"^" + re.escape(os.path.dirname(args.testing_src)) + r".*"
- else:
- header_filter = r"^(" + args.header_filter + r").*"
-
- def ensure_clean_dir(path):
- path = os.path.join(args.build_root, path)
- if os.path.exists(path):
- shutil.rmtree(path)
- os.makedirs(path)
- return path
-
- profile_tmpdir = ensure_clean_dir("profile_tmpdir")
- db_tmpdir = ensure_clean_dir("db_tmpdir")
- fixes_file = "fixes.txt"
- config_dir = ensure_clean_dir("config_dir")
- result_config_file = args.default_config_file
- if args.project_config_file != args.default_config_file:
- result_config = os.path.join(config_dir, "result_tidy_config.yaml")
- filtered_config = os.path.join(config_dir, "filtered_tidy_config.yaml")
- filter_configs(args.project_config_file, filtered_config)
- result_config_file = tidy_config_validation.merge_tidy_configs(
- base_config_path=args.default_config_file,
- additional_config_path=filtered_config,
- result_config_path=result_config,
- )
- compile_command_path = generate_compilation_database(clang_cmd, args.source_root, args.testing_src, db_tmpdir)
-
- cmd = [
- clang_tidy_bin,
- args.testing_src,
- "-p",
- compile_command_path,
- "--warnings-as-errors",
- "*",
- "--config-file",
- result_config_file,
- "--header-filter",
- header_filter,
- "--use-color",
- "--enable-check-profile",
- "--store-check-profile={}".format(profile_tmpdir),
- ]
- if args.export_fixes == "yes":
- cmd += ["--export-fixes", fixes_file]
-
- if args.checks:
- cmd += ["--checks", args.checks]
-
- print("cmd: {}".format(' '.join(cmd)))
- res = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = res.communicate()
- out = out.replace(args.source_root, "$(SOURCE_ROOT)")
- profile = load_profile(profile_tmpdir)
- testing_src = os.path.relpath(args.testing_src, args.source_root)
- tidy_fixes = load_fixes(fixes_file)
-
- with open(output_json, "wb") as afile:
- json.dump(
- {
- "file": testing_src,
- "exit_code": res.returncode,
- "profile": profile,
- "stderr": err,
- "stdout": out,
- "fixes": tidy_fixes,
- },
- afile,
- )
-
-
-if __name__ == "__main__":
- main()
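
The key step above is generate_compilation_database: clang-tidy discovers compile flags through a compile_commands.json, so the script synthesizes a single-entry database for the one file under check. A minimal standalone sketch (paths and flags are illustrative):

import json
import subprocess

clang_cmd = ['clang++', '-std=c++17', '-Iinclude', '-c', 'foo.cpp']
entry = {
    'file': 'foo.cpp',
    'command': subprocess.list2cmdline(clang_cmd),
    'directory': '/path/to/source/root',
}
with open('compile_commands.json', 'w') as afile:
    json.dump([entry], afile)
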
diff --git a/build/scripts/clang_tidy_arch.py b/build/scripts/clang_tidy_arch.py
deleted file mode 100644
index 7caf623a3d..0000000000
--- a/build/scripts/clang_tidy_arch.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import os
-import argparse
-import json
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument("--output-file")
- parser.add_argument("--build-root")
- parser.add_argument("--source-root")
- return parser.parse_known_args()
-
-
-def main():
- args, unknown_args = parse_args()
- inputs = unknown_args
- result_json = {}
- for inp in inputs:
- if os.path.exists(inp) and inp.endswith("tidyjson"):
- with open(inp, 'r') as afile:
- file_content = afile.read().strip()
- if not file_content:
- continue
- errors = json.loads(file_content)
- testing_src = errors["file"]
- result_json[testing_src] = errors
-
- with open(args.output_file, 'w') as afile:
- json.dump(result_json, afile, indent=4) # TODO remove indent
-
-
-if __name__ == "__main__":
- main()
diff --git a/build/scripts/collect_java_srcs.py b/build/scripts/collect_java_srcs.py
deleted file mode 100644
index 170002520a..0000000000
--- a/build/scripts/collect_java_srcs.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import os
-import sys
-import contextlib
-import tarfile
-import zipfile
-
-
-if __name__ == '__main__':
- build_root = sys.argv[1]
- root = os.path.normpath(sys.argv[2])
- dest = os.path.normpath(sys.argv[3])
- srcs = sys.argv[4:]
-
- for src in srcs:
- src = os.path.normpath(src)
- if src.endswith('.java') or src.endswith('.kt'):
- src_rel_path = os.path.relpath(src, root)
-
- if os.path.join(root, src_rel_path) == src:
- # Inside root
- dst = os.path.join(dest, src_rel_path)
-
- else:
- # Outside root
- print>>sys.stderr, 'External src file "{}" is outside of srcdir {}, ignoring it'.format(
- os.path.relpath(src, build_root),
- os.path.relpath(root, build_root),
- )
- continue
-
- if os.path.exists(dst):
- print>>sys.stderr, 'Duplicate external src file {}, choice is undefined'.format(
- os.path.relpath(dst, root)
- )
-
- else:
- destdir = os.path.dirname(dst)
- if destdir and not os.path.exists(destdir):
- os.makedirs(destdir)
- os.rename(src, dst)
-
- elif src.endswith('.jsr'):
- with contextlib.closing(tarfile.open(src, 'r')) as tf:
- tf.extractall(dst)
-
- elif src.endswith('-sources.jar'):
- with zipfile.ZipFile(src) as zf:
- zf.extractall(dst)
-
- else:
- print>>sys.stderr, 'Unrecognized file type', os.path.relpath(src, build_root)
diff --git a/build/scripts/compile_cuda.py b/build/scripts/compile_cuda.py
deleted file mode 100644
index 9d89340344..0000000000
--- a/build/scripts/compile_cuda.py
+++ /dev/null
@@ -1,159 +0,0 @@
-import sys
-import subprocess
-import os
-import collections
-import re
-import tempfile
-
-
-def is_clang(command):
- for word in command:
- if '--compiler-bindir' in word and 'clang' in word:
- return True
-
- return False
-
-
-def main():
- try:
- sys.argv.remove('--y_skip_nocxxinc')
- skip_nocxxinc = True
- except ValueError:
- skip_nocxxinc = False
-
- spl = sys.argv.index('--cflags')
- mtime0 = sys.argv[1]
- command = sys.argv[2: spl]
- cflags = sys.argv[spl + 1:]
-
- dump_args = False
- if '--y_dump_args' in command:
- command.remove('--y_dump_args')
- dump_args = True
-
- executable = command[0]
- if not os.path.exists(executable):
- print >> sys.stderr, '{} not found'.format(executable)
- sys.exit(1)
-
- if is_clang(command):
- # nvcc concatenates the sources for clang, and clang reports unused
- # things from .h files as if they were defined in a .cpp file.
- cflags += ['-Wno-unused-function', '-Wno-unused-parameter']
-
- if not is_clang(command) and '-fopenmp=libomp' in cflags:
- cflags.append('-fopenmp')
- cflags.remove('-fopenmp=libomp')
-
- skip_list = [
- '-gline-tables-only',
- # clang coverage
- '-fprofile-instr-generate',
- '-fcoverage-mapping',
- '/Zc:inline', # would remove unreferenced functions (kernel registrators)
- '-Wno-c++17-extensions',
- '-flto',
- '-faligned-allocation',
- '-fsized-deallocation'
- ]
-
- if skip_nocxxinc:
- skip_list.append('-nostdinc++')
-
- for flag in skip_list:
- if flag in cflags:
- cflags.remove(flag)
-
- skip_prefix_list = [
- '-fsanitize=',
- '-fsanitize-coverage=',
- '-fsanitize-blacklist=',
- '--system-header-prefix',
- ]
- new_cflags = []
- for flag in cflags:
- if all(not flag.startswith(skip_prefix) for skip_prefix in skip_prefix_list):
- if flag.startswith('-fopenmp-version='):
- new_cflags.append('-fopenmp-version=45') # Clang 11 only supports OpenMP 4.5, but the default is 5.0, so we need to forcefully redefine it.
- else:
- new_cflags.append(flag)
- cflags = new_cflags
-
- if not is_clang(command):
- def good(arg):
- if arg.startswith('--target='):
- return False
- if arg in ('-Wno-exceptions',
- '-Wno-inconsistent-missing-override'):
- return False
- return True
- cflags = filter(good, cflags)
-
- cpp_args = []
- compiler_args = []
-
- # NVCC requires particular MSVC versions which may differ from the version
- # used to compile regular C++ code. We have a separate MSVC in Arcadia for
- # the CUDA builds and pass its root in $Y_VC_Root.
- # The separate MSVC for CUDA may be absent in Yandex Open Source builds.
- vc_root = os.environ.get('Y_VC_Root')
-
- cflags_queue = collections.deque(cflags)
- while cflags_queue:
-
- arg = cflags_queue.popleft()
- if arg == '-mllvm':
- compiler_args.append(arg)
- compiler_args.append(cflags_queue.popleft())
- continue
- if arg[:2].upper() in ('-I', '/I', '-B'):
- value = arg[2:]
- if not value:
- value = cflags_queue.popleft()
- if arg[1] == 'I':
- cpp_args.append('-I{}'.format(value))
- elif arg[1] == 'B': # todo: delete "B" flag check when cuda stops using gcc
- pass
- continue
-
- match = re.match(r'[-/]D(.*)', arg)
- if match:
- define = match.group(1)
- # We have C++ flags configured for the regular C++ build.
- # There is Y_MSVC_INCLUDE define with a path to the VC header files.
- # We need to change the path accordingly when using a separate MSVC for CUDA.
- if vc_root and define.startswith('Y_MSVC_INCLUDE'):
- define = os.path.expandvars('Y_MSVC_INCLUDE={}/include'.format(vc_root))
- cpp_args.append('-D' + define.replace('\\', '/'))
- continue
-
- compiler_args.append(arg)
-
- command += cpp_args
- if compiler_args:
- command += ['--compiler-options', ','.join(compiler_args)]
-
- # --keep is necessary to prevent nvcc from embedding nvcc pid in generated
- # symbols. It makes nvcc use the original file name as the prefix in the
- # generated files (otherwise it also prepends tmpxft_{pid}_00000000-5), and
- # cicc derives the module name from its {input}.cpp1.ii file name.
- command += ['--keep', '--keep-dir', tempfile.mkdtemp(prefix='compile_cuda.py.')]
- # nvcc generates symbols like __fatbinwrap_{len}_{basename}_{hash} where
- # {basename} is {input}.cpp1.ii with non-C chars translated to _, {len} is
- # {basename} length, and {hash} is the hash of first exported symbol in
- # {input}.cpp1.ii if there is one, otherwise it is based on its modification
- # time (converted to string in the local timezone) and the current working
- # directory. To stabilize the names of these symbols we need to fix mtime,
- # timezone, and cwd.
- os.environ['LD_PRELOAD'] = mtime0
- os.environ['TZ'] = 'UTC0' # POSIX fixed offset format.
- os.environ['TZDIR'] = '/var/empty' # Against counterfeit /usr/share/zoneinfo/$TZ.
-
- if dump_args:
- sys.stdout.write('\n'.join(command))
- else:
- sys.exit(subprocess.Popen(command, stdout=sys.stderr, stderr=sys.stderr, cwd='/').wait())
-
-
-if __name__ == '__main__':
- main()
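
The flag plumbing above follows nvcc's convention: preprocessor arguments (-I, -D) can stay on the top-level command line, while the remaining host-compiler flags must be forwarded as one comma-joined --compiler-options value. Schematically (flags are illustrative):

cpp_args = ['-Iinclude', '-DNDEBUG']
compiler_args = ['-Wno-unused-function', '-fno-exceptions']
command = ['nvcc', '-c', 'kernel.cu'] + cpp_args
if compiler_args:
    command += ['--compiler-options', ','.join(compiler_args)]
print(' '.join(command))
# nvcc -c kernel.cu -Iinclude -DNDEBUG --compiler-options -Wno-unused-function,-fno-exceptions
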
diff --git a/build/scripts/compile_jsrc.py b/build/scripts/compile_jsrc.py
deleted file mode 100644
index 8760e5eee9..0000000000
--- a/build/scripts/compile_jsrc.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import argparse
-import os
-import tarfile
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('--input', nargs='*', required=True)
- parser.add_argument('--output', required=True)
- parser.add_argument('--prefix', required=True)
-
- return parser.parse_args()
-
-
-def main():
- args = parse_args()
-
- with tarfile.open(args.output, 'w') as out:
- for f in args.input:
- out.add(f, arcname=os.path.relpath(f, args.prefix))
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/scripts/compile_pysrc.py b/build/scripts/compile_pysrc.py
deleted file mode 100644
index e3637e18e2..0000000000
--- a/build/scripts/compile_pysrc.py
+++ /dev/null
@@ -1,101 +0,0 @@
-import argparse
-import os
-import shutil
-import subprocess
-import tarfile
-
-
-LIMIT = 6000
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('--input', required=True)
- parser.add_argument('--output', required=True)
- parser.add_argument('--rescompiler', required=True)
- subparsers = parser.add_subparsers(dest='mode')
-
- parser_py2 = subparsers.add_parser('py2')
- parser_py2.add_argument('--py_compile', required=True)
- parser_py2.add_argument('--python', required=True)
-
- parser_py3 = subparsers.add_parser('py3')
- parser_py3.add_argument('--pycc', required=True)
-
- return parser.parse_args()
-
-
-def call(cmd, cwd=None, env=None):
- return subprocess.check_output(cmd, stdin=None, stderr=subprocess.STDOUT, cwd=cwd, env=env)
-
-
-def iterate_py2_resource_params(py_files):
- for py in py_files:
- mod = py[:-3].replace('/', '.')
- key = '/py_modules/{}'.format(mod)
- yield py, key
- yield '-', 'resfs/src/{}={}'.format(key, py)
- yield '{}.yapyc'.format(py), '/py_code/{}'.format(mod)
-
-
-def iterate_py3_resource_params(py_files):
- for py in py_files:
- for ext in ('', '.yapyc3'):
- path = '{}{}'.format(py, ext)
- dest = 'py/{}'.format(path)
- key = 'resfs/file/{}'.format(dest)
- src = 'resfs/src/{}={}'.format(key, os.path.basename(path))
- yield '-', src
- yield path, key
-
-
-def main():
- args = parse_args()
-
- names = []
- with tarfile.open(args.input, 'r') as tar:
- names = tar.getnames()
- tar.extractall()
-
- if args.mode == 'py3':
- pycc_cmd = [args.pycc]
- pycc_ext = '.yapyc3'
- iterate_resource_params = iterate_py3_resource_params
- else:
- pycc_cmd = [args.python, args.py_compile]
- pycc_ext = '.yapyc'
- iterate_resource_params = iterate_py2_resource_params
-
- py_files = sorted(names)
-
- for py in py_files:
- cmd = pycc_cmd + ['{}-'.format(os.path.basename(py)), py, '{}{}'.format(py, pycc_ext)]
- call(cmd)
-
- outputs = []
- cmd = [args.rescompiler, '{}.0'.format(args.output)]
- size = 0
- for path, key in iterate_resource_params(py_files):
- addendum = len(path) + len(key)
- if size + addendum > LIMIT and len(cmd) > 2:
- call(cmd)
- outputs.append(cmd[1])
- cmd[1] = '{}.{}'.format(args.output, len(outputs))
- cmd = cmd[0:2]
- size = 0
- cmd.extend([path, key])
- size += addendum
- if len(outputs) == 0:
- cmd[1] = args.output
- call(cmd)
- else:
- call(cmd)
- outputs.append(cmd[1])
- with open(args.output, 'w') as fout:
- for fname in outputs:
- with open(fname, 'r') as fin:
- shutil.copyfileobj(fin, fout)
-
-
-if __name__ == '__main__':
- main()
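
The second half of main implements greedy batching: (path, key) resource pairs are packed into successive rescompiler invocations so that no command's payload exceeds LIMIT characters. The core of that logic, extracted:

LIMIT = 6000

def batch_pairs(pairs, limit=LIMIT):
    # Start a new batch whenever adding the next pair would cross the limit.
    batches, current, size = [], [], 0
    for path, key in pairs:
        addendum = len(path) + len(key)
        if size + addendum > limit and current:
            batches.append(current)
            current, size = [], 0
        current.append((path, key))
        size += addendum
    if current:
        batches.append(current)
    return batches
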
diff --git a/build/scripts/copy_docs_files.py b/build/scripts/copy_docs_files.py
deleted file mode 100644
index 8c6c064a03..0000000000
--- a/build/scripts/copy_docs_files.py
+++ /dev/null
@@ -1,76 +0,0 @@
-import argparse
-import errno
-import os
-import process_command_files as pcf
-import shutil
-import sys
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('--build-root', required=True)
- parser.add_argument('--dst-dir', required=True)
- parser.add_argument('--existing', choices=('skip', 'overwrite'), default='overwrite')
- parser.add_argument('--source-root', required=True)
- parser.add_argument('--src-dir', required=None)
- parser.add_argument('files', nargs='*')
- return parser.parse_args(pcf.get_args(sys.argv[1:]))
-
-
-def makedirs(dirname):
- try:
- os.makedirs(dirname)
- except OSError as e:
- if e.errno == errno.EEXIST and os.path.isdir(dirname):
- pass
- else:
- raise
-
-
-def copy_file(src, dst, overwrite=False, orig_path=None, generated=False):
- if os.path.exists(dst) and not overwrite:
- return
-
- makedirs(os.path.dirname(dst))
-
- with open(src, 'r') as fsrc, open(dst, 'w') as fdst:
- if (orig_path or generated) and src.endswith('.md'):
- fdst.write('---\n{}\n\n---\n'.format('generated: true' if generated else 'vcsPath: {}'.format(orig_path)))
- shutil.copyfileobj(fsrc, fdst)
-
-
-def main():
- args = parse_args()
-
- source_root = os.path.normpath(args.source_root) + os.path.sep
- build_root = os.path.normpath(args.build_root) + os.path.sep
-
- dst_dir = os.path.normpath(args.dst_dir)
- assert dst_dir.startswith(build_root)
- makedirs(dst_dir)
-
- src_dir = os.path.normpath(args.src_dir) + os.path.sep
- assert src_dir.startswith(source_root)
-
- if src_dir.startswith(source_root):
- root = source_root
- is_from_source_root = True
- elif src_dir.startswith(build_root):
- root = build_root
- is_from_source_root = False
- else:
- assert False, 'src_dir [{}] should start with [{}] or [{}]'.format(src_dir, source_root, build_root)
-
- is_overwrite_existing = args.existing == 'overwrite'
-
- for f in [os.path.normpath(f) for f in args.files]:
- src_file = os.path.join(src_dir, f)
- dst_file = os.path.join(dst_dir, f)
- if src_file == dst_file:
- continue
- rel_path = src_file[len(root):] if is_from_source_root else None
- copy_file(src_file, dst_file, overwrite=is_overwrite_existing, orig_path=rel_path)
-
-
-if __name__ == '__main__':
- main()
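
copy_file prepends YAML front matter to each copied Markdown source, recording either a generated-file marker or the original VCS path. A sketch reproducing the format string above:

def front_matter(orig_path=None, generated=False):
    return '---\n{}\n\n---\n'.format(
        'generated: true' if generated else 'vcsPath: {}'.format(orig_path))

print(front_matter(orig_path='docs/readme.md'), end='')
# ---
# vcsPath: docs/readme.md
#
# ---
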
diff --git a/build/scripts/copy_files_to_dir.py b/build/scripts/copy_files_to_dir.py
deleted file mode 100644
index ead57ba16e..0000000000
--- a/build/scripts/copy_files_to_dir.py
+++ /dev/null
@@ -1,59 +0,0 @@
-import argparse
-import errno
-import os
-import process_command_files as pcf
-import shutil
-import sys
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('--dest-dir', required=True)
- parser.add_argument('--existing', choices=('skip', 'overwrite'), default='overwrite')
- parser.add_argument('--flat', action='store_true')
- parser.add_argument('--skip-prefix', dest='skip_prefixes', action='append', default=[])
- parser.add_argument('files', nargs='*')
- return parser.parse_args(pcf.get_args(sys.argv[1:]))
-
-
-def makedirs(dirname):
- try:
- os.makedirs(dirname)
- except OSError as e:
- if e.errno == errno.EEXIST and os.path.isdir(dirname):
- pass
- else:
- raise
-
-
-def main():
- args = parse_args()
-
- dest_dir = os.path.normpath(args.dest_dir) + os.path.sep
- makedirs(dest_dir)
-
- prefixes = ['{}{}'.format(os.path.normpath(p), os.path.sep) for p in args.skip_prefixes]
-
- for src in args.files:
- src = os.path.normpath(src)
- assert os.path.isfile(src)
- if args.flat:
- rel_dst = os.path.basename(src)
- else:
- rel_dst = src
- for prefix in prefixes:
- if src.startswith(prefix):
- rel_dst = src[len(prefix):]
- break
- assert not os.path.isabs(rel_dst)
- dst = os.path.join(args.dest_dir, rel_dst)
- if os.path.isfile(dst) and args.existing == 'skip':
- continue
-
- makedirs(os.path.dirname(dst))
-
- shutil.copyfile(src, dst)
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/scripts/copy_to_dir.py b/build/scripts/copy_to_dir.py
deleted file mode 100644
index 9baeb5ffac..0000000000
--- a/build/scripts/copy_to_dir.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import errno
-import sys
-import os
-import shutil
-import optparse
-import tarfile
-
-
-def parse_args():
- parser = optparse.OptionParser()
- parser.add_option('--build-root')
- parser.add_option('--dest-dir')
- parser.add_option('--dest-arch')
- return parser.parse_args()
-
-
-def ensure_dir_exists(path):
- try:
- os.makedirs(path)
- except OSError as e:
- if e.errno == errno.EEXIST and os.path.isdir(path):
- pass
- else:
- raise
-
-
-def hardlink_or_copy(src, dst):
- if os.name == 'nt':
- shutil.copy(src, dst)
- else:
- try:
- os.link(src, dst)
- except OSError as e:
- if e.errno == errno.EEXIST:
- return
- elif e.errno == errno.EXDEV:
- sys.stderr.write("Can't make cross-device hardlink - fallback to copy: {} -> {}\n".format(src, dst))
- shutil.copy(src, dst)
- else:
- raise
-
-
-def main():
- opts, args = parse_args()
- assert opts.build_root
- assert opts.dest_dir
-
- dest_arch = None
- if opts.dest_arch:
- if opts.dest_arch.endswith('.tar'):
- dest_arch = tarfile.open(opts.dest_arch, 'w', dereference=True)
- elif opts.dest_arch.endswith('.tar.gz') or opts.dest_arch.endswith('.tgz'):
- dest_arch = tarfile.open(opts.dest_arch, 'w:gz', dereference=True)
- else:
- # TODO: move check to graph generation stage
- raise Exception('Unsupported archive type for {}. Use one of: tar, tar.gz, tgz.'.format(os.path.basename(opts.dest_arch)))
-
- for arg in args:
- dst = arg
- if dst.startswith(opts.build_root):
- dst = dst[len(opts.build_root) + 1:]
-
- if dest_arch and not arg.endswith('.pkg.fake'):
- dest_arch.add(arg, arcname=dst)
-
- dst = os.path.join(opts.dest_dir, dst)
- ensure_dir_exists(os.path.dirname(dst))
- hardlink_or_copy(arg, dst)
-
- if dest_arch:
- dest_arch.close()
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/build/scripts/coverage-info.py b/build/scripts/coverage-info.py
deleted file mode 100644
index 94491d9256..0000000000
--- a/build/scripts/coverage-info.py
+++ /dev/null
@@ -1,282 +0,0 @@
-import argparse
-import os
-import sys
-import tarfile
-import collections
-import subprocess
-import re
-
-
-GCDA_EXT = '.gcda'
-GCNO_EXT = '.gcno'
-
-
-def suffixes(path):
- """
- >>> list(suffixes('/a/b/c'))
- ['c', 'b/c', '/a/b/c']
- >>> list(suffixes('/a/b/c/'))
- ['c', 'b/c', '/a/b/c']
- >>> list(suffixes('/a'))
- ['/a']
- >>> list(suffixes('/a/'))
- ['/a']
- >>> list(suffixes('/'))
- []
- """
- path = os.path.normpath(path)
-
- def up_dirs(cur_path):
- while os.path.dirname(cur_path) != cur_path:
- cur_path = os.path.dirname(cur_path)
- yield cur_path
-
- for x in up_dirs(path):
- yield path.replace(x + os.path.sep, '')
-
-
-def recast(in_file, out_file, probe_path, update_stat):
- PREFIX = 'SF:'
-
- probed_path = None
-
- any_payload = False
-
- with open(in_file, 'r') as input, open(out_file, 'w') as output:
- active = True
- for line in input:
- line = line.rstrip('\n')
- if line.startswith('TN:'):
- output.write(line + '\n')
- elif line.startswith(PREFIX):
- path = line[len(PREFIX):]
- probed_path = probe_path(path)
- if probed_path:
- output.write(PREFIX + probed_path + '\n')
- active = bool(probed_path)
- else:
- if active:
- update_stat(probed_path, line)
- output.write(line + '\n')
- any_payload = True
-
- return any_payload
-
-
-def print_stat(da, fnda, teamcity_stat_output):
- lines_hit = sum(map(bool, da.values()))
- lines_total = len(da.values())
- lines_coverage = 100.0 * lines_hit / lines_total if lines_total else 0
-
- func_hit = sum(map(bool, fnda.values()))
- func_total = len(fnda.values())
- func_coverage = 100.0 * func_hit / func_total if func_total else 0
-
- print >>sys.stderr, '[[imp]]Lines[[rst]] {: >16} {: >16} {: >16.1f}%'.format(lines_hit, lines_total, lines_coverage)
- print >>sys.stderr, '[[imp]]Functions[[rst]] {: >16} {: >16} {: >16.1f}%'.format(func_hit, func_total, func_coverage)
-
- if teamcity_stat_output:
- with open(teamcity_stat_output, 'w') as tc_file:
- tc_file.write("##teamcity[blockOpened name='Code Coverage Summary']\n")
- tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsLTotal\' value='{}']\n".format(lines_total))
- tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsLCovered\' value='{}']\n".format(lines_hit))
- tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsMTotal\' value='{}']\n".format(func_total))
- tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsMCovered\' value='{}']\n".format(func_hit))
- tc_file.write("##teamcity[blockClosed name='Code Coverage Summary']\n")
-
-
-def chunks(l, n):
- """
- >>> list(chunks(range(10), 3))
- [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
- >>> list(chunks(range(10), 5))
- [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]
- """
- for i in xrange(0, len(l), n):
- yield l[i:i + n]
-
-
-def combine_info_files(lcov, files, out_file):
- chunk_size = 50
- files = list(set(files))
-
- for chunk in chunks(files, chunk_size):
- combine_cmd = [lcov]
- if os.path.exists(out_file):
- chunk.append(out_file)
- for trace in chunk:
- assert os.path.exists(trace), "Trace file does not exist: {} (cwd={})".format(trace, os.getcwd())
- combine_cmd += ["-a", os.path.abspath(trace)]
- print >>sys.stderr, '## lcov', ' '.join(combine_cmd[1:])
- out_file_tmp = "combined.tmp"
- with open(out_file_tmp, "w") as stdout:
- subprocess.check_call(combine_cmd, stdout=stdout)
- if os.path.exists(out_file):
- os.remove(out_file)
- os.rename(out_file_tmp, out_file)
-
-
-def probe_path_global(path, source_root, prefix_filter, exclude_files):
- if path.endswith('_ut.cpp'):
- return None
-
- for suff in reversed(list(suffixes(path))):
- if (not prefix_filter or suff.startswith(prefix_filter)) and (not exclude_files or not exclude_files.match(suff)):
- full_path = source_root + os.sep + suff
- if os.path.isfile(full_path):
- return full_path
-
- return None
-
-
-def update_stat_global(src_file, line, fnda, da):
- if line.startswith("FNDA:"):
- visits, func_name = line[len("FNDA:"):].split(',')
- fnda[src_file + func_name] += int(visits)
-
- if line.startswith("DA"):
- line_number, visits = line[len("DA:"):].split(',')
- if visits == '=====':
- visits = 0
-
- da[src_file + line_number] += int(visits)
-
-
-def gen_info_global(cmd, cov_info, probe_path, update_stat, lcov_args):
- print >>sys.stderr, '## geninfo', ' '.join(cmd)
- subprocess.check_call(cmd)
- if recast(cov_info + '.tmp', cov_info, probe_path, update_stat):
- lcov_args.append(cov_info)
-
-
-def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files):
- with tarfile.open(gcno_archive) as gcno_tf:
- for gcno_item in gcno_tf:
- if gcno_item.isfile() and gcno_item.name.endswith(GCNO_EXT):
- gcno_tf.extract(gcno_item)
-
- gcno_name = gcno_item.name
- source_fname = gcno_name[:-len(GCNO_EXT)]
- if prefix_filter and not source_fname.startswith(prefix_filter):
- sys.stderr.write("Skipping {} (doesn't match prefix '{}')\n".format(source_fname, prefix_filter))
- continue
- if exclude_files and exclude_files.search(source_fname):
- sys.stderr.write("Skipping {} (matched exclude pattern '{}')\n".format(source_fname, exclude_files.pattern))
- continue
-
- fname2gcno[source_fname] = gcno_name
-
- if os.path.getsize(gcno_name) > 0:
- coverage_info = source_fname + '.' + str(len(fname2info[source_fname])) + '.info'
- fname2info[source_fname].append(coverage_info)
- geninfo_cmd = [
- geninfo_executable,
- '--gcov-tool', gcov_tool,
- '-i', gcno_name,
- '-o', coverage_info + '.tmp'
- ]
- gen_info(geninfo_cmd, coverage_info)
-
-
-def process_all_coverage_files(gcda_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info):
- with tarfile.open(gcda_archive) as gcda_tf:
- for gcda_item in gcda_tf:
- if gcda_item.isfile() and gcda_item.name.endswith(GCDA_EXT):
- gcda_name = gcda_item.name
- source_fname = gcda_name[:-len(GCDA_EXT)]
- for suff in suffixes(source_fname):
- if suff in fname2gcno:
- gcda_new_name = suff + GCDA_EXT
- gcda_item.name = gcda_new_name
- gcda_tf.extract(gcda_item)
- if os.path.getsize(gcda_new_name) > 0:
- coverage_info = suff + '.' + str(len(fname2info[suff])) + '.info'
- fname2info[suff].append(coverage_info)
- geninfo_cmd = [
- geninfo_executable,
- '--gcov-tool', gcov_tool,
- gcda_new_name,
- '-o', coverage_info + '.tmp'
- ]
- gen_info(geninfo_cmd, coverage_info)
-
-
-def gen_cobertura(tool, output, combined_info):
- cmd = [
- tool,
- combined_info,
- '-b', '#hamster#',
- '-o', output
- ]
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = p.communicate()
- if p.returncode:
- raise Exception('lcov_cobertura failed with exit code {}\nstdout: {}\nstderr: {}'.format(p.returncode, out, err))
-
-
-def main(source_root, output, gcno_archive, gcda_archive, gcov_tool, prefix_filter, exclude_regexp, teamcity_stat_output, coverage_report_path, gcov_report, lcov_cobertura):
- exclude_files = re.compile(exclude_regexp) if exclude_regexp else None
-
- fname2gcno = {}
- fname2info = collections.defaultdict(list)
- lcov_args = []
- geninfo_executable = os.path.join(source_root, 'devtools', 'lcov', 'geninfo')
-
- def probe_path(path):
- return probe_path_global(path, source_root, prefix_filter, exclude_files)
-
- fnda = collections.defaultdict(int)
- da = collections.defaultdict(int)
-
- def update_stat(src_file, line):
- update_stat_global(src_file, line, fnda, da)
-
- def gen_info(cmd, cov_info):
- gen_info_global(cmd, cov_info, probe_path, update_stat, lcov_args)
-
- init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files)
- process_all_coverage_files(gcda_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info)
-
- if coverage_report_path:
- output_dir = coverage_report_path
- else:
- output_dir = output + '.dir'
-
- if not os.path.exists(output_dir):
- os.makedirs(output_dir)
-
- teamcity_stat_file = None
- if teamcity_stat_output:
- teamcity_stat_file = os.path.join(output_dir, 'teamcity.out')
- print_stat(da, fnda, teamcity_stat_file)
-
- if lcov_args:
- output_trace = "combined.info"
- combine_info_files(os.path.join(source_root, 'devtools', 'lcov', 'lcov'), lcov_args, output_trace)
- cmd = [os.path.join(source_root, 'devtools', 'lcov', 'genhtml'), '-p', source_root, '--ignore-errors', 'source', '-o', output_dir, output_trace]
- print >>sys.stderr, '## genhtml', ' '.join(cmd)
- subprocess.check_call(cmd)
- if lcov_cobertura:
- gen_cobertura(lcov_cobertura, gcov_report, output_trace)
-
- with tarfile.open(output, 'w') as tar:
- tar.add(output_dir, arcname='.')
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
-
- parser.add_argument('--source-root', action='store')
- parser.add_argument('--output', action='store')
- parser.add_argument('--gcno-archive', action='store')
- parser.add_argument('--gcda-archive', action='store')
- parser.add_argument('--gcov-tool', action='store')
- parser.add_argument('--prefix-filter', action='store')
- parser.add_argument('--exclude-regexp', action='store')
- parser.add_argument('--teamcity-stat-output', action='store_const', const=True)
- parser.add_argument('--coverage-report-path', action='store')
- parser.add_argument('--gcov-report', action='store')
- parser.add_argument('--lcov-cobertura', action='store')
-
- args = parser.parse_args()
- main(**vars(args))
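The suffix probing in probe_path_global() is the heart of this script: lcov traces record build-tree paths, so each path is re-resolved by trying ever-longer suffixes under the source root. A minimal standalone sketch of the same idea (paths hypothetical):

import os

def resolve(path, source_root):
    # for '/a/b/c.cpp' try 'c.cpp', then 'b/c.cpp', then 'a/b/c.cpp'
    parts = os.path.normpath(path).split(os.sep)
    for i in range(len(parts) - 1, -1, -1):
        candidate = os.path.join(source_root, *parts[i:])
        if os.path.isfile(candidate):
            return candidate
    return None
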
diff --git a/build/scripts/cpp_flatc_wrapper.py b/build/scripts/cpp_flatc_wrapper.py
deleted file mode 100644
index 78a20e0280..0000000000
--- a/build/scripts/cpp_flatc_wrapper.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import os
-import subprocess
-import sys
-
-
-def main():
- cmd = sys.argv[1:]
- h_file = None
- try:
- index = cmd.index('-o')
- h_file = cmd[index+1]
- cmd[index+1] = os.path.dirname(h_file)
- except (ValueError, IndexError):
- pass
- p = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = p.communicate()
- if p.returncode:
- if out:
- sys.stderr.write('stdout:\n{}\n'.format(out))
- if err:
- sys.stderr.write('stderr:\n{}\n'.format(err))
- sys.exit(p.returncode)
- if h_file and h_file.endswith(('.fbs.h', '.fbs64.h')):
- cpp_file = '{}.cpp'.format(h_file[:-2])
- with open(cpp_file, 'w') as f:
- f.write('#include "{}"\n'.format(os.path.basename(h_file)))
- sys.exit(0)
-
-
-if __name__ == '__main__':
- main()
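A worked example of the rewrite above, with a hypothetical command line: flatc expects a directory after -o, while the build graph passes the header path, so the wrapper swaps in the dirname and afterwards emits a one-line .cpp stub for the generated header.

cmd = ['flatc', '--cpp', '-o', 'gen/schema.fbs.h', 'schema.fbs']  # hypothetical
# after the rewrite: ['flatc', '--cpp', '-o', 'gen', 'schema.fbs']
# on success the wrapper writes gen/schema.fbs.cpp containing:
#     #include "schema.fbs.h"
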
diff --git a/build/scripts/create_jcoverage_report.py b/build/scripts/create_jcoverage_report.py
deleted file mode 100644
index 45083ff4f7..0000000000
--- a/build/scripts/create_jcoverage_report.py
+++ /dev/null
@@ -1,112 +0,0 @@
-import argparse
-import tarfile
-import zipfile
-import os
-import sys
-import time
-import subprocess
-
-
-def mkdir_p(path):
- try:
- os.makedirs(path)
- except OSError:
- pass
-
-
-class Timer(object):
-
- def __init__(self):
- self.start = time.time()
-
- def step(self, msg):
- sys.stderr.write("{} ({}s)\n".format(msg, int(time.time() - self.start)))
- self.start = time.time()
-
-
-def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_format, tar_output, agent_disposition, runners_paths):
- timer = Timer()
- reports_dir = 'jacoco_reports_dir'
- mkdir_p(reports_dir)
- with tarfile.open(source) as tf:
- tf.extractall(reports_dir)
- timer.step("Coverage data extracted")
- reports = [os.path.join(reports_dir, fname) for fname in os.listdir(reports_dir)]
-
- with open(jars_list) as f:
- jars = f.read().strip().split()
- if jars and runners_paths:
- for r in runners_paths:
- try:
- jars.remove(r)
- except ValueError:
- pass
-
- src_dir = 'sources_dir'
- cls_dir = 'classes_dir'
-
- mkdir_p(src_dir)
- mkdir_p(cls_dir)
-
- for jar in jars:
- if jar.endswith('devtools-jacoco-agent.jar'):
- agent_disposition = jar
-
- # Skip java contrib - it's irrelevant coverage
- if jar.startswith('contrib/java'):
- continue
-
- with zipfile.ZipFile(jar) as jf:
- for entry in jf.infolist():
- if entry.filename.endswith('.java'):
- dest = src_dir
-
- elif entry.filename.endswith('.class'):
- dest = cls_dir
-
- else:
- continue
-
- entry.filename = entry.filename.encode('utf-8')
- jf.extract(entry, dest)
- timer.step("Jar files extracted")
-
- if not agent_disposition:
- print >>sys.stderr, 'Can\'t find jacoco agent. Will not generate html report for java coverage.'
-
- if tar_output:
- report_dir = 'java.report.temp'
- else:
- report_dir = output
- mkdir_p(report_dir)
-
- if agent_disposition:
- agent_cmd = [java, '-jar', agent_disposition, src_dir, cls_dir, prefix_filter or '.', exclude_filter or '__no_exclude__', report_dir, output_format]
- agent_cmd += reports
- subprocess.check_call(agent_cmd)
- timer.step("Jacoco finished")
-
- if tar_output:
- with tarfile.open(output, 'w') as outf:
- outf.add(report_dir, arcname='.')
-
-
-if __name__ == '__main__':
- if 'LC_ALL' in os.environ:
- if os.environ['LC_ALL'] == 'C':
- os.environ['LC_ALL'] = 'en_GB.UTF-8'
-
- parser = argparse.ArgumentParser()
-
- parser.add_argument('--source', action='store')
- parser.add_argument('--output', action='store')
- parser.add_argument('--java', action='store')
- parser.add_argument('--prefix-filter', action='store')
- parser.add_argument('--exclude-filter', action='store')
- parser.add_argument('--jars-list', action='store')
- parser.add_argument('--output-format', action='store', default="html")
- parser.add_argument('--raw-output', dest='tar_output', action='store_false', default=True)
- parser.add_argument('--agent-disposition', action='store')
- parser.add_argument('--runner-path', dest='runners_paths', action='append', default=[])
- args = parser.parse_args()
- main(**vars(args))
diff --git a/build/scripts/custom_link_green_mysql.py b/build/scripts/custom_link_green_mysql.py
deleted file mode 100644
index 13bb9e4ac7..0000000000
--- a/build/scripts/custom_link_green_mysql.py
+++ /dev/null
@@ -1,97 +0,0 @@
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-
-
-SYMBOLS_TO_PATCH = (
- 'connect',
- 'poll',
- 'recv',
- 'recvfrom',
- 'send',
- 'sendto',
-)
-
-class Error(Exception):
- pass
-
-
-def find_compiler(args):
- for arg in args:
- if os.path.basename(arg) in ('clang', 'clang++'):
- return arg
- raise Error('No known compiler found in the command line')
-
-
-def find_libraries(project, args):
- if not project.endswith('/'):
- project = project + '/'
-
- for arg in args:
- if arg.startswith(project):
- yield arg
-
-
-def rename_symbol(symbol):
- return 'green_{}'.format(symbol)
-
-
-def patch_object(object_path, objcopy):
- args = [objcopy]
- for symbol in SYMBOLS_TO_PATCH:
- args.extend(('--redefine-sym', '{}={}'.format(symbol, rename_symbol(symbol))))
- args.append(object_path)
- subprocess.check_call(args)
-
-
-def patch_library(library_path, ar, objcopy):
- tmpdir = tempfile.mkdtemp(dir=os.path.dirname(library_path))
- try:
- subprocess.check_call((ar, 'x', library_path), cwd=tmpdir)
- names = os.listdir(tmpdir)
- for name in names:
- patch_object(os.path.join(tmpdir, name), objcopy=objcopy)
-
- new_library_path = os.path.join(tmpdir, 'library.a')
- subprocess.check_call([ar, 'rcs', new_library_path] + names, cwd=tmpdir)
-
- os.rename(new_library_path, library_path)
-
- finally:
- shutil.rmtree(tmpdir)
-
-
-def main():
- try:
- args = sys.argv[1:]
- compiler = find_compiler(args)
- compiler_dir = os.path.dirname(compiler)
-
- def get_tool(name):
- path = os.path.join(compiler_dir, name)
- if not os.path.exists(path):
- raise Error('No {} found alongside the compiler'.format(name))
- return path
-
- ar = get_tool('llvm-ar')
- objcopy = get_tool('llvm-objcopy')
-
- libraries = tuple(find_libraries('contrib/libs/libmysql_r', args))
- for library in libraries:
- library_path = os.path.abspath(library)
- if not os.path.exists(library_path):
- raise Error('No {} file exists'.format(library))
-
- patch_library(library_path, ar=ar, objcopy=objcopy)
-
- except Exception as error:
- name = os.path.basename(sys.argv[0])
- command = ' '.join(args)
- message = '{name} failed: {error}\nCommand line: {command}'
- print >> sys.stderr, message.format(**locals())
- sys.exit(1)
-
-
-if __name__ == '__main__':
- main()
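For illustration, the llvm-objcopy invocation that patch_object() builds for a single object file looks like this (object name hypothetical); each listed libc symbol is renamed to its green_ counterpart so that an alternative ("green") implementation can be linked in instead:

args = ['llvm-objcopy',
        '--redefine-sym', 'connect=green_connect',
        '--redefine-sym', 'poll=green_poll',
        '--redefine-sym', 'recv=green_recv',
        '--redefine-sym', 'recvfrom=green_recvfrom',
        '--redefine-sym', 'send=green_send',
        '--redefine-sym', 'sendto=green_sendto',
        'client.o']  # hypothetical object extracted from the .a
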
diff --git a/build/scripts/decimal_md5.py b/build/scripts/decimal_md5.py
deleted file mode 100644
index e70ca80a09..0000000000
--- a/build/scripts/decimal_md5.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import hashlib
-import struct
-import sys
-import os
-import argparse
-
-
-def print_code(checksum, func_name):
- if len(func_name) == 0: # safe fallback for old ya.make files
- func_name = "DecimalMD5"
- print 'const char* ' + func_name + '() {return "' + checksum + '";}'
-
-
-def ensure_paths_exist(paths):
- bad_paths = sorted(
- path for path in paths
- if not os.path.exists(path)
- )
- if bad_paths:
- print >> sys.stderr, "decimal_md5 inputs do not exist:"
- for path in bad_paths:
- print >> sys.stderr, path
- sys.exit(1)
-
-
-def _update_digest_with_file_contents(digest, path, block_size=65535):
- with open(path, 'rb') as f:
- while True:
- block = f.read(block_size)
- if not block:
- break
- digest.update(block)
-
-
-def main():
- parser = argparse.ArgumentParser()
- parser.add_argument("--fixed-output", help="don not calculate md5, use this value instead")
- parser.add_argument("--lower-bits", help="use specified count of lower bits", type=int, default=32)
- parser.add_argument("--source-root", help="arcadia source root")
- parser.add_argument("--func-name", help="custom function name to be defined", default="DecimalMD5")
- parser.add_argument("targets", nargs='*', default=['.'])
-
- args = parser.parse_args()
-
- abs_paths = [
- os.path.join(args.source_root, target)
- for target in args.targets
- ]
- ensure_paths_exist(abs_paths)
-
- if args.fixed_output:
- try:
- bitmask = (1 << args.lower_bits) - 1
- fmt = '{:0%dd}' % len(str(bitmask))
- checksum = fmt.format(int(args.fixed_output) & bitmask)
- except ValueError:
- raise ValueError("decimal_md5: bad value passed via --fixed-output: %s" % args.fixed_output)
- print_code(str(checksum), func_name=args.func_name)
- return
-
- md5 = hashlib.md5()
- for path in abs_paths:
- _update_digest_with_file_contents(md5, path)
-
- md5_parts = struct.unpack('IIII', md5.digest())
- md5_int = sum(part << (32 * n) for n, part in enumerate(md5_parts))
- bitmask = (1 << args.lower_bits) - 1
- fmt = '{:0%dd}' % len(str(bitmask))
-
- checksum_str = fmt.format(md5_int & bitmask)
- print_code(checksum_str, func_name=args.func_name)
-
-
-if __name__ == "__main__":
- main()
-
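A worked example of the masking arithmetic above, assuming the default --lower-bits 32: the mask's decimal width fixes the zero-padding, so the generated function always returns a string of the same length.

bitmask = (1 << 32) - 1      # 4294967295: 10 decimal digits wide
fmt = '{:010d}'              # zero-pad to the mask's decimal width
fmt.format(12345 & bitmask)  # -> '0000012345' (hypothetical digest value)
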
diff --git a/build/scripts/error.py b/build/scripts/error.py
deleted file mode 100644
index f7d8ecb2cc..0000000000
--- a/build/scripts/error.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# Sync content of this file with devtools/ya/core/error/__init__.py
-
-TEMPORARY_ERROR_MESSAGES = [
- 'Connection reset by peer',
- 'Connection timed out',
- 'Function not implemented',
- 'I/O operation on closed file',
- 'Internal Server Error',
- 'Network connection closed unexpectedly',
- 'Network is unreachable',
- 'No route to host',
- 'No space left on device',
- 'Not enough space',
- 'Temporary failure in name resolution',
- 'The read operation timed out',
- 'timeout: timed out',
-]
-
-
-# Node exit codes
-class ExitCodes(object):
- TEST_FAILED = 10
- COMPILATION_FAILED = 11
- INFRASTRUCTURE_ERROR = 12
- NOT_RETRIABLE_ERROR = 13
- YT_STORE_FETCH_ERROR = 14
-
-
-def merge_exit_codes(exit_codes):
- return max(e if e >= 0 else 1 for e in exit_codes) if exit_codes else 0
-
-
-def is_temporary_error(exc):
- import logging
- logger = logging.getLogger(__name__)
-
- if getattr(exc, 'temporary', False):
- logger.debug("Exception has temporary attribute: %s", exc)
- return True
-
- import errno
- err = getattr(exc, 'errno', None)
-
- if err == errno.ECONNREFUSED or err == errno.ENETUNREACH:
- logger.debug("Exception has errno attribute: %s (errno=%s)", exc, err)
- return True
-
- import socket
-
- if isinstance(exc, socket.timeout) or isinstance(getattr(exc, 'reason', None), socket.timeout):
- logger.debug("Socket timeout exception: %s", exc)
- return True
-
- if isinstance(exc, socket.gaierror):
- logger.debug("Getaddrinfo exception: %s", exc)
- return True
-
- import urllib2
-
- if isinstance(exc, urllib2.HTTPError) and exc.code in (429, ):
- logger.debug("urllib2.HTTPError: %s", exc)
- return True
-
- import httplib
-
- if isinstance(exc, httplib.IncompleteRead):
- logger.debug("IncompleteRead exception: %s", exc)
- return True
-
- exc_str = str(exc)
-
- for message in TEMPORARY_ERROR_MESSAGES:
- if message in exc_str:
- logger.debug("Found temporary error pattern (%s): %s", message, exc_str)
- return True
-
- return False
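Two quick worked examples of merge_exit_codes() above: negative codes (signal deaths) are clamped to 1, and the worst surviving code wins.

merge_exit_codes([])           # -> 0 (nothing ran)
merge_exit_codes([0, 10, 11])  # -> 11 (COMPILATION_FAILED outranks TEST_FAILED)
merge_exit_codes([0, -9])      # -> 1 (signal clamped to a generic failure)
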
diff --git a/build/scripts/export_script_gen.py b/build/scripts/export_script_gen.py
index 64b732eff5..269af2e3ca 100644
--- a/build/scripts/export_script_gen.py
+++ b/build/scripts/export_script_gen.py
@@ -85,20 +85,17 @@ def to_msvc(src, dest):
def to_darwin(src, dest):
- pre = ''
for item in parse_export_file(src):
if item.get('linux_version'):
continue
if item['lang'] == 'C':
- dest.write(pre + '-Wl,-exported_symbol,_' + item['sym'])
+ dest.write('-Wl,-exported_symbol,_' + item['sym'])
elif item['lang'] == 'C++':
for sym in to_c(item['sym']):
- dest.write(pre + '-Wl,-exported_symbol,_' + sym)
+ dest.write('-Wl,-exported_symbol,_' + sym)
else:
raise Exception('unsupported lang: ' + item['lang'])
- if pre == '':
- pre = ' '
def main():
diff --git a/build/scripts/extract_asrc.py b/build/scripts/extract_asrc.py
deleted file mode 100644
index 89892ddf2d..0000000000
--- a/build/scripts/extract_asrc.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import argparse
-import os
-import tarfile
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('--input', nargs='*', required=True)
- parser.add_argument('--output', required=True)
-
- return parser.parse_args()
-
-
-def main():
- args = parse_args()
-
- for asrc in filter(lambda x: x.endswith('.asrc') and os.path.exists(x), args.input):
- with tarfile.open(asrc, 'r') as tar:
- tar.extractall(path=args.output)
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/scripts/extract_jacoco_report.py b/build/scripts/extract_jacoco_report.py
deleted file mode 100644
index 02e4ba9f13..0000000000
--- a/build/scripts/extract_jacoco_report.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import argparse
-import os
-import re
-import tarfile
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
-
- parser.add_argument('--archive', action='store')
- parser.add_argument('--source-re', action='store')
- parser.add_argument('--destination', action='store')
-
- args = parser.parse_args()
-
- with tarfile.open(args.archive) as tf:
- open(args.destination, 'wb').close()
- extract_list = []
- matcher = re.compile(args.source_re)
- temp_dir = os.path.join(os.path.dirname(args.destination), 'temp_profiles')
- if not os.path.exists(temp_dir):
- os.makedirs(temp_dir)
- for f in [i for i in tf if matcher.match(i.name)]:
- tf.extract(f, path=temp_dir)
- for directory, _, srcs in os.walk(temp_dir):
- for f in srcs:
- with open(args.destination, 'ab') as dst:
- with open(os.path.join(directory, f), 'rb') as src:
- dst.write(src.read())
diff --git a/build/scripts/f2c.py b/build/scripts/f2c.py
deleted file mode 100644
index 7021e1391f..0000000000
--- a/build/scripts/f2c.py
+++ /dev/null
@@ -1,58 +0,0 @@
-import sys
-import subprocess
-import argparse
-import os
-
-
-header = '''\
-#ifdef __GNUC__
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Wunused-parameter"
-#pragma GCC diagnostic ignored "-Wmissing-braces"
-#pragma GCC diagnostic ignored "-Wuninitialized"
-#pragma GCC diagnostic ignored "-Wreturn-type"
-#pragma GCC diagnostic ignored "-Wmissing-field-initializers"
-#endif
-
-'''
-
-footer = '''
-#ifdef __GNUC__
-#pragma GCC diagnostic pop
-#endif
-'''
-
-
-def mkdir_p(directory):
- if not os.path.exists(directory):
- os.makedirs(directory)
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
-
- parser.add_argument('-t', '--tool')
- parser.add_argument('-c', '--input')
- parser.add_argument('-o', '--output')
-
- args = parser.parse_args()
- tmpdir = args.output + '.f2c'
- mkdir_p(tmpdir)
- # should parse includes, really
- p = subprocess.Popen(
- [args.tool, '-w', '-R', '-a', '-I' + os.path.dirname(args.input), '-T' + tmpdir],
- stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
- stdout, stderr = p.communicate(input=open(args.input).read())
- ret = p.wait()
-
- if ret:
- print >>sys.stderr, 'f2c failed: %s, %s' % (stderr, ret)
- sys.exit(ret)
-
- if 'Error' in stderr:
- print >>sys.stderr, stderr
-
- with open(args.output, 'w') as f:
- f.write(header)
- f.write(stdout)
- f.write(footer)
diff --git a/build/scripts/fail_module_cmd.py b/build/scripts/fail_module_cmd.py
deleted file mode 100644
index fa14c0d851..0000000000
--- a/build/scripts/fail_module_cmd.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import sys
-
-
-if __name__ == '__main__':
- assert len(sys.argv) == 2, 'Unexpected number of arguments...'
- sys.stderr.write('Error: module command for target [[bad]]{}[[rst]] was not executed due to build graph configuration errors...\n'.format(sys.argv[1]))
- sys.exit(1)
diff --git a/build/scripts/fetch_from_archive.py b/build/scripts/fetch_from_archive.py
deleted file mode 100644
index 57aff91b5e..0000000000
--- a/build/scripts/fetch_from_archive.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import os
-import sys
-import logging
-import argparse
-
-import fetch_from
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- fetch_from.add_common_arguments(parser)
- parser.add_argument('--file-name', required=True)
- parser.add_argument('--archive', required=True)
-
- return parser.parse_args()
-
-def main(args):
- archive = args.archive
- file_name = args.file_name.rstrip('-')
-
- fetch_from.process(archive, file_name, args, remove=False)
-
-
-if __name__ == '__main__':
- args = parse_args()
- fetch_from.setup_logging(args, os.path.basename(__file__))
-
- try:
- main(args)
- except Exception as e:
- logging.exception(e)
- print >>sys.stderr, open(args.abs_log_path).read()
- sys.stderr.flush()
-
- import error
- sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_from_external.py b/build/scripts/fetch_from_external.py
deleted file mode 100644
index d4ed6f4221..0000000000
--- a/build/scripts/fetch_from_external.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import sys
-import json
-import os.path
-import fetch_from
-import argparse
-import logging
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- fetch_from.add_common_arguments(parser)
- parser.add_argument('--external-file', required=True)
- parser.add_argument('--custom-fetcher')
- parser.add_argument('--resource-file')
- return parser.parse_args()
-
-
-def main(args):
- external_file = args.external_file[:-len('.external')] if args.external_file.endswith('.external') else args.external_file
- if os.path.isfile(args.resource_file):
- fetch_from.process(args.resource_file, os.path.basename(args.resource_file), args, False)
- return
-
- error = None
- try:
- with open(args.external_file) as f:
- js = json.load(f)
-
- if js['storage'] == 'SANDBOX':
- import fetch_from_sandbox as ffsb
- del args.external_file
- args.resource_id = js['resource_id']
- ffsb.main(args)
- elif js['storage'] == 'MDS':
- import fetch_from_mds as fmds
- del args.external_file
- args.key = js['resource_id']
- fmds.main(args)
- else:
- error = 'Unsupported storage in {}'.format(external_file)
- except:
- logging.error('Invalid external file: {}'.format(external_file))
- raise
- if error:
- raise Exception(error)
-
-
-if __name__ == '__main__':
- args = parse_args()
- fetch_from.setup_logging(args, os.path.basename(__file__))
-
- try:
- main(args)
- except Exception as e:
- logging.exception(e)
- print >>sys.stderr, open(args.abs_log_path).read()
- sys.stderr.flush()
-
- import error
- sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_from_mds.py b/build/scripts/fetch_from_mds.py
deleted file mode 100644
index 5e4e656394..0000000000
--- a/build/scripts/fetch_from_mds.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import os
-import sys
-import logging
-import argparse
-
-import fetch_from
-
-MDS_PREFIX = "https://storage.yandex-team.ru/get-devtools/"
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- fetch_from.add_common_arguments(parser)
-
- parser.add_argument('--key', required=True)
-
- return parser.parse_args()
-
-
-def fetch(key):
- parts = key.split("/")
- if len(parts) != 3:
- raise ValueError("Invalid MDS key '{}'".format(key))
-
- _, sha1, file_name = parts
-
- fetched_file = fetch_from.fetch_url(MDS_PREFIX + key, False, file_name, expected_sha1=sha1)
-
- return fetched_file, file_name
-
-
-def main(args):
- fetched_file, resource_file_name = fetch(args.key)
-
- fetch_from.process(fetched_file, resource_file_name, args)
-
-
-if __name__ == '__main__':
- args = parse_args()
- fetch_from.setup_logging(args, os.path.basename(__file__))
-
- try:
- main(args)
- except Exception as e:
- logging.exception(e)
- print >>sys.stderr, open(args.abs_log_path).read()
- sys.stderr.flush()
-
- import error
- sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
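A worked example of the key format expected by fetch() above (values hypothetical): a key is a namespace/sha1/file_name triple, and the sha1 component doubles as the checksum for the downloaded file.

key = '12345/0123456789abcdef0123456789abcdef01234567/tool.tgz'  # hypothetical
url = MDS_PREFIX + key
# fetch_url() downloads tool.tgz and verifies it against the embedded sha1
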
diff --git a/build/scripts/fetch_from_npm.py b/build/scripts/fetch_from_npm.py
deleted file mode 100644
index 28a1e5c929..0000000000
--- a/build/scripts/fetch_from_npm.py
+++ /dev/null
@@ -1,104 +0,0 @@
-import os
-import sys
-import time
-import logging
-import argparse
-import hashlib
-
-import sky
-import fetch_from
-
-
-NPM_BASEURL = "http://npm.yandex-team.ru/"
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- fetch_from.add_common_arguments(parser)
-
- parser.add_argument("--name", required=True)
- parser.add_argument("--version", required=True)
- parser.add_argument("--sky-id", required=True)
- parser.add_argument("--integrity", required=True)
- parser.add_argument("--integrity-algorithm", required=True)
-
- return parser.parse_args()
-
-
-def fetch(name, version, sky_id, integrity, integrity_algorithm, file_name, tries=5):
- """
- :param name: package name
- :type name: str
- :param version: package version
- :type version: str
- :param sky_id: sky id of tarball
- :type sky_id: str
- :param integrity: tarball integrity (hex)
- :type integrity: str
- :param integrity_algorithm: integrity algorithm (known for openssl)
- :type integrity_algorithm: str
- :param file_name: destination file name
- :type file_name: str
- :param tries: tries count
- :type tries: int
- :return: path to fetched file
- :rtype: str
- """
- if sky.is_avaliable():
- fetcher = lambda: sky.fetch(sky_id, file_name)
- else:
- fetcher = lambda: _fetch_via_http(name, version, integrity, integrity_algorithm, file_name)
-
- fetched_file = None
- exc_info = None
-
- for i in range(0, tries):
- try:
- fetched_file = fetcher()
- exc_info = None
- break
- except Exception as e:
- logging.exception(e)
- exc_info = exc_info or sys.exc_info()
- time.sleep(i)
-
- if exc_info:
- raise exc_info[0], exc_info[1], exc_info[2]
-
- return fetched_file
-
-
-def _fetch_via_http(name, version, integrity, integrity_algorithm, file_name):
- # Example: "http://npm.yandex-team.ru/@scope/name/-/name-0.0.1.tgz" for @scope/name v0.0.1.
- url = NPM_BASEURL + "/".join([name, "-", "{}-{}.tgz".format(name.split("/").pop(), version)])
-
- hashobj = hashlib.new(integrity_algorithm)
- fetched_file = fetch_from.fetch_url(url, False, file_name, tries=1, writers=[hashobj.update])
-
- if hashobj.hexdigest() != integrity:
- raise fetch_from.BadChecksumFetchError("Expected {}, but got {} for {}".format(
- integrity,
- hashobj.hexdigest(),
- file_name,
- ))
-
- return fetched_file
-
-
-def main(args):
- file_name = os.path.basename(args.copy_to)
- fetched_file = fetch(args.name, args.version, args.sky_id, args.integrity, args.integrity_algorithm, file_name)
- fetch_from.process(fetched_file, file_name, args)
-
-
-if __name__ == "__main__":
- args = parse_args()
- fetch_from.setup_logging(args, os.path.basename(__file__))
-
- try:
- main(args)
- except Exception as e:
- logging.exception(e)
- print >>sys.stderr, open(args.abs_log_path).read()
- sys.stderr.flush()
-
- import error
- sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_resource.py b/build/scripts/fetch_resource.py
deleted file mode 100644
index d5af311e5d..0000000000
--- a/build/scripts/fetch_resource.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import urllib2
-import argparse
-import xmlrpclib
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('-r', '--resource-id', type=int, required=True)
- parser.add_argument('-o', '--output', required=True)
- return parser.parse_args()
-
-
-def fetch(url, retries=4, timeout=5):
- for i in xrange(retries):
- try:
- return urllib2.urlopen(url, timeout=timeout).read()
-
- except Exception:
- if i + 1 < retries:
- continue
-
- else:
- raise
-
-
-def fetch_resource(id_):
- urls = xmlrpclib.ServerProxy("https://sandbox.yandex-team.ru/sandbox/xmlrpc").get_resource_http_links(id_)
-
- for u in urls:
- try:
- return fetch(u)
-
- except Exception:
- continue
-
- raise Exception('Cannot fetch resource {}'.format(id_))
-
-
-if __name__ == '__main__':
- args = parse_args()
-
- with open(args.output, 'wb') as f:
- f.write(fetch_resource(int(args.resource_id)))
diff --git a/build/scripts/filter_zip.py b/build/scripts/filter_zip.py
deleted file mode 100644
index b2121b9c9e..0000000000
--- a/build/scripts/filter_zip.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import argparse
-import os
-import re
-import uuid
-import zipfile
-
-
-def pattern_to_regexp(p):
- return re.compile(
- '^'
- + re.escape(p)
- .replace(r'\*\*\/', '[_DIR_]')
- .replace(r'\*', '[_FILE_]')
- .replace('[_DIR_]', '(.*/)?')
- .replace('[_FILE_]', '([^/]*)')
- + '$'
- )
-
-
-def is_deathman(positive_filter, negative_filter, candidate):
- remove = bool(positive_filter)
- for pf in positive_filter:
- if pf.match(candidate):
- remove = False
- break
- if not negative_filter or remove:
- return remove
- for nf in negative_filter:
- if nf.match(candidate):
- remove = True
- break
- return remove
-
-
-def just_do_it():
- parser = argparse.ArgumentParser()
- parser.add_argument('--positive', action='append', default=[])
- parser.add_argument('--negative', action='append', default=[])
- parser.add_argument('--file', action='store', required=True)
- args = parser.parse_args()
- if not args.positive and not args.negative:
- return
- pos = [pattern_to_regexp(i) for i in args.positive]
- neg = [pattern_to_regexp(i) for i in args.negative]
- temp_dirname = None
- for _ in range(10):
- candidate = '__unpacked_{}__'.format(uuid.uuid4())
- if not os.path.exists(candidate):
- temp_dirname = candidate
- os.makedirs(temp_dirname)
- break
- if not temp_dirname:
- raise Exception("Can't generate name for temp dir")
-
- with zipfile.ZipFile(args.file, 'r') as zip_ref:
- zip_ref.extractall(temp_dirname)
-
- for root, _, files in os.walk(temp_dirname):
- for f in files:
- candidate = os.path.join(root, f).replace('\\', '/')
- if is_deathman(pos, neg, os.path.relpath(candidate, temp_dirname)):
- os.remove(candidate)
-
- with zipfile.ZipFile(args.file, 'w') as zip_ref:
- for root, _, files in os.walk(temp_dirname):
- for f in files:
- realname = os.path.join(root, f)
- zip_ref.write(realname, os.path.sep.join(os.path.normpath(realname).split(os.path.sep, 2)[1:]))
-
-
-if __name__ == '__main__':
- just_do_it()
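A worked example of pattern_to_regexp() above: '**/' becomes an optional directory prefix and '*' a single path component (Python 2's re.escape also escapes '/', which the replace keys rely on).

rx = pattern_to_regexp('**/*.properties')
# resulting pattern: ^(.*/)?([^/]*)\.properties$
rx.match('META-INF/build.properties')  # matches
rx.match('app.properties')             # matches
rx.match('docs/readme.txt')            # None
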
diff --git a/build/scripts/find_and_tar.py b/build/scripts/find_and_tar.py
deleted file mode 100644
index f251623c68..0000000000
--- a/build/scripts/find_and_tar.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import os
-import sys
-import tarfile
-
-
-def find_gcno(dirname, tail):
- for cur, _dirs, files in os.walk(dirname):
- for f in files:
- if f.endswith(tail):
- yield os.path.relpath(os.path.join(cur, f))
-
-
-def main(args):
- output = args[0]
- tail = args[1] if len(args) > 1 else ''
- with tarfile.open(output, 'w:') as tf:
- for f in find_gcno(os.getcwd(), tail):
- tf.add(f)
-
-
-if __name__ == '__main__':
- main(sys.argv[1:])
diff --git a/build/scripts/find_time_trace.py b/build/scripts/find_time_trace.py
deleted file mode 100644
index 954d203caa..0000000000
--- a/build/scripts/find_time_trace.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import os
-import sys
-
-# /scripts/find_time_trace.py <object_file> <destination>
-# clang derives the `-ftime-trace` output file path from the main output file path
-
-
-def main():
- assert len(sys.argv) == 3
- obj_path = sys.argv[1]
- trace_path = sys.argv[2]
- orig_trace_path = obj_path.rpartition('.o')[0] + '.json'
- os.rename(orig_trace_path, trace_path)
-
-
-if __name__ == '__main__':
- main()
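A worked example (paths hypothetical): compiling foo.cpp to obj/foo.o with -ftime-trace makes clang write obj/foo.json, which the script then renames to the destination the build graph expects.

orig_trace_path = 'obj/foo.o'.rpartition('.o')[0] + '.json'  # -> 'obj/foo.json'
# os.rename(orig_trace_path, 'obj/foo.time_trace.json')      # hypothetical target
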
diff --git a/build/scripts/fix_java_command_file_cp.py b/build/scripts/fix_java_command_file_cp.py
deleted file mode 100644
index fc87048c32..0000000000
--- a/build/scripts/fix_java_command_file_cp.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import sys
-import os
-import argparse
-import subprocess
-import platform
-
-
-def fix_files(args):
- args = args[:]
- parser = argparse.ArgumentParser()
- parser.add_argument('--build-root', default=None)
- args, tail = parser.parse_known_args(args)
- for idx, arg in list(enumerate(tail)):
- if arg.startswith('@') and os.path.isfile(arg[1:]):
- with open(arg[1:]) as f:
- fixed = [i.strip() for i in f]
- if args.build_root:
- fixed = [os.path.join(args.build_root, i) for ln in fixed for i in ln.split(os.path.pathsep)]
- fixed = os.pathsep.join([i.strip() for i in fixed])
- fixed_name = list(os.path.splitext(arg))
- fixed_name[0] += '_fixed'
- fixed_name = ''.join(fixed_name)
- with open(fixed_name[1:], 'w') as f:
- f.write(fixed)
- tail[idx:idx + 1] = [fixed_name]
- return tail
-
-
-if __name__ == '__main__':
- args = fix_files(sys.argv[1:])
- if platform.system() == 'Windows':
- sys.exit(subprocess.Popen(args).wait())
- else:
- os.execv(args[0], args)
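A worked example of the @-file rewrite above (file names and contents hypothetical):

# cp.txt, referenced as @cp.txt on the java command line:
#     libs/a.jar
#     libs/b.jar
# with --build-root /bld the script writes cp_fixed.txt containing
#     /bld/libs/a.jar:/bld/libs/b.jar
# and replaces the argument with @cp_fixed.txt
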
diff --git a/build/scripts/fix_msvc_output.py b/build/scripts/fix_msvc_output.py
deleted file mode 100644
index 183a442e1f..0000000000
--- a/build/scripts/fix_msvc_output.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import subprocess
-import sys
-
-import process_command_files as pcf
-import process_whole_archive_option as pwa
-
-
-def out2err(cmd):
- return subprocess.Popen(cmd, stdout=sys.stderr).wait()
-
-
-def decoding_needed(strval):
- if sys.version_info >= (3, 0, 0):
- return isinstance(strval, bytes)
- else:
- return False
-
-
-def out2err_cut_first_line(cmd):
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
- first_line = True
- while True:
- line = p.stdout.readline()
- line = line.decode('utf-8') if decoding_needed(line) else line
- if not line:
- break
- if first_line:
- sys.stdout.write(line)
- first_line = False
- else:
- sys.stderr.write(line)
- return p.wait()
-
-
-if __name__ == '__main__':
- mode = sys.argv[1]
- args, wa_peers, wa_libs = pwa.get_whole_archive_peers_and_libs(pcf.skip_markers(sys.argv[2:]))
- cmd = pwa.ProcessWholeArchiveOption('WINDOWS', wa_peers, wa_libs).construct_cmd(args)
- run = out2err
- if mode in ('cl', 'ml'):
- # First line of cl.exe and ml64.exe stdout is useless: it prints input file
- run = out2err_cut_first_line
- sys.exit(run(cmd))
diff --git a/build/scripts/gen_aar_gradle_script.py b/build/scripts/gen_aar_gradle_script.py
deleted file mode 100644
index 7f268547f5..0000000000
--- a/build/scripts/gen_aar_gradle_script.py
+++ /dev/null
@@ -1,378 +0,0 @@
-import argparse
-import os
-import tarfile
-
-FLAT_DIRS_REPO_TEMPLATE='flatDir {{ dirs {dirs} }}\n'
-MAVEN_REPO_TEMPLATE='maven {{ url "{repo}" }}\n'
-KEYSTORE_TEMLATE='signingConfigs {{ debug {{ storeFile file("{keystore}") }} }}\n'
-
-ENABLE_JAVADOC = 'tasks["bundle${suffix}Aar"].dependsOn packageJavadocTask'
-DO_NOT_STRIP = '''\
- packagingOptions {
- doNotStrip "*/arm64-v8a/*.so"
- doNotStrip "*/armeabi-v7a/*.so"
- doNotStrip "*/x86_64/*.so"
- doNotStrip "*/x86/*.so"
- }
-'''
-
-AAR_TEMPLATE = """\
-ext.jniLibsDirs = [
- {jni_libs_dirs}
-]
-
-ext.resDirs = [
- {res_dirs}
-]
-
-ext.assetsDirs = [
- {assets_dirs}
-]
-
-ext.javaDirs = [
- {java_dirs}
-]
-
-def aidlDirs = [
- {aidl_dirs}
-]
-
-ext.bundles = [
- {bundles}
-]
-
-ext.androidArs = [
- {aars}
-]
-
-ext.compileOnlyAndroidArs = [
- {compile_only_aars}
-]
-
-def minVersion = 21
-def compileVersion = 30
-def targetVersion = 30
-def buildVersion = '30.0.3'
-
-import com.android.build.gradle.LibraryPlugin
-import java.nio.file.Files
-import java.nio.file.Paths
-import java.util.regex.Matcher
-import java.util.regex.Pattern
-import java.util.zip.ZipFile
-
-
-apply plugin: 'com.github.dcendents.android-maven'
-
-buildDir = "$projectDir/build"
-
-if (!ext.has("packageSuffix"))
- ext.packageSuffix = ""
-
-buildscript {{
-// repositories {{
-// jcenter()
-// mavenCentral()
-// }}
-
- repositories {{
- {maven_repos}
- }}
-
- dependencies {{
- classpath 'com.android.tools.build:gradle:4.0.2'
- classpath 'com.github.dcendents:android-maven-gradle-plugin:1.5'
- }}
-}}
-
-apply plugin: LibraryPlugin
-
-repositories {{
-// flatDir {{
-// dirs System.env.PKG_ROOT + '/bundle'
-// }}
-// maven {{
-// url "http://maven.google.com/"
-// }}
-// maven {{
-// url "http://artifactory.yandex.net/artifactory/public/"
-// }}
-
- {flat_dirs_repo}
-
- {maven_repos}
-}}
-
-android {{
- {keystore}
-
- compileSdkVersion compileVersion
- buildToolsVersion buildVersion
-
- defaultConfig {{
- minSdkVersion minVersion
- targetSdkVersion targetVersion
- consumerProguardFiles '{proguard_rules}'
- }}
-
- sourceSets {{
- main {{
- manifest.srcFile '{manifest}'
- jniLibs.srcDirs = jniLibsDirs
- res.srcDirs = resDirs
- assets.srcDirs = assetsDirs
- java.srcDirs = javaDirs
- aidl.srcDirs = aidlDirs
- }}
- // We don't use this feature, so we set it to a nonexistent directory
- androidTest.setRoot('bundle/tests')
- }}
-
- {do_not_strip}
-
- dependencies {{
- for (bundle in bundles)
- compile("$bundle") {{
- transitive = true
- }}
- for (bundle in androidArs)
- compile(bundle) {{
- transitive = true
- }}
- for (bundle in compileOnlyAndroidArs)
- compileOnly(bundle)
- }}
-
- android.libraryVariants.all {{ variant ->
- def suffix = variant.buildType.name.capitalize()
-
- def sourcesJarTask = project.tasks.create(name: "sourcesJar${{suffix}}", type: Jar) {{
- classifier = 'sources'
- from android.sourceSets.main.java.srcDirs
- include '**/*.java'
- eachFile {{ fcd ->
- def segments = fcd.relativePath.segments
- if (segments[0] == 'impl') {{
- fcd.relativePath = new RelativePath(true, segments.drop(1))
- }}
- }}
- includeEmptyDirs = false
- }}
-
- def manifestFile = android.sourceSets.main.manifest.srcFile
- def manifestXml = new XmlParser().parse(manifestFile)
-
- def packageName = manifestXml['@package']
- def groupName = packageName.tokenize('.')[0..-2].join('.')
-
- def androidNs = new groovy.xml.Namespace("http://schemas.android.com/apk/res/android")
- def packageVersion = manifestXml.attributes()[androidNs.versionName]
-
- def writePomTask = project.tasks.create(name: "writePom${{suffix}}") {{
- pom {{
- project {{
- groupId groupName
- version packageVersion
- packaging 'aar'
- }}
- }}.writeTo("$buildDir/${{rootProject.name}}$packageSuffix-pom.xml")
- }}
-
- tasks["bundle${{suffix}}Aar"].dependsOn sourcesJarTask
- tasks["bundle${{suffix}}Aar"].dependsOn writePomTask
- }}
-
- android.libraryVariants.all {{ variant ->
- def capitalizedVariantName = variant.name.capitalize()
- def suffix = variant.buildType.name.capitalize()
-
- def javadocTask = project.tasks.create(name: "generate${{capitalizedVariantName}}Javadoc", type: Javadoc) {{
- group = "Javadoc"
- description "Generates Javadoc for $capitalizedVariantName"
-
- title = "Yandex documentation"
-
- source = android.sourceSets.main.java.srcDirs
- include "**/*/yandex/*/**"
- // TODO: remove this when we support internal doc exclusion in IDL
- // https://st.yandex-team.ru/MAPSMOBCORE-11364
- exclude "**/internal/**"
-
- ext.androidJar = "${{android.sdkDirectory.path}}/platforms/${{android.compileSdkVersion}}/android.jar"
- classpath =
- files(android.getBootClasspath().join(File.pathSeparator)) +
- configurations.compile +
- files(ext.androidJar) +
- files(variant.javaCompile.outputs.files)
-
- destinationDir = file("$buildDir/${{rootProject.name}}-javadoc/$capitalizedVariantName/")
-
- options.doclet("ExcludeDoclet")
- options.docletpath(
- files(repositories.maven.url).getAsFileTree()
- .matching{{include "**/exclude-doclet-1.0.0.jar"}}
- .getSingleFile())
-
- options.charSet = "UTF-8"
- options.encoding = "UTF-8"
-
- failOnError false
-
- afterEvaluate {{
- def dependencyTree = project.configurations.compile.getAsFileTree()
- def aar_set = dependencyTree.matching{{include "**/*.aar"}}.getFiles()
- def jar_tree = dependencyTree.matching{{include "**/*.jar"}}
-
- classpath += files(android.libraryVariants.collect {{ libraryVariant ->
- libraryVariant.javaCompileProvider.get().classpath.files
- }})
-
- aar_set.each{{ aar ->
- def outputPath = "$buildDir/tmp/aarJar/${{aar.name.replace('.aar', '.jar')}}"
- classpath += files(outputPath)
-
- dependsOn task(name: "extract_${{aar.getAbsolutePath().replace(File.separatorChar, '_' as char)}}-${{capitalizedVariantName}}").doLast {{
- extractClassesJar(aar, outputPath)
- }}
- }}
- }}
- }}
-
- def packageJavadocTask = project.tasks.create(name: "package${{capitalizedVariantName}}Javadoc", type: Tar) {{
- description "Makes an archive from Javadoc output"
- from "${{buildDir}}/${{rootProject.name}}-javadoc/$capitalizedVariantName/"
- archiveFileName = "${{rootProject.name}}-javadoc.tar.gz"
- destinationDirectory = new File("${{buildDir}}")
- dependsOn javadocTask
- }}
-
- {enable_javadoc}
- }}
-
-}}
-
-private def extractClassesJar(aarPath, outputPath) {{
- if (!aarPath.exists()) {{
- throw new GradleException("AAR $aarPath not found")
- }}
-
- def zip = new ZipFile(aarPath)
- zip.entries().each {{
- if (it.name == "classes.jar") {{
- def path = Paths.get(outputPath)
- if (!Files.exists(path)) {{
- Files.createDirectories(path.getParent())
- Files.copy(zip.getInputStream(it), path)
- }}
- }}
- }}
- zip.close()
-}}
-
-"""
-
-
-def gen_build_script(args):
-
- def wrap(items):
- return ',\n '.join('"{}"'.format(x) for x in items)
-
- bundles = []
- bundles_dirs = set(args.flat_repos)
- for bundle in args.bundles:
- dir_name, base_name = os.path.split(bundle)
- assert(len(dir_name) > 0 and len(base_name) > 0)
- name, ext = os.path.splitext(base_name)
- assert(len(name) > 0 and ext == '.aar')
- bundles_dirs.add(dir_name)
- bundles.append('com.yandex:{}@aar'.format(name))
-
- if len(bundles_dirs) > 0:
- flat_dirs_repo = FLAT_DIRS_REPO_TEMPLATE.format(dirs=wrap(bundles_dirs))
- else:
- flat_dirs_repo = ''
-
- maven_repos = ''.join(MAVEN_REPO_TEMPLATE.format(repo=repo) for repo in args.maven_repos)
-
- if args.keystore:
- keystore = KEYSTORE_TEMLATE.format(keystore=args.keystore)
- else:
- keystore = ''
-
- if args.generate_doc:
- enable_javadoc = ENABLE_JAVADOC
- else:
- enable_javadoc = ''
-
- if args.do_not_strip:
- do_not_strip = DO_NOT_STRIP
- else:
- do_not_strip = ''
-
- return AAR_TEMPLATE.format(
- aars=wrap(args.aars),
- compile_only_aars=wrap(args.compile_only_aars),
- aidl_dirs=wrap(args.aidl_dirs),
- assets_dirs=wrap(args.assets_dirs),
- bundles=wrap(bundles),
- do_not_strip=do_not_strip,
- enable_javadoc=enable_javadoc,
- flat_dirs_repo=flat_dirs_repo,
- java_dirs=wrap(args.java_dirs),
- jni_libs_dirs=wrap(args.jni_libs_dirs),
- keystore=keystore,
- manifest=args.manifest,
- maven_repos=maven_repos,
- proguard_rules=args.proguard_rules,
- res_dirs=wrap(args.res_dirs),
- )
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
- parser.add_argument('--aars', nargs='*', default=[])
- parser.add_argument('--compile-only-aars', nargs='*', default=[])
- parser.add_argument('--aidl-dirs', nargs='*', default=[])
- parser.add_argument('--assets-dirs', nargs='*', default=[])
- parser.add_argument('--bundle-name', nargs='?', default='default-bundle-name')
- parser.add_argument('--bundles', nargs='*', default=[])
- parser.add_argument('--do-not-strip', action='store_true')
- parser.add_argument('--flat-repos', nargs='*', default=[])
- parser.add_argument('--generate-doc', action='store_true')
- parser.add_argument('--java-dirs', nargs='*', default=[])
- parser.add_argument('--jni-libs-dirs', nargs='*', default=[])
- parser.add_argument('--keystore', default=None)
- parser.add_argument('--manifest', required=True)
- parser.add_argument('--maven-repos', nargs='*', default=[])
- parser.add_argument('--output-dir', required=True)
- parser.add_argument('--peers', nargs='*', default=[])
- parser.add_argument('--proguard-rules', nargs='?', default=None)
- parser.add_argument('--res-dirs', nargs='*', default=[])
- args = parser.parse_args()
-
- if args.proguard_rules is None:
- args.proguard_rules = os.path.join(args.output_dir, 'proguard-rules.txt')
- with open(args.proguard_rules, 'w') as f:
- pass
-
- for index, jsrc in enumerate(filter(lambda x: x.endswith('.jsrc'), args.peers)):
- jsrc_dir = os.path.join(args.output_dir, 'jsrc_{}'.format(str(index)))
- os.makedirs(jsrc_dir)
- with tarfile.open(jsrc, 'r') as tar:
- tar.extractall(path=jsrc_dir)
- args.java_dirs.append(jsrc_dir)
-
- args.build_gradle = os.path.join(args.output_dir, 'build.gradle')
- args.settings_gradle = os.path.join(args.output_dir, 'settings.gradle')
- args.gradle_properties = os.path.join(args.output_dir, 'gradle.properties')
-
- content = gen_build_script(args)
- with open(args.build_gradle, 'w') as f:
- f.write(content)
-
- with open(args.gradle_properties, 'w') as f:
- f.write('android.useAndroidX=true')
-
- if args.bundle_name:
- with open(args.settings_gradle, 'w') as f:
- f.write('rootProject.name = "{}"'.format(args.bundle_name))
diff --git a/build/scripts/gen_java_codenav_entry.py b/build/scripts/gen_java_codenav_entry.py
deleted file mode 100644
index ff0a5c737d..0000000000
--- a/build/scripts/gen_java_codenav_entry.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import argparse
-import datetime
-import os
-import subprocess
-import sys
-import tarfile
-
-
-def extract_kindexes(kindexes):
- for kindex in kindexes:
- with tarfile.TarFile(kindex) as tf:
- for fname in [i for i in tf.getnames() if i.endswith('.kzip')]:
- tf.extract(fname)
- yield fname
-
-
-def just_do_it(java, kythe, kythe_to_proto, out_name, binding_only, kindexes):
- temp_out_name = out_name + '.temp'
- kindex_inputs = list(extract_kindexes(kindexes))
- open(temp_out_name, 'w').close()
- start = datetime.datetime.now()
- for kindex in kindex_inputs:
- print >> sys.stderr, '[INFO] Processing:', kindex
- indexer_start = datetime.datetime.now()
- p = subprocess.Popen([java, '-jar', os.path.join(kythe, 'indexers/java_indexer.jar'), kindex], stdout=subprocess.PIPE)
- indexer_out, _ = p.communicate()
- print >> sys.stderr, '[INFO] Indexer execution time:', (datetime.datetime.now() - indexer_start).total_seconds(), 'seconds'
- if p.returncode:
- raise Exception('java_indexer failed with exit code {}'.format(p.returncode))
- dedup_start = datetime.datetime.now()
- p = subprocess.Popen([os.path.join(kythe, 'tools/dedup_stream')], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
- dedup_out, _ = p.communicate(indexer_out)
- print >> sys.stderr, '[INFO] Dedup execution time:', (datetime.datetime.now() - dedup_start).total_seconds(), 'seconds'
- if p.returncode:
- raise Exception('dedup_stream failed with exit code {}'.format(p.returncode))
- entrystream_start = datetime.datetime.now()
- p = subprocess.Popen([os.path.join(kythe, 'tools/entrystream'), '--write_json'], stdin=subprocess.PIPE, stdout=open(temp_out_name, 'a'))
- p.communicate(dedup_out)
- if p.returncode:
- raise Exception('entrystream failed with exit code {}'.format(p.returncode))
- print >> sys.stderr, '[INFO] Entrystream execution time:', (datetime.datetime.now() - entrystream_start).total_seconds(), 'seconds'
- preprocess_start = datetime.datetime.now()
- subprocess.check_call([kythe_to_proto, '--preprocess-entry', '--entries', temp_out_name, '--out', out_name] + (['--only-binding-data'] if binding_only else []))
- print >> sys.stderr, '[INFO] Preprocessing execution time:', (datetime.datetime.now() - preprocess_start).total_seconds(), 'seconds'
- print >> sys.stderr, '[INFO] Total execution time:', (datetime.datetime.now() - start).total_seconds(), 'seconds'
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
- parser.add_argument("--java", help="java path")
- parser.add_argument("--kythe", help="kythe path")
- parser.add_argument("--kythe-to-proto", help="kythe_to_proto tool path")
- parser.add_argument("--out-name", help="entries json out name")
- parser.add_argument("--binding-only", action="store_true", default=False, help="filter only binding data")
- parser.add_argument("kindexes", nargs='*')
- args = parser.parse_args()
- just_do_it(args.java, args.kythe, args.kythe_to_proto, args.out_name, args.binding_only, args.kindexes)
diff --git a/build/scripts/gen_java_codenav_protobuf.py b/build/scripts/gen_java_codenav_protobuf.py
deleted file mode 100644
index aee8cfe6c3..0000000000
--- a/build/scripts/gen_java_codenav_protobuf.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import argparse
-import os
-
-
-def just_do_it(kythe_to_proto, entries, out_name, build_file, source_root):
- with open(build_file) as f:
- classpath = os.pathsep.join([line.strip() for line in f])
- os.execv(
- kythe_to_proto,
- [kythe_to_proto, '--sources-rel-root', 'fake_arcadia_root', '--entries', entries, '--out', out_name, '--classpath', classpath, '--arcadia-root', source_root]
- )
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
- parser.add_argument("--kythe-to-proto", help="kythe_to_proto tool path")
- parser.add_argument("--entries", help="entries json path")
- parser.add_argument("--out-name", help="protbuf out name")
- parser.add_argument("--build-file", help="build file( containing classpath )")
- parser.add_argument("--source-root", help="source root")
- args = parser.parse_args()
- just_do_it(args.kythe_to_proto, args.entries, args.out_name, args.build_file, args.source_root)
diff --git a/build/scripts/gen_mx_table.py b/build/scripts/gen_mx_table.py
deleted file mode 100644
index 187c21c539..0000000000
--- a/build/scripts/gen_mx_table.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import sys
-
-tmpl = """
-#include "yabs_mx_calc_table.h"
-
-#include <kernel/matrixnet/mn_sse.h>
-
-#include <library/cpp/archive/yarchive.h>
-
-#include <util/memory/blob.h>
-#include <util/generic/hash.h>
-#include <util/generic/ptr.h>
-#include <util/generic/singleton.h>
-
-using namespace NMatrixnet;
-
-extern "C" {
- extern const unsigned char MxFormulas[];
- extern const ui32 MxFormulasSize;
-}
-
-namespace {
- struct TFml: public TBlob, public TMnSseInfo {
- inline TFml(const TBlob& b)
- : TBlob(b)
- , TMnSseInfo(Data(), Size())
- {
- }
- };
-
- struct TFormulas: public THashMap<size_t, TAutoPtr<TFml>> {
- inline TFormulas() {
- TBlob b = TBlob::NoCopy(MxFormulas, MxFormulasSize);
- TArchiveReader ar(b);
- %s
- }
-
- inline const TMnSseInfo& at(size_t n) const noexcept {
- return *find(n)->second;
- }
- };
-
- %s
-
- static func_descr_t yabs_funcs[] = {
- %s
- };
-}
-
-yabs_mx_calc_table_t yabs_mx_calc_table = {YABS_MX_CALC_VERSION, 10000, 0, yabs_funcs};
-"""
-
-if __name__ == '__main__':
- init = []
- body = []
- defs = {}
-
- for i in sys.argv[1:]:
- name = i.replace('.', '_')
- num = long(name.split('_')[1])
-
- init.append('(*this)[%s] = new TFml(ar.ObjectBlobByKey("%s"));' % (num, '/' + i))
-
- f1 = 'static void yabs_%s(size_t count, const float** args, double* res) {Singleton<TFormulas>()->at(%s).DoCalcRelevs(args, res, count);}' % (name, num)
- f2 = 'static size_t yabs_%s_factor_count() {return Singleton<TFormulas>()->at(%s).MaxFactorIndex() + 1;}' % (name, num)
-
- body.append(f1)
- body.append(f2)
-
- d1 = 'yabs_%s' % name
- d2 = 'yabs_%s_factor_count' % name
-
- defs[num] = '{%s, %s}' % (d1, d2)
-
- print tmpl % ('\n'.join(init), '\n\n'.join(body), ',\n'.join((defs.get(i, '{nullptr, nullptr}') for i in range(0, 10000))))
diff --git a/build/scripts/gen_swiftc_output_map.py b/build/scripts/gen_swiftc_output_map.py
deleted file mode 100644
index 01ce85f256..0000000000
--- a/build/scripts/gen_swiftc_output_map.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import json
-import sys
-
-
-def just_do_it(args):
- source_root, build_root, out_file, srcs = args[0], args[1], args[2], args[3:]
- assert(len(srcs))
- result_obj = {}
- for src in srcs:
- result_obj[src] = {'object': src.replace(source_root, build_root) + '.o'}
- with open(out_file, 'w') as of:
- of.write(json.dumps(result_obj))
-
-if __name__ == '__main__':
- just_do_it(sys.argv[1:])
diff --git a/build/scripts/gen_tasklet_reg.py b/build/scripts/gen_tasklet_reg.py
deleted file mode 100644
index 5b747c2eca..0000000000
--- a/build/scripts/gen_tasklet_reg.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import argparse
-
-TEMPLATE = '''\
-{includes}\
-#include <tasklet/runtime/lib/{language}_wrapper.h>
-#include <tasklet/runtime/lib/registry.h>
-
-static const NTasklet::TRegHelper REG(
- "{name}",
- new NTasklet::{wrapper}
-);
-'''
-
-WRAPPER = {
- 'cpp': 'TCppWrapper<{impl}>()',
- 'js': 'TJsWrapper("{impl}")',
- 'go': 'TGoWrapper("{impl}")',
- 'py': 'TPythonWrapper("{impl}")',
- 'java': 'TJavaWrapper("{impl}", "{py_wrapper}")',
-}
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('name')
- parser.add_argument('output')
- parser.add_argument('-l', '--lang', choices=WRAPPER, required=True)
- parser.add_argument('-i', '--impl', required=True)
- parser.add_argument('-w', '--wrapper', required=False)
- parser.add_argument('includes', nargs='*')
-
- return parser.parse_args()
-
-
-if __name__ == '__main__':
- args = parse_args()
-
- includes = ''.join(
- '#include <{}>\n'.format(include)
- for include in args.includes
- )
-
- code = TEMPLATE.format(
- includes=includes,
- language=args.lang,
- name=args.name,
- wrapper=WRAPPER[args.lang].format(impl=args.impl, py_wrapper=args.wrapper),
- )
-
- with open(args.output, 'w') as f:
- f.write(code)
diff --git a/build/scripts/gen_test_apk_gradle_script.py b/build/scripts/gen_test_apk_gradle_script.py
deleted file mode 100644
index d1a78ceb1c..0000000000
--- a/build/scripts/gen_test_apk_gradle_script.py
+++ /dev/null
@@ -1,193 +0,0 @@
-import argparse
-import os
-import tarfile
-import xml.etree.ElementTree as etree
-
-FLAT_DIRS_REPO_TEMPLATE='flatDir {{ dirs {dirs} }}\n'
-MAVEN_REPO_TEMPLATE='maven {{ url "{repo}" }}\n'
-KEYSTORE_TEMLATE='signingConfigs {{ debug {{ storeFile file("{keystore}") }} }}\n'
-
-TEST_APK_TEMPLATE = """\
-ext.jniLibsDirs = [
- {jni_libs_dirs}
-]
-ext.resDirs = [
- {res_dirs}
-]
-ext.javaDirs = [
- {java_dirs}
-]
-ext.bundles = [
- {bundles}
-]
-
-buildscript {{
-// repositories {{
-// jcenter()
-// }}
-
- repositories {{
- {maven_repos}
- }}
-
- dependencies {{
- classpath 'com.android.tools.build:gradle:3.5.3'
- }}
-}}
-
-apply plugin: 'com.android.application'
-
-repositories {{
-// maven {{
-// url "http://maven.google.com/"
-// }}
-// maven {{
-// url "http://artifactory.yandex.net/artifactory/public/"
-// }}
-// flatDir {{
-// dirs System.env.PKG_ROOT + '/bundle'
-// }}
-
- {flat_dirs_repo}
-
- {maven_repos}
-}}
-
-dependencies {{
- for (bundle in bundles) {{
- compile("$bundle")
- }}
-}}
-
-android {{
- {keystore}
-
- compileSdkVersion 30
- buildToolsVersion "30.0.3"
-
-
- defaultConfig {{
- minSdkVersion 21
- targetSdkVersion 30
- applicationId "{app_id}"
- }}
-
- sourceSets {{
- main {{
- manifest.srcFile 'Manifest.xml'
- jniLibs.srcDirs = jniLibsDirs
- res.srcDirs = resDirs
- java.srcDirs = javaDirs
- }}
- }}
-
- applicationVariants.all {{ variant ->
- variant.outputs.each {{ output ->
- def fileName = "$projectDir/output/{app_id}.apk"
- output.outputFileName = new File(output.outputFile.parent, fileName).getName()
- }}
- }}
-
- dependencies {{
- implementation 'com.google.android.gms:play-services-location:16.0.0'
- implementation 'com.google.android.gms:play-services-gcm:16.0.0'
- implementation 'com.evernote:android-job:1.2.6'
- implementation 'androidx.annotation:annotation:1.1.0'
- implementation 'androidx.core:core:1.1.0'
- }}
-}}
-"""
-
-
-def create_native_properties(output_dir, library_name):
- native_properties_file = os.path.join(output_dir, 'native_library_name.xml')
- resources = etree.Element('resources')
- name = etree.SubElement(resources, 'item', dict(name='native_library_name', type='string'))
- name.text = library_name
- etree.ElementTree(resources).write(native_properties_file, xml_declaration=True, encoding='utf-8')
-
-
-def gen_build_script(args):
- def wrap(items):
- return ',\n '.join('"{}"'.format(x) for x in items)
-
- bundles = []
- bundles_dirs = set(args.flat_repos)
- for bundle in args.bundles:
- dir_name, base_name = os.path.split(bundle)
- assert(len(dir_name) > 0 and len(base_name) > 0)
- name, ext = os.path.splitext(base_name)
- assert(len(name) > 0 and ext == '.aar')
- bundles_dirs.add(dir_name)
- bundles.append('com.yandex:{}@aar'.format(name))
-
- if len(bundles_dirs) > 0:
- flat_dirs_repo = FLAT_DIRS_REPO_TEMPLATE.format(dirs=wrap(bundles_dirs))
- else:
- flat_dirs_repo = ''
-
- maven_repos = ''.join(MAVEN_REPO_TEMPLATE.format(repo=repo) for repo in args.maven_repos)
-
- if args.keystore:
- keystore = KEYSTORE_TEMLATE.format(keystore=args.keystore)
- else:
- keystore = ''
-
- return TEST_APK_TEMPLATE.format(
- app_id=args.app_id,
- jni_libs_dirs=wrap(args.jni_libs_dirs),
- res_dirs=wrap(args.res_dirs),
- java_dirs=wrap(args.java_dirs),
- maven_repos=maven_repos,
- bundles=wrap(bundles),
- flat_dirs_repo=flat_dirs_repo,
- keystore=keystore,
- )
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
- parser.add_argument('--aars', nargs='*', default=[])
- parser.add_argument('--app-id', required=True)
- parser.add_argument('--assets-dirs', nargs='*', default=[])
- parser.add_argument('--bundles', nargs='*', default=[])
- parser.add_argument('--bundle-name', nargs='?', default=None)
- parser.add_argument('--java-dirs', nargs='*', default=[])
- parser.add_argument('--jni-libs-dirs', nargs='*', default=[])
- parser.add_argument('--library-name', required=True)
- parser.add_argument('--manifest', required=True)
- parser.add_argument('--flat-repos', nargs='*', default=[])
- parser.add_argument('--maven-repos', nargs='*', default=[])
- parser.add_argument('--output-dir', required=True)
- parser.add_argument('--peers', nargs='*', default=[])
- parser.add_argument('--keystore', default=None)
- parser.add_argument('--res-dirs', nargs='*', default=[])
- args = parser.parse_args()
-
- for index, jsrc in enumerate(filter(lambda x: x.endswith('.jsrc'), args.peers)):
- jsrc_dir = os.path.join(args.output_dir, 'jsrc_{}'.format(str(index)))
- os.makedirs(jsrc_dir)
- with tarfile.open(jsrc, 'r') as tar:
- tar.extractall(path=jsrc_dir)
- args.java_dirs.append(jsrc_dir)
-
- args.build_gradle = os.path.join(args.output_dir, 'build.gradle')
- args.settings_gradle = os.path.join(args.output_dir, 'settings.gradle')
- args.gradle_properties = os.path.join(args.output_dir, 'gradle.properties')
-
- content = gen_build_script(args)
- with open(args.build_gradle, 'w') as f:
- f.write(content)
-
- with open(args.gradle_properties, 'w') as f:
- f.write('''android.enableJetifier=true
- android.useAndroidX=true
- org.gradle.jvmargs=-Xmx8192m -XX:MaxPermSize=512m''')
-
- if args.bundle_name:
- with open(args.settings_gradle, 'w') as f:
- f.write('rootProject.name = "{}"'.format(args.bundle_name))
-
- values_dir = os.path.join(args.output_dir, 'res', 'values')
- os.makedirs(values_dir)
- create_native_properties(values_dir, args.library_name)
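A short sketch of how gen_build_script above turned bundle paths into Gradle coordinates plus a flatDir repository (paths hypothetical):

    import os

    bundle = 'pkg_root/bundle/maps-mobile.aar'
    dir_name, base_name = os.path.split(bundle)
    name, ext = os.path.splitext(base_name)
    assert ext == '.aar'
    print('com.yandex:{}@aar'.format(name))            # com.yandex:maps-mobile@aar
    print('flatDir {{ dirs "{}" }}'.format(dir_name))  # flatDir { dirs "pkg_root/bundle" }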
diff --git a/build/scripts/gen_ub.py b/build/scripts/gen_ub.py
deleted file mode 100644
index ad79cda926..0000000000
--- a/build/scripts/gen_ub.py
+++ /dev/null
@@ -1,86 +0,0 @@
-import argparse
-import os
-import tarfile
-import contextlib
-import hashlib
-import base64
-import io
-
-
-stub = """#!/usr/bin/env python
-
-info = {info}
-data = "{data}"
-
-import platform
-import os
-import sys
-import tarfile
-import contextlib
-import io
-import base64
-
-
-def current_platform():
- arch = platform.machine().upper()
-
- if arch == 'AMD64':
- arch = 'X86_64'
-
- platf = platform.system().upper()
-
- if platf.startswith('WIN'):
- platf = 'WIN'
-
- return (platf + '-' + arch).lower()
-
-
-def extract_file(fname):
- with contextlib.closing(tarfile.open(fileobj=io.BytesIO(base64.b64decode(data)))) as f:
- return f.extractfile(fname).read()
-
-
-fname = info[current_platform()]
-my_path = os.path.realpath(os.path.abspath(__file__))
-tmp_path = my_path + '.tmp'
-
-with open(tmp_path, 'wb') as f:
- f.write(extract_file(fname))
-
-os.rename(tmp_path, my_path)
-os.chmod(my_path, 0775)
-os.execv(sys.argv[0], sys.argv)
-"""
-
-
-def gen_ub(output, data):
- info = {}
- binary = io.BytesIO()
-
- with contextlib.closing(tarfile.open(mode='w:bz2', fileobj=binary, dereference=True)) as f:
- for pl, path in data:
- fname = os.path.basename(path)
- pl = pl.split('-')
- pl = pl[1] + '-' + pl[2]
- info[pl] = fname
- f.add(path, arcname=fname)
-
- binary = binary.getvalue()
- info['md5'] = hashlib.md5(binary).hexdigest()
-
- with open(output, 'w') as f:
- f.write(stub.format(info=info, data=base64.b64encode(binary)))
-
- os.chmod(output, 0775)
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
-
- parser.add_argument('--path', action='append')
- parser.add_argument('--platform', action='append')
- parser.add_argument('--output', action='store')
-
- args = parser.parse_args()
-
- gen_ub(args.output, zip(args.platform, args.path))
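Judging from the pl.split('-') handling above, each --platform value is expected to carry three dash-separated components (hypothetically 'default-linux-x86_64'), of which the stub keeps 'linux-x86_64'. A sketch of the matching key the stub computes at run time:

    import platform

    arch = platform.machine().upper()
    if arch == 'AMD64':            # Windows reports AMD64 for x86_64
        arch = 'X86_64'
    platf = platform.system().upper()
    if platf.startswith('WIN'):
        platf = 'WIN'
    print((platf + '-' + arch).lower())   # e.g. 'linux-x86_64'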
diff --git a/build/scripts/gen_yql_python_udf.py b/build/scripts/gen_yql_python_udf.py
deleted file mode 100644
index 13b5898117..0000000000
--- a/build/scripts/gen_yql_python_udf.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import sys
-
-TEMPLATE="""
-#include <yql/udfs/common/python/python_udf/python_udf.h>
-
-#include <ydb/library/yql/public/udf/udf_registrator.h>
-
-#if @WITH_LIBRA@
-#include <yql/udfs/quality/libra/module/module.h>
-#endif
-
-using namespace NKikimr::NUdf;
-
-#ifdef BUILD_UDF
-
-#if @WITH_LIBRA@
-LIBRA_MODULE(TLibraModule, "Libra@MODULE_NAME@");
-#endif
-
-extern "C" UDF_API void Register(IRegistrator& registrator, ui32 flags) {
- RegisterYqlPythonUdf(registrator, flags, TStringBuf("@MODULE_NAME@"), TStringBuf("@PACKAGE_NAME@"), EPythonFlavor::@FLAVOR@);
-#if @WITH_LIBRA@
- RegisterHelper<TLibraModule>(registrator);
-#endif
-}
-
-extern "C" UDF_API ui32 AbiVersion() {
- return CurrentAbiVersion();
-}
-
-extern "C" UDF_API void SetBackTraceCallback(TBackTraceCallback callback) {
- SetBackTraceCallbackImpl(callback);
-}
-
-#endif
-"""
-
-
-def main():
- assert len(sys.argv) == 6
- flavor, module_name, package_name, path, libra_flag = sys.argv[1:]
- with open(path, 'w') as f:
- f.write(
- TEMPLATE
- .strip()
- .replace('@MODULE_NAME@', module_name)
- .replace('@PACKAGE_NAME@', package_name)
- .replace('@FLAVOR@', flavor)
- .replace('@WITH_LIBRA@', libra_flag)
- )
- f.write('\n')
-
-
-if __name__ == "__main__":
- main()
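The generator is plain placeholder substitution; a hedged sketch with hypothetical argument values:

    # Equivalent of: gen_yql_python_udf.py System Python3 yql.python3 out.cpp 0
    line = 'RegisterYqlPythonUdf(..., TStringBuf("@MODULE_NAME@"), EPythonFlavor::@FLAVOR@);'
    out = line.replace('@MODULE_NAME@', 'Python3').replace('@FLAVOR@', 'System')
    assert 'EPythonFlavor::System' in out and '"Python3"' in out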
diff --git a/build/scripts/generate_mf.py b/build/scripts/generate_mf.py
deleted file mode 100644
index a44a969980..0000000000
--- a/build/scripts/generate_mf.py
+++ /dev/null
@@ -1,113 +0,0 @@
-import json
-import logging
-import optparse
-import os
-import sys
-import io
-
-import process_command_files as pcf
-
-class BadMfError(Exception):
- pass
-
-
-class GplNotAllowed(Exception):
- pass
-
-
-def process_quotes(s):
- for quote_char in '\'"':
- if s.startswith(quote_char) and s.endswith(quote_char):
- return s[1:-1]
- return s
-
-
-def parse_args():
- args = pcf.get_args(sys.argv[1:])
- lics, peers, free_args, credits = [], [], [], []
- current_list = free_args
- for a in args:
- if a == '-Ya,lics':
- current_list = lics
- elif a == '-Ya,peers':
- current_list = peers
- elif a == '-Ya,credits':
- current_list = credits
- elif a and a.startswith('-'):
- current_list = free_args
- current_list.append(a)
- else:
- current_list.append(a)
-
- parser = optparse.OptionParser()
- parser.add_option('--build-root')
- parser.add_option('--module-name')
- parser.add_option('-o', '--output')
- parser.add_option('-c', '--credits-output')
- parser.add_option('-t', '--type')
- opts, _ = parser.parse_args(free_args)
- return lics, peers, credits, opts
-
-
-def generate_header(meta):
- return '-' * 20 + meta.get('path', 'Unknown module') + '-' * 20
-
-
-def generate_mf():
- lics, peers, credits, options = parse_args()
-
- meta = {
- 'module_name': options.module_name,
- 'path': os.path.dirname(options.output),
- 'licenses': lics,
- 'dependencies': [],
- 'license_texts': ''
- }
-
- build_root = options.build_root
- file_name = os.path.join(build_root, options.output)
-
- if options.type != 'LIBRARY':
- for rel_filename in peers:
- with open(os.path.join(build_root, rel_filename + '.mf')) as peer_file:
- peer_meta = json.load(peer_file)
- meta['dependencies'].append(peer_meta)
-
- if credits:
- union_texts = []
- for texts_file in credits:
- with open(process_quotes(texts_file)) as f:
- union_texts.append(f.read())
- meta['license_texts'] = '\n\n'.join(union_texts)
-
- if options.credits_output:
- final_credits = []
- if meta['license_texts']:
- final_credits.append(generate_header(meta) + '\n' + meta['license_texts'])
- for peer in peers:
- candidate = os.path.join(build_root, peer + '.mf')
- with open(candidate) as src:
- data = json.loads(src.read())
- texts = data.get('license_texts')
- if texts:
- candidate_text = generate_header(data) + '\n' + texts
- if isinstance(candidate_text, unicode):
- candidate_text = candidate_text.encode('utf-8')
- final_credits.append(candidate_text)
-
- with io.open(options.credits_output, 'w', encoding='utf-8') as f:
- data = '\n\n'.join(final_credits)
- if isinstance(data, str):
- data = data.decode('utf-8')
- f.write(data)
-
- with open(file_name, 'w') as mf_file:
- json.dump(meta, mf_file, indent=4)
-
-
-if __name__ == '__main__':
- try:
- generate_mf()
- except Exception as e:
- sys.stderr.write(str(e) + '\n')
- sys.exit(1)
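A sketch of the '-Ya,...' marker protocol parsed above: each marker switches the list that subsequent arguments are appended to (values hypothetical):

    args = ['--module-name', 'foo', '-Ya,lics', 'MIT', 'BSD', '-Ya,peers', 'libs/a']
    lics, peers, free_args = [], [], []
    current = free_args
    for a in args:
        if a == '-Ya,lics':
            current = lics
        elif a == '-Ya,peers':
            current = peers
        elif a and a.startswith('-'):
            current = free_args
            current.append(a)
        else:
            current.append(a)
    assert lics == ['MIT', 'BSD'] and peers == ['libs/a']
    assert free_args == ['--module-name', 'foo']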
diff --git a/build/scripts/generate_pom.py b/build/scripts/generate_pom.py
deleted file mode 100644
index 200caebc0b..0000000000
--- a/build/scripts/generate_pom.py
+++ /dev/null
@@ -1,275 +0,0 @@
-import sys
-import xml.etree.ElementTree as et
-import argparse
-import os
-import json
-import base64
-import re
-
-
-DEFAULT_YANDEX_GROUP_ID = 'ru.yandex'
-DEFAULT_NAMESPACE = 'http://maven.apache.org/POM/4.0.0'
-XSI_NAMESPACE = 'http://www.w3.org/2001/XMLSchema-instance'
-SCHEMA_LOCATION = 'http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd'
-MODEL_VERSION = '4.0.0'
-
-MAVEN_PLUGIN_GROUP_ID = 'org.apache.maven.plugins'
-MAVEN_PLUGIN_ARTIFACT_ID = 'maven-compiler-plugin'
-MAVEN_PLUGIN_VERSION = '3.3'
-JAVA_LANGUAGE_LEVEL = '1.8'
-
-MAVEN_BUILD_HELPER_GROUP_ID = 'org.codehaus.mojo'
-MAVEN_BUILD_HELPER_ARTIFACT_ID = 'build-helper-maven-plugin'
-MAVEN_BUILD_HELPER_VERSION = '1.9.1'
-
-MAVEN_EXEC_GROUP_ID = 'org.codehaus.mojo'
-MAVEN_EXEC_ARTIFACT_ID = 'exec-maven-plugin'
-MAVEN_EXEC_VERSION = '1.5.0'
-
-MAVEN_SUREFIRE_GROUP_ID = 'org.apache.maven.plugins'
-MAVEN_SUREFIRE_ARTIFACT_ID = 'maven-surefire-plugin'
-MAVEN_SUREFIRE_VERSION = '2.12.2'
-
-
-def target_from_contrib(target_path):
- return target_path.startswith('contrib')
-
-
-def split_artifacts(s):
- m = re.match('^([^:]*:[^:]*:[^:]*:[^:]*)(.*)$', s)
- if not m or not m.groups():
- return []
- if not m.groups()[1].startswith('::'):
- return [m.groups()[0]]
- return [m.groups()[0]] + m.groups()[1].split('::')[1:]
-
-
-def build_pom_and_export_to_maven(**kwargs):
- target_path = kwargs.get('target_path')
- target = kwargs.get('target')
- pom_path = kwargs.get('pom_path')
- source_dirs = kwargs.get('source_dirs')
- output_dir = kwargs.get('output_dir')
- final_name = kwargs.get('final_name')
- packaging = kwargs.get('packaging')
- target_dependencies = kwargs.get('target_dependencies')
- test_target_dependencies = kwargs.get('test_target_dependencies')
- test_target_dependencies_exclude = kwargs.get('test_target_dependencies_exclude')
- modules_path = kwargs.get('modules_path')
- prop_vars = kwargs.get('properties')
- external_jars = kwargs.get('external_jars')
- resources = kwargs.get('resources')
- run_java_programs = [json.loads(base64.b64decode(i)) for i in kwargs.get('run_java_programs')]
- test_source_dirs = kwargs.get('test_source_dirs')
- test_resource_dirs = kwargs.get('test_resource_dirs')
-
- modules = []
-
- def _indent(elem, level=0):
- ind = "\n" + level * " "
- if len(elem):
- if not elem.text or not elem.text.strip():
- elem.text = ind + " "
- if not elem.tail or not elem.tail.strip():
- elem.tail = ind
- for elem in elem:
- _indent(elem, level + 1)
- if not elem.tail or not elem.tail.strip():
- elem.tail = ind
- else:
- if level and (not elem.tail or not elem.tail.strip()):
- elem.tail = ind
-
- project = et.Element(
- '{}{}{}project'.format('{', DEFAULT_NAMESPACE, '}'),
- attrib={'{}{}{}schemaLocation'.format('{', XSI_NAMESPACE, '}'): SCHEMA_LOCATION}
- )
-
- group_id, artifact_id, version = target.split(':')
-
- et.SubElement(project, 'modelVersion').text = MODEL_VERSION
- et.SubElement(project, 'groupId').text = group_id
- et.SubElement(project, 'artifactId').text = artifact_id
- et.SubElement(project, 'version').text = version
- et.SubElement(project, 'packaging').text = packaging
-
- properties = et.SubElement(project, 'properties')
- et.SubElement(properties, 'project.build.sourceEncoding').text = 'UTF-8'
-
- if prop_vars:
- for property, value in json.loads(base64.b64decode(prop_vars)).items():
- et.SubElement(properties, property).text = value
-
- if modules_path:
- with open(modules_path) as f:
- modules = [i.strip() for i in f if i.strip()]
-
- if modules:
- modules_el = et.SubElement(project, 'modules')
- for module in modules:
- et.SubElement(modules_el, 'module').text = module
-
- build = et.SubElement(project, 'build')
- if source_dirs:
- et.SubElement(build, 'sourceDirectory').text = source_dirs[0]
- source_dirs = source_dirs[1:]
- if test_source_dirs:
- et.SubElement(build, 'testSourceDirectory').text = test_source_dirs[0]
- test_source_dirs = test_source_dirs[1:]
- if output_dir:
- et.SubElement(build, 'outputDirectory').text = output_dir
- if final_name:
- et.SubElement(build, 'finalName').text = final_name
- if resources:
- resource_element = et.SubElement(et.SubElement(build, 'resources'), 'resource')
- et.SubElement(resource_element, 'directory').text = '${basedir}'
- includes = et.SubElement(resource_element, 'includes')
- for resource in resources:
- et.SubElement(includes, 'include').text = resource
- if test_resource_dirs:
- test_resource_element = et.SubElement(build, 'testResources')
- for test_resource_dir in test_resource_dirs:
- et.SubElement(et.SubElement(test_resource_element, 'testResource'), 'directory').text = '${basedir}' + (('/' + test_resource_dir) if test_resource_dir != '.' else '')
-
- plugins = et.SubElement(build, 'plugins')
-
- if packaging != 'pom':
- maven_plugin = et.SubElement(plugins, 'plugin')
- et.SubElement(maven_plugin, 'groupId').text = MAVEN_PLUGIN_GROUP_ID
- et.SubElement(maven_plugin, 'artifactId').text = MAVEN_PLUGIN_ARTIFACT_ID
- et.SubElement(maven_plugin, 'version').text = MAVEN_PLUGIN_VERSION
- configuration = et.SubElement(maven_plugin, 'configuration')
- et.SubElement(configuration, 'source').text = JAVA_LANGUAGE_LEVEL
- et.SubElement(configuration, 'target').text = JAVA_LANGUAGE_LEVEL
-
- if source_dirs or external_jars or test_source_dirs:
- build_helper_plugin = et.SubElement(plugins, 'plugin')
- et.SubElement(build_helper_plugin, 'groupId').text = MAVEN_BUILD_HELPER_GROUP_ID
- et.SubElement(build_helper_plugin, 'artifactId').text = MAVEN_BUILD_HELPER_ARTIFACT_ID
- et.SubElement(build_helper_plugin, 'version').text = MAVEN_BUILD_HELPER_VERSION
- executions = et.SubElement(build_helper_plugin, 'executions')
- if source_dirs:
- execution = et.SubElement(executions, 'execution')
- et.SubElement(execution, 'id').text = 'add-source'
- et.SubElement(execution, 'phase').text = 'generate-sources'
- et.SubElement(et.SubElement(execution, 'goals'), 'goal').text = 'add-source'
- sources = et.SubElement(et.SubElement(execution, 'configuration'), 'sources')
- for source_dir in source_dirs:
- et.SubElement(sources, 'source').text = source_dir
- if external_jars:
- execution = et.SubElement(executions, 'execution')
- et.SubElement(execution, 'id').text = 'attach-artifacts'
- et.SubElement(execution, 'phase').text = 'generate-sources'
- et.SubElement(et.SubElement(execution, 'goals'), 'goal').text = 'attach-artifact'
- artifacts = et.SubElement(et.SubElement(execution, 'configuration'), 'artifacts')
- for external_jar in external_jars:
- external_artifact = et.SubElement(artifacts, 'artifact')
- et.SubElement(external_artifact, 'file').text = '${basedir}/' + external_jar
- et.SubElement(external_artifact, 'type').text = 'jar'
- if test_source_dirs:
- execution = et.SubElement(executions, 'execution')
- et.SubElement(execution, 'id').text = 'add-test-source'
- et.SubElement(execution, 'phase').text = 'generate-test-sources'
- et.SubElement(et.SubElement(execution, 'goals'), 'goal').text = 'add-test-source'
- sources = et.SubElement(et.SubElement(execution, 'configuration'), 'sources')
- for source_dir in test_source_dirs:
- et.SubElement(sources, 'source').text = source_dir
-
- if run_java_programs:
- exec_plugin = et.SubElement(plugins, 'plugin')
- et.SubElement(exec_plugin, 'groupId').text = MAVEN_EXEC_GROUP_ID
- et.SubElement(exec_plugin, 'artifactId').text = MAVEN_EXEC_ARTIFACT_ID
- et.SubElement(exec_plugin, 'version').text = MAVEN_EXEC_VERSION
- jp_dependencies = et.SubElement(exec_plugin, 'dependencies')
- executions = et.SubElement(exec_plugin, 'executions')
- for java_program in run_java_programs:
- execution = et.SubElement(executions, 'execution')
- et.SubElement(execution, 'phase').text = 'generate-sources'
- et.SubElement(et.SubElement(execution, 'goals'), 'goal').text = 'java'
- jp_configuration = et.SubElement(execution, 'configuration')
- main_cls, args = None, []
- for word in java_program['cmd']:
- if not main_cls and not word.startswith('-'):
- main_cls = word
- else:
- args.append(word)
- et.SubElement(jp_configuration, 'mainClass').text = main_cls
- et.SubElement(jp_configuration, 'includePluginDependencies').text = 'true'
- et.SubElement(jp_configuration, 'includeProjectDependencies').text = 'false'
- if args:
- jp_arguments = et.SubElement(jp_configuration, 'arguments')
- for arg in args:
- et.SubElement(jp_arguments, 'argument').text = arg
- if java_program['deps']:
- for jp_dep in java_program['deps']:
- jp_dependency = et.SubElement(jp_dependencies, 'dependency')
- jp_g, jp_a, jp_v = jp_dep.split(':')
- et.SubElement(jp_dependency, 'groupId').text = jp_g
- et.SubElement(jp_dependency, 'artifactId').text = jp_a
- et.SubElement(jp_dependency, 'version').text = jp_v
- et.SubElement(jp_dependency, 'type').text = 'jar'
-
- if target_dependencies + test_target_dependencies:
- dependencies = et.SubElement(project, 'dependencies')
- for target_dependency in target_dependencies + test_target_dependencies:
- dependency = et.SubElement(dependencies, 'dependency')
- dependency_info = split_artifacts(target_dependency)
-
- group_id, artifact_id, version, classifier = dependency_info[0].split(':')
-
- et.SubElement(dependency, 'groupId').text = group_id
- et.SubElement(dependency, 'artifactId').text = artifact_id
- et.SubElement(dependency, 'version').text = version
- if classifier:
- et.SubElement(dependency, 'classifier').text = classifier
- if target_dependency in test_target_dependencies:
- et.SubElement(dependency, 'scope').text = 'test'
-
- if len(dependency_info) > 1:
- exclusions = et.SubElement(dependency, 'exclusions')
- for exclude in dependency_info[1:]:
- group_id, artifact_id = exclude.split(':')
- exclusion_el = et.SubElement(exclusions, 'exclusion')
- et.SubElement(exclusion_el, 'groupId').text = group_id
- et.SubElement(exclusion_el, 'artifactId').text = artifact_id
-
- if test_target_dependencies_exclude:
- surefire_plugin = et.SubElement(plugins, 'plugin')
- et.SubElement(surefire_plugin, 'groupId').text = MAVEN_SUREFIRE_GROUP_ID
- et.SubElement(surefire_plugin, 'artifactId').text = MAVEN_SUREFIRE_ARTIFACT_ID
- et.SubElement(surefire_plugin, 'version').text = MAVEN_SUREFIRE_VERSION
- classpath_excludes = et.SubElement(et.SubElement(surefire_plugin, 'configuration'), 'classpathDependencyExcludes')
- for classpath_exclude in test_target_dependencies_exclude:
- et.SubElement(classpath_excludes, 'classpathDependencyExclude').text = classpath_exclude
-
- et.register_namespace('', DEFAULT_NAMESPACE)
- et.register_namespace('xsi', XSI_NAMESPACE)
-
- _indent(project)
-
- et.ElementTree(project).write(pom_path)
- sys.stderr.write("[MAVEN EXPORT] Generated {} file for target {}\n".format(os.path.basename(pom_path), target_path))
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
- parser.add_argument('--target-path', action='store', default='')
- parser.add_argument('--target', action='store')
- parser.add_argument('--pom-path', action='store')
- parser.add_argument('--source-dirs', action='append', default=[])
- parser.add_argument('--external-jars', action='append', default=[])
- parser.add_argument('--resources', action='append', default=[])
- parser.add_argument('--run-java-programs', action='append', default=[])
- parser.add_argument('--output-dir')
- parser.add_argument('--final-name')
- parser.add_argument('--packaging', default='jar')
- parser.add_argument('--target-dependencies', action='append', default=[])
- parser.add_argument('--test-target-dependencies', action='append', default=[])
- parser.add_argument('--test-target-dependencies-exclude', action='append', default=[])
- parser.add_argument('--modules-path', action='store')
- parser.add_argument('--properties')
- parser.add_argument('--test-source-dirs', action='append', default=[])
- parser.add_argument('--test-resource-dirs', action='append', default=[])
- args = parser.parse_args()
-
- build_pom_and_export_to_maven(**vars(args))
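For reference, the dependency-coordinate grammar handled by split_artifacts above: a group:artifact:version:classifier head, optionally followed by '::'-separated exclusions (coordinates hypothetical):

    assert split_artifacts('ru.yandex:foo:1.0:') == ['ru.yandex:foo:1.0:']
    assert split_artifacts('ru.yandex:foo:1.0:jdk8::g1:a1::g2:a2') == \
        ['ru.yandex:foo:1.0:jdk8', 'g1:a1', 'g2:a2']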
diff --git a/build/scripts/go_fake_include/go_asm.h b/build/scripts/go_fake_include/go_asm.h
deleted file mode 100644
index e69de29bb2..0000000000
--- a/build/scripts/go_fake_include/go_asm.h
+++ /dev/null
diff --git a/build/scripts/go_proto_wrapper.py b/build/scripts/go_proto_wrapper.py
deleted file mode 100644
index 065120b6eb..0000000000
--- a/build/scripts/go_proto_wrapper.py
+++ /dev/null
@@ -1,82 +0,0 @@
-from __future__ import absolute_import
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-from six.moves import range
-
-
-OUT_DIR_FLAG_PATTERN = re.compile(r'^(--go(([-_]\w+))*_out=)')
-
-
-def move_tree(src_root, dst_root):
- for root, _, files in os.walk(src_root):
- rel_dir = os.path.relpath(root, src_root)
- dst_dir = os.path.join(dst_root, rel_dir)
- if not os.path.exists(dst_dir):
- os.mkdir(dst_dir)
- for file in files:
- os.rename(os.path.join(root, file), os.path.join(dst_dir, file))
-
-
-def main(arcadia_prefix, contrib_prefix, proto_namespace, args):
- out_dir_orig = None
- out_dir_temp = None
- for i in range(len(args)):
- m = re.match(OUT_DIR_FLAG_PATTERN, args[i])
- if m:
- out_dir_flag = m.group(1)
- index = max(len(out_dir_flag), args[i].rfind(':')+1)
- out_dir = args[i][index:]
- if out_dir_orig:
- assert out_dir_orig == out_dir, 'Output directories do not match: [{}] and [{}]'.format(out_dir_orig, out_dir)
- else:
- out_dir_orig = out_dir
- out_dir_temp = tempfile.mkdtemp(dir=out_dir_orig)
- args[i] = (args[i][:index] + out_dir_temp).replace('|', ',')
- assert out_dir_temp is not None, 'Output directory is not specified'
-
- try:
- subprocess.check_output(args, stdin=None, stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError as e:
- sys.stderr.write('{} returned non-zero exit code {}.\n{}\n'.format(' '.join(e.cmd), e.returncode, e.output))
- return e.returncode
-
- # All Arcadia Go projects should have the 'a.yandex-team.ru/' namespace prefix.
- # If the namespace doesn't start with this prefix, then the project comes
- # from the vendor directory under the root of Arcadia.
- out_dir_src = os.path.normpath(os.path.join(out_dir_temp, arcadia_prefix, proto_namespace))
- out_dir_dst = out_dir_orig
- is_from_contrib = False
- if not os.path.isdir(out_dir_src):
- is_from_contrib = True
- out_dir_src = out_dir_temp
- out_dir_dst = os.path.join(out_dir_orig, contrib_prefix)
-
- if not os.path.exists(out_dir_src) or is_from_contrib:
- protos = [x for x in args if x.endswith('.proto')]
- if not is_from_contrib or not all(x.startswith(contrib_prefix) for x in protos):
- proto_list = []
- option_re = re.compile(r'^\s*option\s+go_package\s*=\s*')
- for arg in protos:
- with open(arg, 'r') as f:
- if not any([re.match(option_re, line) for line in f]):
- proto_list.append(arg)
- if proto_list:
- sys.stderr.write(
- '\nError: Option go_package is not specified in the following proto files: {}\n'
- '\nNOTE! You can find a detailed description of how to properly set the go_package '
- 'option here: https://wiki.yandex-team.ru/devrules/Go/#protobufigrpc'.format(', '.join(proto_list)))
- return 1
-
- move_tree(out_dir_src, out_dir_dst)
-
- shutil.rmtree(out_dir_temp)
-
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main(os.path.normpath(sys.argv[1]), os.path.normpath(sys.argv[2]), os.path.normpath(sys.argv[3]), sys.argv[4:]))
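A quick sketch of the --go*_out flag rewriting performed in main above (paths hypothetical): the output directory after the last ':' is what gets redirected into a temporary directory:

    import re

    OUT_DIR_FLAG_PATTERN = re.compile(r'^(--go(([-_]\w+))*_out=)')
    arg = '--go_grpc_out=plugins=grpc:/build/out'
    m = OUT_DIR_FLAG_PATTERN.match(arg)
    index = max(len(m.group(1)), arg.rfind(':') + 1)
    assert arg[index:] == '/build/out'   # this part is swapped for a temp dir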
diff --git a/build/scripts/go_tool.py b/build/scripts/go_tool.py
deleted file mode 100644
index 7aa2d700e1..0000000000
--- a/build/scripts/go_tool.py
+++ /dev/null
@@ -1,873 +0,0 @@
-from __future__ import absolute_import, unicode_literals
-import argparse
-import codecs
-import copy
-import json
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tarfile
-import tempfile
-import threading
-import six
-from functools import reduce
-
-import process_command_files as pcf
-import process_whole_archive_option as pwa
-
-arc_project_prefix = 'a.yandex-team.ru/'
-std_lib_prefix = 'contrib/go/_std_1.18/src/'
-vendor_prefix = 'vendor/'
-vet_info_ext = '.vet.out'
-vet_report_ext = '.vet.txt'
-
-FIXED_CGO1_SUFFIX='.fixed.cgo1.go'
-
-COMPILE_OPTIMIZATION_FLAGS=('-N',)
-
-
-def get_trimpath_args(args):
- return ['-trimpath', args.trimpath] if args.trimpath else []
-
-
-def preprocess_cgo1(src_path, dst_path, source_root):
- with open(src_path, 'r') as f:
- content = f.read()
- content = content.replace('__ARCADIA_SOURCE_ROOT_PREFIX__', source_root)
- with open(dst_path, 'w') as f:
- f.write(content)
-
-
-def preprocess_args(args):
- # Temporary workaround for noauto
- if args.cgo_srcs and len(args.cgo_srcs) > 0:
- cgo_srcs_set = set(args.cgo_srcs)
- args.srcs = [x for x in args.srcs if x not in cgo_srcs_set]
-
- args.pkg_root = os.path.join(args.toolchain_root, 'pkg')
- toolchain_tool_root = os.path.join(args.pkg_root, 'tool', '{}_{}'.format(args.host_os, args.host_arch))
- args.go_compile = os.path.join(toolchain_tool_root, 'compile')
- args.go_cgo = os.path.join(toolchain_tool_root, 'cgo')
- args.go_link = os.path.join(toolchain_tool_root, 'link')
- args.go_asm = os.path.join(toolchain_tool_root, 'asm')
- args.go_pack = os.path.join(toolchain_tool_root, 'pack')
- args.go_vet = os.path.join(toolchain_tool_root, 'vet') if args.vet is True else args.vet
- args.output = os.path.normpath(args.output)
- args.vet_report_output = vet_report_output_name(args.output, args.vet_report_ext)
- args.trimpath = None
- if args.debug_root_map:
- roots = {'build': args.build_root, 'source': args.source_root, 'tools': args.tools_root}
- replaces = []
- for root in args.debug_root_map.split(';'):
- src, dst = root.split('=', 1)
- assert src in roots
- replaces.append('{}=>{}'.format(roots[src], dst))
- del roots[src]
- assert len(replaces) > 0
- args.trimpath = ';'.join(replaces)
- args.build_root = os.path.normpath(args.build_root)
- args.build_root_dir = args.build_root + os.path.sep
- args.source_root = os.path.normpath(args.source_root)
- args.source_root_dir = args.source_root + os.path.sep
- args.output_root = os.path.normpath(args.output_root)
- args.import_map = {}
- args.module_map = {}
- if args.cgo_peers:
- args.cgo_peers = [x for x in args.cgo_peers if not x.endswith('.fake.pkg')]
-
- srcs = []
- for f in args.srcs:
- if f.endswith('.gosrc'):
- with tarfile.open(f, 'r') as tar:
- srcs.extend(os.path.join(args.output_root, src) for src in tar.getnames())
- tar.extractall(path=args.output_root)
- else:
- srcs.append(f)
- args.srcs = srcs
-
- assert args.mode == 'test' or args.test_srcs is None and args.xtest_srcs is None
- # add lexical order by basename for Go sources
- args.srcs.sort(key=lambda x: os.path.basename(x))
- if args.test_srcs:
- args.srcs += sorted(args.test_srcs, key=lambda x: os.path.basename(x))
- del args.test_srcs
- if args.xtest_srcs:
- args.xtest_srcs.sort(key=lambda x: os.path.basename(x))
-
- # compute root relative module dir path
- assert args.output is None or args.output_root == os.path.dirname(args.output)
- assert args.output_root.startswith(args.build_root_dir)
- args.module_path = args.output_root[len(args.build_root_dir):]
- args.source_module_dir = os.path.join(args.source_root, args.test_import_path or args.module_path) + os.path.sep
- assert len(args.module_path) > 0
- args.import_path, args.is_std = get_import_path(args.module_path)
-
- assert args.asmhdr is None or args.word == 'go'
-
- srcs = []
- for f in args.srcs:
- if f.endswith(FIXED_CGO1_SUFFIX) and f.startswith(args.build_root_dir):
- path = os.path.join(args.output_root, '{}.cgo1.go'.format(os.path.basename(f[:-len(FIXED_CGO1_SUFFIX)])))
- srcs.append(path)
- preprocess_cgo1(f, path, args.source_root)
- else:
- srcs.append(f)
- args.srcs = srcs
-
- if args.extldflags:
- args.extldflags = pwa.ProcessWholeArchiveOption(args.targ_os).construct_cmd(args.extldflags)
-
- classify_srcs(args.srcs, args)
-
-
-def compare_versions(version1, version2):
- def last_index(version):
- index = version.find('beta')
- return len(version) if index < 0 else index
-
- v1 = tuple(x.zfill(8) for x in version1[:last_index(version1)].split('.'))
- v2 = tuple(x.zfill(8) for x in version2[:last_index(version2)].split('.'))
- if v1 == v2:
- return 0
- return 1 if v1 < v2 else -1
-
-
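Note the inverted sign convention of compare_versions, exercised against the function above: 1 means version1 is older, and a 'beta' tail is ignored:

    assert compare_versions('1.16', '1.18') == 1
    assert compare_versions('1.18', '1.16') == -1
    assert compare_versions('1.18', '1.18beta1') == 0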
-def get_symlink_or_copyfile():
- os_symlink = getattr(os, 'symlink', None)
- if os_symlink is None:
- os_symlink = shutil.copyfile
- return os_symlink
-
-
-def copy_args(args):
- return copy.copy(args)
-
-
-def get_vendor_index(import_path):
- index = import_path.rfind('/' + vendor_prefix)
- if index < 0:
- index = 0 if import_path.startswith(vendor_prefix) else index
- else:
- index = index + 1
- return index
-
-
-def get_import_path(module_path):
- assert len(module_path) > 0
- import_path = module_path.replace('\\', '/')
- is_std_module = import_path.startswith(std_lib_prefix)
- if is_std_module:
- import_path = import_path[len(std_lib_prefix):]
- elif import_path.startswith(vendor_prefix):
- import_path = import_path[len(vendor_prefix):]
- else:
- import_path = arc_project_prefix + import_path
- assert len(import_path) > 0
- return import_path, is_std_module
-
-
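A sketch of the module-path classification done by get_import_path above, using the default prefixes defined at the top of the file (module paths hypothetical):

    assert get_import_path('contrib/go/_std_1.18/src/fmt') == ('fmt', True)
    assert get_import_path('vendor/golang.org/x/net') == ('golang.org/x/net', False)
    assert get_import_path('library/go/blockcodecs') == \
        ('a.yandex-team.ru/library/go/blockcodecs', False)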
-def call(cmd, cwd, env=None):
- # sys.stderr.write('{}\n'.format(' '.join(cmd)))
- return subprocess.check_output(cmd, stdin=None, stderr=subprocess.STDOUT, cwd=cwd, env=env)
-
-
-def classify_srcs(srcs, args):
- args.go_srcs = [x for x in srcs if x.endswith('.go')]
- args.asm_srcs = [x for x in srcs if x.endswith('.s')]
- args.objects = [x for x in srcs if x.endswith('.o') or x.endswith('.obj')]
- args.symabis = [x for x in srcs if x.endswith('.symabis')]
- args.sysos = [x for x in srcs if x.endswith('.syso')]
-
-
-def get_import_config_info(peers, gen_importmap, import_map={}, module_map={}):
- info = {'importmap': [], 'packagefile': [], 'standard': {}}
- if gen_importmap:
- for key, value in six.iteritems(import_map):
- info['importmap'].append((key, value))
- for peer in peers:
- peer_import_path, is_std = get_import_path(os.path.dirname(peer))
- if gen_importmap:
- index = get_vendor_index(peer_import_path)
- if index >= 0:
- index += len(vendor_prefix)
- info['importmap'].append((peer_import_path[index:], peer_import_path))
- info['packagefile'].append((peer_import_path, os.path.join(args.build_root, peer)))
- if is_std:
- info['standard'][peer_import_path] = True
- for key, value in six.iteritems(module_map):
- info['packagefile'].append((key, value))
- return info
-
-
-def create_import_config(peers, gen_importmap, import_map={}, module_map={}):
- lines = []
- info = get_import_config_info(peers, gen_importmap, import_map, module_map)
- for key in ('importmap', 'packagefile'):
- for item in info[key]:
- lines.append('{} {}={}'.format(key, *item))
- if len(lines) > 0:
- lines.append('')
- content = '\n'.join(lines)
- # sys.stderr.writelines('{}\n'.format(l) for l in lines)
- with tempfile.NamedTemporaryFile(delete=False) as f:
- f.write(content.encode('UTF-8'))
- return f.name
- return None
-
-
-def create_embed_config(args):
- data = {
- 'Patterns': {},
- 'Files': {},
- }
- for info in args.embed:
- pattern = info[0]
- if pattern.endswith('/**/*'):
- pattern = pattern[:-3]
- files = {os.path.relpath(f, args.source_module_dir).replace('\\', '/'): f for f in info[1:]}
- data['Patterns'][pattern] = list(files.keys())
- data['Files'].update(files)
- # sys.stderr.write('{}\n'.format(json.dumps(data, indent=4)))
- with tempfile.NamedTemporaryFile(delete=False, suffix='.embedcfg') as f:
- f.write(json.dumps(data).encode('UTF-8'))
- return f.name
-
-
-def vet_info_output_name(path, ext=None):
- return '{}{}'.format(path, ext or vet_info_ext)
-
-
-def vet_report_output_name(path, ext=None):
- return '{}{}'.format(path, ext or vet_report_ext)
-
-
-def get_source_path(args):
- return args.test_import_path or args.module_path
-
-
-def gen_vet_info(args):
- import_path = args.real_import_path if hasattr(args, 'real_import_path') else args.import_path
- info = get_import_config_info(args.peers, True, args.import_map, args.module_map)
-
- import_map = dict(info['importmap'])
- # FIXME(snermolaev): it seems that adding an import map entry for the 'fake'
- # package doesn't do any harm (it needs to be revised later)
- import_map['unsafe'] = 'unsafe'
-
- for (key, _) in info['packagefile']:
- if key not in import_map:
- import_map[key] = key
-
- data = {
- 'ID': import_path,
- 'Compiler': 'gc',
- 'Dir': os.path.join(args.source_root, get_source_path(args)),
- 'ImportPath': import_path,
- 'GoFiles': [x for x in args.go_srcs if x.endswith('.go')],
- 'NonGoFiles': [x for x in args.go_srcs if not x.endswith('.go')],
- 'ImportMap': import_map,
- 'PackageFile': dict(info['packagefile']),
- 'Standard': dict(info['standard']),
- 'PackageVetx': dict((key, vet_info_output_name(value)) for key, value in info['packagefile']),
- 'VetxOnly': False,
- 'VetxOutput': vet_info_output_name(args.output),
- 'SucceedOnTypecheckFailure': False
- }
- # sys.stderr.write('{}\n'.format(json.dumps(data, indent=4)))
- return data
-
-
-def create_vet_config(args, info):
- with tempfile.NamedTemporaryFile(delete=False, suffix='.cfg') as f:
- f.write(json.dumps(info).encode('UTF-8'))
- return f.name
-
-
-def decode_vet_report(json_report):
- report = ''
- if json_report:
- try:
- full_diags = json.JSONDecoder().decode(json_report.decode('UTF-8'))
- except ValueError:
- report = json_report
- else:
- messages = []
- for _, module_diags in six.iteritems(full_diags):
- for _, type_diags in six.iteritems(module_diags):
- for diag in type_diags:
- messages.append('{}: {}'.format(diag['posn'], json.dumps(diag['message'])))
- report = '\n'.join(messages)
-
- return report
-
-
-def dump_vet_report(args, report):
- if report:
- report = report.replace(args.build_root, '$B')
- report = report.replace(args.source_root, '$S')
- with open(args.vet_report_output, 'w') as f:
- f.write(report)
-
-
-def read_vet_report(args):
- assert args
- report = ''
- if os.path.exists(args.vet_report_output):
- with open(args.vet_report_output, 'r') as f:
- report += f.read()
- return report
-
-
-def dump_vet_report_for_tests(args, *test_args_list):
- dump_vet_report(args, reduce(lambda x, y: x + read_vet_report(y), [_f for _f in test_args_list if _f], ''))
-
-
-def do_vet(args):
- assert args.vet
- info = gen_vet_info(args)
- vet_config = create_vet_config(args, info)
- cmd = [args.go_vet, '-json']
- if args.vet_flags:
- cmd.extend(args.vet_flags)
- cmd.append(vet_config)
- # sys.stderr.write('>>>> [{}]\n'.format(' '.join(cmd)))
- p_vet = subprocess.Popen(cmd, stdin=None, stderr=subprocess.PIPE, stdout=subprocess.PIPE, cwd=args.source_root)
- vet_out, vet_err = p_vet.communicate()
- report = decode_vet_report(vet_out) if vet_out else ''
- dump_vet_report(args, report)
- if p_vet.returncode:
- raise subprocess.CalledProcessError(returncode=p_vet.returncode, cmd=cmd, output=vet_err)
-
-
-def _do_compile_go(args):
- import_path, is_std_module = args.import_path, args.is_std
- cmd = [
- args.go_compile,
- '-o',
- args.output,
- '-p',
- import_path,
- '-D',
- '""',
- '-goversion',
- 'go{}'.format(args.goversion)
- ]
- if args.lang:
- cmd.append('-lang=go{}'.format(args.lang))
- cmd.extend(get_trimpath_args(args))
- compiling_runtime = False
- if is_std_module:
- cmd.append('-std')
- if import_path in ('runtime', 'internal/abi', 'internal/bytealg', 'internal/cpu') or import_path.startswith('runtime/internal/'):
- cmd.append('-+')
- compiling_runtime = True
- import_config_name = create_import_config(args.peers, True, args.import_map, args.module_map)
- if import_config_name:
- cmd += ['-importcfg', import_config_name]
- else:
- if import_path == 'unsafe' or len(args.objects) > 0 or args.asmhdr:
- pass
- else:
- cmd.append('-complete')
- # if compare_versions('1.16', args.goversion) >= 0:
- if args.embed:
- embed_config_name = create_embed_config(args)
- cmd.extend(['-embedcfg', embed_config_name])
- if args.asmhdr:
- cmd += ['-asmhdr', args.asmhdr]
- # Use .symabis (starting from Go 1.12)
- if args.symabis:
- cmd += ['-symabis'] + args.symabis
- # If 1.12 <= version < 1.13 we have to pass -allabis for 'runtime' and 'runtime/internal/atomic'
- # if compare_versions('1.13', args.goversion) >= 0:
- # pass
- # elif import_path in ('runtime', 'runtime/internal/atomic'):
- # cmd.append('-allabis')
- compile_workers = '4'
- if args.compile_flags:
- if compiling_runtime:
- cmd.extend(x for x in args.compile_flags if x not in COMPILE_OPTIMIZATION_FLAGS)
- else:
- cmd.extend(args.compile_flags)
- if any([x in ('-race', '-shared') for x in args.compile_flags]):
- compile_workers = '1'
- cmd += ['-pack', '-c={}'.format(compile_workers)]
- cmd += args.go_srcs
- call(cmd, args.build_root)
-
-
-class VetThread(threading.Thread):
-
- def __init__(self, target, args):
- super(VetThread, self).__init__(target=target, args=args)
- self.exc_info = None
-
- def run(self):
- try:
- super(VetThread, self).run()
- except:
- self.exc_info = sys.exc_info()
-
- def join_with_exception(self, reraise_exception):
- self.join()
- if reraise_exception and self.exc_info:
- six.reraise(self.exc_info[0], self.exc_info[1], self.exc_info[2])
-
-
-def do_compile_go(args):
- raise_exception_from_vet = False
- if args.vet:
- run_vet = VetThread(target=do_vet, args=(args,))
- run_vet.start()
- try:
- _do_compile_go(args)
- raise_exception_from_vet = True
- finally:
- if args.vet:
- run_vet.join_with_exception(raise_exception_from_vet)
-
-
-def do_compile_asm(args):
- def need_compiling_runtime(import_path):
- return import_path in ('runtime', 'reflect', 'syscall') or \
- import_path.startswith('runtime/internal/') or \
- compare_versions('1.17', args.goversion) >= 0 and import_path == 'internal/bytealg'
-
- assert(len(args.srcs) == 1 and len(args.asm_srcs) == 1)
- cmd = [args.go_asm]
- cmd += get_trimpath_args(args)
- cmd += ['-I', args.output_root, '-I', os.path.join(args.pkg_root, 'include')]
- cmd += ['-D', 'GOOS_' + args.targ_os, '-D', 'GOARCH_' + args.targ_arch, '-o', args.output]
-
- # if compare_versions('1.16', args.goversion) >= 0:
- cmd += ['-p', args.import_path]
- if need_compiling_runtime(args.import_path):
- cmd += ['-compiling-runtime']
-
- if args.asm_flags:
- cmd += args.asm_flags
- cmd += args.asm_srcs
- call(cmd, args.build_root)
-
-
-def do_link_lib(args):
- if len(args.asm_srcs) > 0:
- asmargs = copy_args(args)
- asmargs.asmhdr = os.path.join(asmargs.output_root, 'go_asm.h')
- do_compile_go(asmargs)
- for src in asmargs.asm_srcs:
- asmargs.srcs = [src]
- asmargs.asm_srcs = [src]
- asmargs.output = os.path.join(asmargs.output_root, os.path.basename(src) + '.o')
- do_compile_asm(asmargs)
- args.objects.append(asmargs.output)
- else:
- do_compile_go(args)
- if args.objects or args.sysos:
- cmd = [args.go_pack, 'r', args.output] + args.objects + args.sysos
- call(cmd, args.build_root)
-
-
-def do_link_exe(args):
- assert args.extld is not None
- assert args.non_local_peers is not None
- compile_args = copy_args(args)
- compile_args.output = os.path.join(args.output_root, 'main.a')
- compile_args.real_import_path = compile_args.import_path
- compile_args.import_path = 'main'
-
- if args.vcs and os.path.isfile(compile_args.vcs):
- build_info = os.path.join('library', 'go', 'core', 'buildinfo')
- if any([x.startswith(build_info) for x in compile_args.peers]):
- compile_args.go_srcs.append(compile_args.vcs)
-
- do_link_lib(compile_args)
- cmd = [args.go_link, '-o', args.output]
- import_config_name = create_import_config(args.peers + args.non_local_peers, False, args.import_map, args.module_map)
- if import_config_name:
- cmd += ['-importcfg', import_config_name]
- if args.link_flags:
- cmd += args.link_flags
-
- if args.mode in ('exe', 'test'):
- cmd.append('-buildmode=exe')
- elif args.mode == 'dll':
- cmd.append('-buildmode=c-shared')
- else:
- assert False, 'Unexpected mode: {}'.format(args.mode)
- cmd.append('-extld={}'.format(args.extld))
-
- extldflags = []
- if args.extldflags is not None:
- filter_musl = bool
- if args.musl:
- cmd.append('-linkmode=external')
- extldflags.append('-static')
- filter_musl = lambda x: x not in ('-lc', '-ldl', '-lm', '-lpthread', '-lrt')
- extldflags += [x for x in args.extldflags if filter_musl(x)]
- cgo_peers = []
- if args.cgo_peers is not None and len(args.cgo_peers) > 0:
- is_group = args.targ_os == 'linux'
- if is_group:
- cgo_peers.append('-Wl,--start-group')
- cgo_peers.extend(args.cgo_peers)
- if is_group:
- cgo_peers.append('-Wl,--end-group')
- try:
- index = extldflags.index('--cgo-peers')
- extldflags = extldflags[:index] + cgo_peers + extldflags[index+1:]
- except ValueError:
- extldflags.extend(cgo_peers)
- if len(extldflags) > 0:
- cmd.append('-extldflags={}'.format(' '.join(extldflags)))
- cmd.append(compile_args.output)
- call(cmd, args.build_root)
-
-
-def gen_cover_info(args):
- lines = []
- lines.extend([
- """
-var (
- coverCounters = make(map[string][]uint32)
- coverBlocks = make(map[string][]testing.CoverBlock)
-)
- """,
- 'func init() {',
- ])
- for var, file in (x.split(':') for x in args.cover_info):
- lines.append(' coverRegisterFile("{file}", _cover0.{var}.Count[:], _cover0.{var}.Pos[:], _cover0.{var}.NumStmt[:])'.format(file=file, var=var))
- lines.extend([
- '}',
- """
-func coverRegisterFile(fileName string, counter []uint32, pos []uint32, numStmts []uint16) {
- if 3*len(counter) != len(pos) || len(counter) != len(numStmts) {
- panic("coverage: mismatched sizes")
- }
- if coverCounters[fileName] != nil {
- // Already registered.
- return
- }
- coverCounters[fileName] = counter
- block := make([]testing.CoverBlock, len(counter))
- for i := range counter {
- block[i] = testing.CoverBlock{
- Line0: pos[3*i+0],
- Col0: uint16(pos[3*i+2]),
- Line1: pos[3*i+1],
- Col1: uint16(pos[3*i+2]>>16),
- Stmts: numStmts[i],
- }
- }
- coverBlocks[fileName] = block
-}
- """,
- ])
- return lines
-
-
-def filter_out_skip_tests(tests, skip_tests):
- skip_set = set()
- star_skip_set = set()
- for t in skip_tests:
- work_set = star_skip_set if '*' in t else skip_set
- work_set.add(t)
-
- re_star_tests = None
- if len(star_skip_set) > 0:
- re_star_tests = re.compile(re.sub(r'(\*)+', r'.\1', '^({})$'.format('|'.join(star_skip_set))))
-
- return [x for x in tests if not (x in skip_tests or re_star_tests and re_star_tests.match(x))]
-
-
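A sketch of the skip-list semantics of filter_out_skip_tests above: names without '*' are matched exactly, names with '*' are compiled into a single anchored regex (test names hypothetical):

    tests = ['TestFoo', 'TestBar', 'BenchmarkFoo']
    assert filter_out_skip_tests(tests, ['TestBar']) == ['TestFoo', 'BenchmarkFoo']
    assert filter_out_skip_tests(tests, ['Test*']) == ['BenchmarkFoo']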
-def gen_test_main(args, test_lib_args, xtest_lib_args):
- assert args and (test_lib_args or xtest_lib_args)
- test_miner = args.test_miner
- test_module_path = test_lib_args.import_path if test_lib_args else xtest_lib_args.import_path
- is_cover = args.cover_info and len(args.cover_info) > 0
-
- # Prepare GOPATH
- # $BINDIR
- # |- __go__
- # |- src
- # |- pkg
- # |- ${TARGET_OS}_${TARGET_ARCH}
- go_path_root = os.path.join(args.output_root, '__go__')
- test_src_dir = os.path.join(go_path_root, 'src')
- target_os_arch = '_'.join([args.targ_os, args.targ_arch])
- test_pkg_dir = os.path.join(go_path_root, 'pkg', target_os_arch, os.path.dirname(test_module_path))
- os.makedirs(test_pkg_dir)
-
- my_env = os.environ.copy()
- my_env['GOROOT'] = ''
- my_env['GOPATH'] = go_path_root
- my_env['GOARCH'] = args.targ_arch
- my_env['GOOS'] = args.targ_os
-
- tests = []
- xtests = []
- os_symlink = get_symlink_or_copyfile()
-
- # Get the list of "internal" tests
- if test_lib_args:
- os.makedirs(os.path.join(test_src_dir, test_module_path))
- os_symlink(test_lib_args.output, os.path.join(test_pkg_dir, os.path.basename(test_module_path) + '.a'))
- cmd = [test_miner, '-benchmarks', '-tests', test_module_path]
- tests = [x for x in (call(cmd, test_lib_args.output_root, my_env).decode('UTF-8') or '').strip().split('\n') if len(x) > 0]
- if args.skip_tests:
- tests = filter_out_skip_tests(tests, args.skip_tests)
- test_main_found = '#TestMain' in tests
-
- # Get the list of "external" tests
- if xtest_lib_args:
- xtest_module_path = xtest_lib_args.import_path
- os.makedirs(os.path.join(test_src_dir, xtest_module_path))
- os_symlink(xtest_lib_args.output, os.path.join(test_pkg_dir, os.path.basename(xtest_module_path) + '.a'))
- cmd = [test_miner, '-benchmarks', '-tests', xtest_module_path]
- xtests = [x for x in (call(cmd, xtest_lib_args.output_root, my_env).decode('UTF-8') or '').strip().split('\n') if len(x) > 0]
- if args.skip_tests:
- xtests = filter_out_skip_tests(xtests, args.skip_tests)
- xtest_main_found = '#TestMain' in xtests
-
- test_main_package = None
- if test_main_found and xtest_main_found:
- assert False, 'multiple definition of TestMain'
- elif test_main_found:
- test_main_package = '_test'
- elif xtest_main_found:
- test_main_package = '_xtest'
-
- shutil.rmtree(go_path_root)
-
- lines = ['package main', '', 'import (']
- if test_main_package is None:
- lines.append(' "os"')
- lines.extend([' "testing"', ' "testing/internal/testdeps"'])
-
- if len(tests) > 0:
- lines.append(' _test "{}"'.format(test_module_path))
- elif test_lib_args:
- lines.append(' _ "{}"'.format(test_module_path))
-
- if len(xtests) > 0:
- lines.append(' _xtest "{}"'.format(xtest_module_path))
- elif xtest_lib_args:
- lines.append(' _ "{}"'.format(xtest_module_path))
-
- if is_cover:
- lines.append(' _cover0 "{}"'.format(test_module_path))
- lines.extend([')', ''])
-
- if compare_versions('1.18', args.goversion) < 0:
- kinds = ['Test', 'Benchmark', 'Example']
- else:
- kinds = ['Test', 'Benchmark', 'FuzzTarget', 'Example']
-
- var_names = []
- for kind in kinds:
- var_name = '{}s'.format(kind.lower())
- var_names.append(var_name)
- lines.append('var {} = []testing.Internal{}{{'.format(var_name, kind))
- for test in [x for x in tests if x.startswith(kind)]:
- lines.append(' {{"{test}", _test.{test}}},'.format(test=test))
- for test in [x for x in xtests if x.startswith(kind)]:
- lines.append(' {{"{test}", _xtest.{test}}},'.format(test=test))
- lines.extend(['}', ''])
-
- if is_cover:
- lines.extend(gen_cover_info(args))
-
- lines.append('func main() {')
- if is_cover:
- lines.extend([
- ' testing.RegisterCover(testing.Cover{',
- ' Mode: "set",',
- ' Counters: coverCounters,',
- ' Blocks: coverBlocks,',
- ' CoveredPackages: "",',
- ' })',
- ])
- lines.extend([
- ' m := testing.MainStart(testdeps.TestDeps{{}}, {})'.format(', '.join(var_names)),
- '',
- ])
-
- if test_main_package:
- lines.append(' {}.TestMain(m)'.format(test_main_package))
- else:
- lines.append(' os.Exit(m.Run())')
- lines.extend(['}', ''])
-
- content = '\n'.join(lines)
- # sys.stderr.write('{}\n'.format(content))
- return content
-
-
-def do_link_test(args):
- assert args.srcs or args.xtest_srcs
- assert args.test_miner is not None
-
- test_module_path = get_source_path(args)
- test_import_path, _ = get_import_path(test_module_path)
-
- test_lib_args = copy_args(args) if args.srcs else None
- xtest_lib_args = copy_args(args) if args.xtest_srcs else None
- if xtest_lib_args is not None:
- xtest_lib_args.embed = args.embed_xtest if args.embed_xtest else None
-
- ydx_file_name = None
- xtest_ydx_file_name = None
- need_append_ydx = test_lib_args and xtest_lib_args and args.ydx_file and args.vet_flags
- if need_append_ydx:
- def find_ydx_file_name(name, flags):
- for i, elem in enumerate(flags):
- if elem.endswith(name):
- return (i, elem)
- assert False, 'Unreachable code'
-
- idx, ydx_file_name = find_ydx_file_name(xtest_lib_args.ydx_file, xtest_lib_args.vet_flags)
- xtest_ydx_file_name = '{}_xtest'.format(ydx_file_name)
- xtest_lib_args.vet_flags = copy.copy(xtest_lib_args.vet_flags)
- xtest_lib_args.vet_flags[idx] = xtest_ydx_file_name
-
- if test_lib_args:
- test_lib_args.output = os.path.join(args.output_root, 'test.a')
- test_lib_args.vet_report_output = vet_report_output_name(test_lib_args.output)
- test_lib_args.module_path = test_module_path
- test_lib_args.import_path = test_import_path
- do_link_lib(test_lib_args)
-
- if xtest_lib_args:
- xtest_lib_args.srcs = xtest_lib_args.xtest_srcs
- classify_srcs(xtest_lib_args.srcs, xtest_lib_args)
- xtest_lib_args.output = os.path.join(args.output_root, 'xtest.a')
- xtest_lib_args.vet_report_output = vet_report_output_name(xtest_lib_args.output)
- xtest_lib_args.module_path = test_module_path + '_test'
- xtest_lib_args.import_path = test_import_path + '_test'
- if test_lib_args:
- xtest_lib_args.module_map[test_import_path] = test_lib_args.output
- need_append_ydx = args.ydx_file and args.srcs and args.vet_flags
- do_link_lib(xtest_lib_args)
-
- if need_append_ydx:
- with open(os.path.join(args.build_root, ydx_file_name), 'ab') as dst_file:
- with open(os.path.join(args.build_root, xtest_ydx_file_name), 'rb') as src_file:
- dst_file.write(src_file.read())
-
- test_main_content = gen_test_main(args, test_lib_args, xtest_lib_args)
- test_main_name = os.path.join(args.output_root, '_test_main.go')
- with open(test_main_name, "w") as f:
- f.write(test_main_content)
- test_args = copy_args(args)
- test_args.embed = None
- test_args.srcs = [test_main_name]
- if test_args.test_import_path is None:
- # it seems that we could do this unconditionally, but this kind
- # of mangling doesn't really look good, so we only do it
- # for the pure GO_TEST module
- test_args.module_path = test_args.module_path + '___test_main__'
- test_args.import_path = test_args.import_path + '___test_main__'
- classify_srcs(test_args.srcs, test_args)
- if test_lib_args:
- test_args.module_map[test_lib_args.import_path] = test_lib_args.output
- if xtest_lib_args:
- test_args.module_map[xtest_lib_args.import_path] = xtest_lib_args.output
-
- if args.vet:
- dump_vet_report_for_tests(test_args, test_lib_args, xtest_lib_args)
- test_args.vet = False
-
- do_link_exe(test_args)
-
-
-if __name__ == '__main__':
- reload(sys)
- sys.setdefaultencoding('utf-8')
- sys.stdout = codecs.getwriter('utf8')(sys.stdout)
- sys.stderr = codecs.getwriter('utf8')(sys.stderr)
-
- args = pcf.get_args(sys.argv[1:])
-
- parser = argparse.ArgumentParser(prefix_chars='+')
- parser.add_argument('++mode', choices=['dll', 'exe', 'lib', 'test'], required=True)
- parser.add_argument('++srcs', nargs='*', required=True)
- parser.add_argument('++cgo-srcs', nargs='*')
- parser.add_argument('++test_srcs', nargs='*')
- parser.add_argument('++xtest_srcs', nargs='*')
- parser.add_argument('++cover_info', nargs='*')
- parser.add_argument('++output', nargs='?', default=None)
- parser.add_argument('++source-root', default=None)
- parser.add_argument('++build-root', required=True)
- parser.add_argument('++tools-root', default=None)
- parser.add_argument('++output-root', required=True)
- parser.add_argument('++toolchain-root', required=True)
- parser.add_argument('++host-os', choices=['linux', 'darwin', 'windows'], required=True)
- parser.add_argument('++host-arch', choices=['amd64', 'arm64'], required=True)
- parser.add_argument('++targ-os', choices=['linux', 'darwin', 'windows'], required=True)
- parser.add_argument('++targ-arch', choices=['amd64', 'x86', 'arm64'], required=True)
- parser.add_argument('++peers', nargs='*')
- parser.add_argument('++non-local-peers', nargs='*')
- parser.add_argument('++cgo-peers', nargs='*')
- parser.add_argument('++asmhdr', nargs='?', default=None)
- parser.add_argument('++test-import-path', nargs='?')
- parser.add_argument('++test-miner', nargs='?')
- parser.add_argument('++arc-project-prefix', nargs='?', default=arc_project_prefix)
- parser.add_argument('++std-lib-prefix', nargs='?', default=std_lib_prefix)
- parser.add_argument('++vendor-prefix', nargs='?', default=vendor_prefix)
- parser.add_argument('++extld', nargs='?', default=None)
- parser.add_argument('++extldflags', nargs='+', default=None)
- parser.add_argument('++goversion', required=True)
- parser.add_argument('++lang', nargs='?', default=None)
- parser.add_argument('++asm-flags', nargs='*')
- parser.add_argument('++compile-flags', nargs='*')
- parser.add_argument('++link-flags', nargs='*')
- parser.add_argument('++vcs', nargs='?', default=None)
- parser.add_argument('++vet', nargs='?', const=True, default=False)
- parser.add_argument('++vet-flags', nargs='*', default=None)
- parser.add_argument('++vet-info-ext', default=vet_info_ext)
- parser.add_argument('++vet-report-ext', default=vet_report_ext)
- parser.add_argument('++musl', action='store_true')
- parser.add_argument('++skip-tests', nargs='*', default=None)
- parser.add_argument('++ydx-file', default='')
- parser.add_argument('++debug-root-map', default=None)
- parser.add_argument('++embed', action='append', nargs='*')
- parser.add_argument('++embed_xtest', action='append', nargs='*')
- args = parser.parse_args(args)
-
- arc_project_prefix = args.arc_project_prefix
- std_lib_prefix = args.std_lib_prefix
- vendor_prefix = args.vendor_prefix
- vet_info_ext = args.vet_info_ext
- vet_report_ext = args.vet_report_ext
-
- preprocess_args(args)
-
- try:
- os.unlink(args.output)
- except OSError:
- pass
-
- # We currently support only the 'lib', 'exe', 'dll' and 'test' build modes
- # (see dispatch below), and as a result we generate only one build node per
- # module (or program)
- dispatch = {
- 'exe': do_link_exe,
- 'dll': do_link_exe,
- 'lib': do_link_lib,
- 'test': do_link_test
- }
-
- exit_code = 1
- try:
- dispatch[args.mode](args)
- exit_code = 0
- except KeyError:
- sys.stderr.write('Unknown build mode [{}]...\n'.format(args.mode))
- except subprocess.CalledProcessError as e:
- sys.stderr.write('{} returned non-zero exit code {}.\n{}\n'.format(' '.join(e.cmd), e.returncode, e.output))
- exit_code = e.returncode
- except Exception as e:
- sys.stderr.write('Unhandled exception [{}]...\n'.format(str(e)))
- sys.exit(exit_code)
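For orientation, a minimal hypothetical 'lib' invocation (paths invented, flag spelling per the parser above); options use the '+' prefix so that '-'-prefixed compiler and linker flags can pass through verbatim:

    # python go_tool.py ++mode lib ++srcs a.go b.go ++peers \
    #     ++build-root /bld ++output-root /bld/pkg ++output /bld/pkg/pkg.a \
    #     ++source-root /src ++toolchain-root /go-toolchain \
    #     ++host-os linux ++host-arch amd64 ++targ-os linux ++targ-arch amd64 \
    #     ++goversion 1.18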
diff --git a/build/scripts/ios_wrapper.py b/build/scripts/ios_wrapper.py
deleted file mode 100644
index d3aa48387a..0000000000
--- a/build/scripts/ios_wrapper.py
+++ /dev/null
@@ -1,180 +0,0 @@
-import errno
-import json
-import os
-import shutil
-import subprocess
-import sys
-import tarfile
-import plistlib
-
-
-def ensure_dir(path):
- try:
- os.makedirs(path)
- except OSError as e:
- if e.errno != errno.EEXIST or not os.path.isdir(path):
- raise
-
-
-def just_do_it(args):
- if not args:
- raise Exception('Not enough args!')
- parts = [[]]
- for arg in args:
- if arg == '__DELIM__':
- parts.append([])
- else:
- parts[-1].append(arg)
-    if len(parts) != 3 or len(parts[0]) != 5:
-        raise Exception('Bad call: expected 3 __DELIM__-separated argument groups, the first with 5 entries')
- bin_name, ibtool_path, main_out, app_name, module_dir = parts[0]
- bin_name = os.path.basename(bin_name)
- inputs, storyboard_user_flags = parts[1:]
- plists, storyboards, signs, nibs, resources, signed_resources, plist_jsons, strings = [], [], [], [], [], [], [], []
- for i in inputs:
- if i.endswith('.plist') or i.endswith('.partial_plist'):
- plists.append(i)
- elif i.endswith('.compiled_storyboard_tar'):
- storyboards.append(i)
- elif i.endswith('.xcent'):
- signs.append(i)
- elif i.endswith('.nib'):
- nibs.append(i)
- elif i.endswith('.resource_tar'):
- resources.append(i)
- elif i.endswith('.signed_resource_tar'):
- signed_resources.append(i)
- elif i.endswith('.plist_json'):
- plist_jsons.append(i)
- elif i.endswith('.strings_tar'):
- strings.append(i)
- else:
- print >> sys.stderr, 'Unknown input:', i, 'ignoring'
- if not plists:
- raise Exception("Can't find plist files")
-    if not plists[0].endswith('.plist'):
-        print >> sys.stderr, "Main plist file may be defined incorrectly"
-    if not storyboards:
-        print >> sys.stderr, "Storyboards list is empty"
- if len(signs) > 1:
- raise Exception("Too many .xcent files")
- app_dir = os.path.join(module_dir, app_name + '.app')
- ensure_dir(app_dir)
- copy_nibs(nibs, module_dir, app_dir)
- replaced_parameters = {
- 'DEVELOPMENT_LANGUAGE': 'en',
- 'EXECUTABLE_NAME': bin_name,
- 'PRODUCT_BUNDLE_IDENTIFIER': 'Yandex.' + app_name,
- 'PRODUCT_NAME': app_name,
- }
- replaced_templates = {}
- for plist_json in plist_jsons:
- with open(plist_json) as jsonfile:
- for k, v in json.loads(jsonfile.read()).items():
- replaced_parameters[k] = v
- for k, v in replaced_parameters.items():
- replaced_templates['$(' + k + ')'] = v
- replaced_templates['${' + k + '}'] = v
- make_main_plist(plists, os.path.join(app_dir, 'Info.plist'), replaced_templates)
- link_storyboards(ibtool_path, storyboards, app_name, app_dir, storyboard_user_flags)
- if resources:
- extract_resources(resources, app_dir)
- if signed_resources:
- extract_resources(signed_resources, app_dir, sign=True)
- if strings:
- extract_resources(strings, app_dir, strings=True)
- if not signs:
- sign_file = os.path.join(module_dir, app_name + '.xcent')
- with open(sign_file, 'w') as f:
- f.write('''<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
- <key>com.apple.security.get-task-allow</key>
- <true/>
-</dict>
-</plist>
- ''')
- else:
- sign_file = signs[0]
- sign_application(sign_file, app_dir)
- make_archive(app_dir, main_out)
-
-
-def is_exe(fpath):
- return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
-
-
-def copy_nibs(nibs, module_dir, app_dir):
- for nib in nibs:
- dst = os.path.join(app_dir, os.path.relpath(nib, module_dir))
- ensure_dir(os.path.dirname(dst))
- shutil.copyfile(nib, dst)
-
-
-def make_main_plist(inputs, out, replaced_parameters):
- united_data = {}
- for i in inputs:
- united_data.update(plistlib.readPlist(i))
-
- def scan_n_replace(root):
- if not isinstance(root, dict):
- raise Exception('Invalid state')
- for k in root:
- if isinstance(root[k], list):
- for i in xrange(len(root[k])):
- if isinstance(root[k][i], dict):
- scan_n_replace(root[k][i])
- elif root[k][i] in replaced_parameters:
- root[k][i] = replaced_parameters[root[k][i]]
- elif isinstance(root[k], dict):
- scan_n_replace(root[k])
- else:
- if root[k] in replaced_parameters:
- root[k] = replaced_parameters[root[k]]
- scan_n_replace(united_data)
- plistlib.writePlist(united_data, out)
- subprocess.check_call(['/usr/bin/plutil', '-convert', 'binary1', out])
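-
-# Note: only plist values that are exactly a template string are substituted,
-# e.g. a value '$(PRODUCT_NAME)' becomes the app name, while a value like
-# 'prefix $(PRODUCT_NAME)' is left untouched.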
-
-
-def link_storyboards(ibtool, archives, app_name, app_dir, flags):
- unpacked = []
- for arc in archives:
- unpacked.append(os.path.splitext(arc)[0] + 'c')
- ensure_dir(unpacked[-1])
- with tarfile.open(arc) as a:
- a.extractall(path=unpacked[-1])
- flags += [
- '--module', app_name,
- '--link', app_dir,
- ]
- subprocess.check_call([ibtool] + flags +
- ['--errors', '--warnings', '--notices', '--output-format', 'human-readable-text'] +
- unpacked)
-
-
-def sign_application(xcent, app_dir):
- subprocess.check_call(['/usr/bin/codesign', '--force', '--sign', '-', '--entitlements', xcent, '--timestamp=none', app_dir])
-
-
-def extract_resources(resources, app_dir, strings=False, sign=False):
- for res in resources:
- with tarfile.open(res) as tf:
- for tfinfo in tf:
- tf.extract(tfinfo.name, app_dir)
- if strings:
- subprocess.check_call(['/usr/bin/plutil', '-convert', 'binary1', os.path.join(app_dir, tfinfo.name)])
- if sign:
- subprocess.check_call(['/usr/bin/codesign', '--force', '--sign', '-', os.path.join(app_dir, tfinfo.name)])
-
-
-def make_archive(app_dir, output):
- with tarfile.open(output, "w") as tar_handle:
- for root, _, files in os.walk(app_dir):
- for f in files:
- tar_handle.add(os.path.join(root, f), arcname=os.path.join(os.path.basename(app_dir),
- os.path.relpath(os.path.join(root, f), app_dir)))
-
-
-if __name__ == '__main__':
- just_do_it(sys.argv[1:])
diff --git a/build/scripts/java_pack_to_file.py b/build/scripts/java_pack_to_file.py
deleted file mode 100644
index c8ab7c311b..0000000000
--- a/build/scripts/java_pack_to_file.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import os
-import re
-import optparse
-
-JAVA_PACKAGE_REGEX = re.compile(r'^\s*package\s+(.*?);', flags=re.MULTILINE | re.DOTALL)
-KOTLIN_PACKAGE_REGEX = re.compile(r'^\s*package\s+(.*?)^', flags=re.MULTILINE | re.DOTALL)
-
-
-def parse_args():
- parser = optparse.OptionParser()
- parser.add_option('-o', '--output')
- parser.add_option('-a', '--source-root', dest='source_root')
- return parser.parse_args()
-
-
-def get_package_name(filename):
- with open(filename) as afile:
- content = afile.read()
- if filename.endswith(".kt"):
- match = KOTLIN_PACKAGE_REGEX.search(content)
- if match:
- return match.group(1).strip().replace('.', '/')
- else:
- match = JAVA_PACKAGE_REGEX.search(content)
- if match:
-                return re.sub(r'\s', '', match.group(1)).replace('.', '/')
- return ''
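-
-# Example: a Java source starting with 'package com.example.app;' yields
-# 'com/example/app'; for Kotlin, 'package com.example.app' (no semicolon) is
-# matched up to the start of the next line.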
-
-
-def write_coverage_sources(output, srcroot, files):
- with open(output, 'w') as afile:
- for filename in files:
- pname = get_package_name(os.path.join(srcroot, filename))
- afile.write(os.path.join(pname, os.path.basename(filename)) + ':' + filename + '\n')
-
-
-def main():
- opts, files = parse_args()
- write_coverage_sources(opts.output, opts.source_root, files)
-
-
-if __name__ == '__main__':
- exit(main())
diff --git a/build/scripts/jni_swig.py b/build/scripts/jni_swig.py
deleted file mode 100644
index 4b2220430b..0000000000
--- a/build/scripts/jni_swig.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import argparse
-import subprocess
-import re
-import os
-import tarfile
-
-def parse_args():
- parser = argparse.ArgumentParser(description='Wrapper script to invoke swig.')
- parser.add_argument('--swig', help='path to the swig executable')
- parser.add_argument('--default-module', type=str, help='swig -module argument value for inputs without %module statement')
- parser.add_argument('--package-by-file', help='path to file which dir must be converted to swig -package argument')
- parser.add_argument('--jsrc', help='jsrc output archive filename')
- parser.add_argument('--src', help='input .swg file path')
- parser.add_argument('--out-header', help='header file which must exist even if it was not generated by swig')
- parser.add_argument('args', nargs="*", help='regular swig arguments')
-
- return parser.parse_args()
-
-
-def path2pkg(path):
- return path.replace('/', '.').replace('-', '_')
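-
-# e.g. path2pkg('a/b-c/d') == 'a.b_c.d'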
-
-
-def main(args):
- package = path2pkg(os.path.dirname(args.package_by_file))
- outdir = None
- if args.jsrc:
- outdir = package.replace('.', '/')
- outdir_abs = os.path.join(os.path.dirname(args.jsrc), outdir)
- if not os.path.exists(outdir_abs):
- os.makedirs(outdir_abs)
- cmd = [args.swig, '-c++', '-java', '-package', package] + (['-outdir', outdir_abs] if outdir is not None else []) + args.args
- if '-module' not in args.args and args.default_module:
- with open(args.src, 'r') as f:
- if not re.search(r'(?m)^%module\b', f.read()):
- cmd += ['-module', args.default_module]
- subprocess.check_call(cmd + [args.src])
- if args.out_header and not os.path.exists(args.out_header):
- open(args.out_header, 'w').close()
- if args.jsrc:
- with tarfile.open(args.jsrc, 'a') as tf:
- tf.add(outdir_abs, arcname=outdir)
-
-
-if __name__ == '__main__':
- main(parse_args())
diff --git a/build/scripts/link_asrc.py b/build/scripts/link_asrc.py
deleted file mode 100644
index eec5fe09a8..0000000000
--- a/build/scripts/link_asrc.py
+++ /dev/null
@@ -1,84 +0,0 @@
-import argparse
-import itertools
-import os
-import tarfile
-
-
-DELIM_JAVA = '__DELIM_JAVA__'
-DELIM_RES = '__DELIM_RES__'
-DELIM_ASSETS = '__DELIM_ASSETS__'
-DELIM_AIDL = '__DELIM_AIDL__'
-
-DELIMS = (
- DELIM_JAVA,
- DELIM_RES,
- DELIM_ASSETS,
- DELIM_AIDL,
-)
-
-DESTS = {
- DELIM_JAVA: 'src',
- DELIM_RES: 'res',
- DELIM_ASSETS: 'assets',
- DELIM_AIDL: 'aidl',
-}
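-
-# The --input list is a flat stream of delimiter-separated groups:
-#   __DELIM_JAVA__ <prefix-dir> a/Foo.java ... __DELIM_RES__ <prefix-dir> r.xml ...
-# Each group is repacked into the output tar under its DESTS[] directory,
-# with file paths taken relative to the group's prefix directory.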
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('--asrcs', nargs='*')
- parser.add_argument('--input', nargs='*')
- parser.add_argument('--jsrcs', nargs='*')
- parser.add_argument('--output', required=True)
- parser.add_argument('--work', required=True)
-
- return parser.parse_args()
-
-
-def main():
- args = parse_args()
-
- files = []
- parts = []
-
- if args.input and len(args.input) > 0:
- for x in args.input:
- if x in DELIMS:
- assert(len(parts) == 0 or len(parts[-1]) > 1)
- parts.append([x])
- else:
- assert(len(parts) > 0)
- parts[-1].append(x)
- assert(len(parts[-1]) > 1)
-
- if args.jsrcs and len(args.jsrcs):
- src_dir = os.path.join(args.work, DESTS[DELIM_JAVA])
- os.makedirs(src_dir)
-
- for jsrc in filter(lambda x: x.endswith('.jsrc'), args.jsrcs):
- with tarfile.open(jsrc, 'r') as tar:
- names = tar.getnames()
- if names and len(names) > 0:
- parts.append([DELIM_JAVA, src_dir])
- parts[-1].extend(itertools.imap(lambda x: os.path.join(src_dir, x), names))
- tar.extractall(path=src_dir)
-
- if args.asrcs and len(args.asrcs):
- for asrc in filter(lambda x: x.endswith('.asrc') and os.path.exists(x), args.asrcs):
- with tarfile.open(asrc, 'r') as tar:
- files.extend(tar.getnames())
- tar.extractall(path=args.work)
-
- with tarfile.open(args.output, 'w') as out:
- for part in parts:
- dest = DESTS[part[0]]
- prefix = part[1]
- for f in part[2:]:
- out.add(f, arcname=os.path.join(dest, os.path.relpath(f, prefix)))
-
- for f in files:
- out.add(os.path.join(args.work, f), arcname=f)
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/scripts/link_fat_obj.py b/build/scripts/link_fat_obj.py
deleted file mode 100644
index c189668b9e..0000000000
--- a/build/scripts/link_fat_obj.py
+++ /dev/null
@@ -1,91 +0,0 @@
-import argparse
-import subprocess
-import sys
-
-from process_whole_archive_option import ProcessWholeArchiveOption
-
-YA_ARG_PREFIX = '-Ya,'
-
-
-def get_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('--obj')
- parser.add_argument('--globals-lib')
- parser.add_argument('--lib', required=True)
- parser.add_argument('--arch', required=True)
- parser.add_argument('--build-root', default=None)
- parser.add_argument('--with-own-obj', action='store_true', default=False)
- parser.add_argument('--with-global-srcs', action='store_true', default=False)
-
- groups = {}
- args_list = groups.setdefault('default', [])
- for arg in sys.argv[1:]:
- if arg == '--with-own-obj':
- groups['default'].append(arg)
- elif arg == '--globals-lib':
- groups['default'].append(arg)
- elif arg == '--with-global-srcs':
- groups['default'].append(arg)
- elif arg.startswith(YA_ARG_PREFIX):
- group_name = arg[len(YA_ARG_PREFIX):]
- args_list = groups.setdefault(group_name, [])
- else:
- args_list.append(arg)
-
- return parser.parse_args(groups['default']), groups
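-
-# The raw command line is split into named groups by '-Ya,<name>' markers; a
-# sketch (file names hypothetical, tool groups depend on the platform):
-#   --lib out.a --arch LINUX -Ya,input a.o b.o -Ya,global_srcs g.o \
-#       -Ya,peers libx.a -Ya,linker clang++ -Ya,archiver ar
-# Everything before the first marker is parsed by the argparse parser above.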
-
-
-def strip_suppression_files(srcs):
- return [s for s in srcs if not s.endswith('.supp')]
-
-
-def main():
- args, groups = get_args()
-
- # Inputs
- auto_input = groups['input']
-
- # Outputs
- lib_output = args.lib
- obj_output = args.obj
-
- # Dependencies
- global_srcs = groups['global_srcs']
- global_srcs = strip_suppression_files(global_srcs)
- global_srcs = ProcessWholeArchiveOption(args.arch).construct_cmd(global_srcs)
- peers = groups['peers']
-
- # Tools
- linker = groups['linker']
- archiver = groups['archiver']
-
- do_link = linker + ['-o', obj_output, '-Wl,-r', '-nodefaultlibs', '-nostartfiles'] + global_srcs + auto_input
- do_archive = archiver + [lib_output] + peers
- do_globals = None
- if args.globals_lib:
- do_globals = archiver + [args.globals_lib] + auto_input + global_srcs
- if args.with_own_obj:
- do_archive += auto_input
- if args.with_global_srcs:
- do_archive += global_srcs
-
- def call(c):
- proc = subprocess.Popen(c, shell=False, stderr=sys.stderr, stdout=sys.stdout, cwd=args.build_root)
- proc.communicate()
- return proc.returncode
-
- if obj_output:
- link_res = call(do_link)
- if link_res:
- sys.exit(link_res)
-
- if do_globals:
- glob_res = call(do_globals)
- if glob_res:
- sys.exit(glob_res)
-
- sys.exit(call(do_archive))
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/scripts/make_java_classpath_file.py b/build/scripts/make_java_classpath_file.py
deleted file mode 100644
index c70a7876d7..0000000000
--- a/build/scripts/make_java_classpath_file.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import os
-import sys
-
-import process_command_files as pcf
-
-
-def make_cp_file(args):
- source = args[0]
- destination = args[1]
- with open(source) as src:
- lines = [l.strip() for l in src if l.strip()]
- with open(destination, 'w') as dst:
- dst.write(os.pathsep.join(lines))
-
-def make_cp_file_from_args(args):
- destination = args[0]
- with open(destination, 'w') as dst:
- dst.write(os.pathsep.join(args[1:]))
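-
-# Examples (file names hypothetical):
-#   make_java_classpath_file.py cp.list cp.txt                (join lines of cp.list)
-#   make_java_classpath_file.py --from-args cp.txt a.jar b.jar
-# Entries are joined with os.pathsep (':' on POSIX, ';' on Windows).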
-
-
-if __name__ == '__main__':
- args = pcf.get_args(sys.argv[1:])
- if sys.argv[1] != '--from-args':
- make_cp_file(args)
- else:
- make_cp_file_from_args(args[1:])
diff --git a/build/scripts/make_java_srclists.py b/build/scripts/make_java_srclists.py
deleted file mode 100644
index 65174bafd7..0000000000
--- a/build/scripts/make_java_srclists.py
+++ /dev/null
@@ -1,128 +0,0 @@
-import os
-import sys
-import argparse
-
-import process_command_files as pcf
-import java_pack_to_file as jcov
-
-
-def writelines(f, rng):
- f.writelines(item + '\n' for item in rng)
-
-
-def add_rel_src_to_coverage(coverage, src, source_root):
- rel = os.path.relpath(src, source_root)
- if not rel.startswith('..' + os.path.sep):
- coverage.append(rel)
-
-
-def main():
- args = pcf.get_args(sys.argv[1:])
- parser = argparse.ArgumentParser()
- parser.add_argument('--moddir')
- parser.add_argument('--java')
- parser.add_argument('--groovy')
- parser.add_argument('--kotlin')
- parser.add_argument('--coverage')
- parser.add_argument('--source-root')
- args, remaining_args = parser.parse_known_args(args)
-
- java = []
- kotlin = []
- groovy = []
- coverage = []
-
- cur_resources_list_file = None
- cur_jsources_list_file = None
- cur_srcdir = None
- cur_resources = []
- cur_jsources = []
-
- FILE_ARG = 1
- RESOURCES_DIR_ARG = 2
- SRCDIR_ARG = 3
- JSOURCES_DIR_ARG = 4
-
-    next_arg = FILE_ARG
-
- for src in remaining_args:
- if next_arg == RESOURCES_DIR_ARG:
- assert cur_resources_list_file is None
- cur_resources_list_file = src
- next_arg = FILE_ARG
- continue
- elif next_arg == JSOURCES_DIR_ARG:
- assert cur_jsources_list_file is None
- cur_jsources_list_file = src
- next_arg = FILE_ARG
- continue
- elif next_arg == SRCDIR_ARG:
- assert cur_srcdir is None
- cur_srcdir = src if os.path.isabs(src) else os.path.join(args.moddir, src)
- next_arg = FILE_ARG
- continue
-
- if src.endswith(".java"):
- java.append(src)
- kotlin.append(src)
- if args.coverage and args.source_root:
- add_rel_src_to_coverage(coverage, src, args.source_root)
- elif args.kotlin and src.endswith(".kt"):
- kotlin.append(src)
- if args.coverage and args.source_root:
- add_rel_src_to_coverage(coverage, src, args.source_root)
- elif args.groovy and src.endswith(".groovy"):
- groovy.append(src)
- else:
- if src == '--resources':
- if cur_resources_list_file is not None:
- with open(cur_resources_list_file, 'w') as f:
- writelines(f, cur_resources)
- cur_resources_list_file = None
- cur_srcdir = None
- cur_resources = []
- next_arg = RESOURCES_DIR_ARG
- continue
- if src == '--jsources':
- if cur_jsources_list_file is not None:
- with open(cur_jsources_list_file, 'w') as f:
- writelines(f, cur_jsources)
- cur_jsources_list_file = None
- cur_jsources = []
- next_arg = JSOURCES_DIR_ARG
- continue
- elif src == '--srcdir':
- next_arg = SRCDIR_ARG
- continue
- else:
- assert cur_srcdir is not None and cur_resources_list_file is not None
- cur_resources.append(os.path.relpath(src, cur_srcdir))
-
- if cur_jsources_list_file is not None:
- assert cur_srcdir is not None
- cur_jsources.append(os.path.relpath(src, cur_srcdir))
-
- if cur_resources_list_file is not None:
- with open(cur_resources_list_file, 'w') as f:
- writelines(f, cur_resources)
- if cur_jsources_list_file is not None:
- with open(cur_jsources_list_file, 'w') as f:
- writelines(f, cur_jsources)
-
- if args.java:
- with open(args.java, 'w') as f:
- writelines(f, java)
- if args.kotlin:
- with open(args.kotlin, 'w') as f:
- writelines(f, kotlin)
- if args.groovy:
- with open(args.groovy, 'w') as f:
- writelines(f, groovy)
- if args.coverage:
- jcov.write_coverage_sources(args.coverage, args.source_root, coverage)
-
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/build/scripts/make_manifest_from_bf.py b/build/scripts/make_manifest_from_bf.py
deleted file mode 100644
index bfea3ba3de..0000000000
--- a/build/scripts/make_manifest_from_bf.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import sys
-import zipfile
-import os
-import re
-
-
-def prepare_path(path):
- return ('file:/' + path.lstrip('/')) if os.path.isabs(path) else path
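-
-# e.g. prepare_path('/usr/lib/a.jar') == 'file:/usr/lib/a.jar'; relative paths
-# are passed through unchanged.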
-
-
-def main(args):
- bf, mf = args[0], args[1]
- if not os.path.exists(os.path.dirname(mf)):
- os.makedirs(os.path.dirname(mf))
- with open(bf) as f:
- class_path = f.read().strip()
- class_path = ' '.join(map(prepare_path, class_path.split('\n')))
- with zipfile.ZipFile(mf, 'w') as zf:
- lines = []
- while class_path:
- lines.append(class_path[:60])
- class_path = class_path[60:]
- if lines:
- zf.writestr('META-INF/MANIFEST.MF', 'Manifest-Version: 1.0\nClass-Path: \n ' + '\n '.join(lines) + ' \n\n')
-
-
-if __name__ == '__main__':
- main(sys.argv[1:])
diff --git a/build/scripts/merge_coverage_data.py b/build/scripts/merge_coverage_data.py
deleted file mode 100644
index b7fa3c6a86..0000000000
--- a/build/scripts/merge_coverage_data.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import sys
-import tarfile
-import copy
-import os
-import uuid
-
-
-def main(args):
- output_file, args = args[0], args[1:]
-    # heretic@: Split the inputs into archives that are merged (files) and
-    # member names that must not be merged (expendables, listed after
-    # '-no-merge'); such members are stored in output_file as
-    # {name}{uuid}.{extension} so the copies don't collide.
- try:
- split_i = args.index('-no-merge')
- except ValueError:
- split_i = len(args)
- files, expendables = args[:split_i], args[split_i + 1:]
-
- with tarfile.open(output_file, 'w') as outf:
- for x in files:
- with tarfile.open(x) as tf:
- for tarinfo in tf:
- new_tarinfo = copy.deepcopy(tarinfo)
- if new_tarinfo.name in expendables:
- dirname, basename = os.path.split(new_tarinfo.name)
- basename_parts = basename.split('.', 1)
- new_basename = '.'.join([basename_parts[0] + str(uuid.uuid4())] + basename_parts[1:])
- new_tarinfo.name = os.path.join(dirname, new_basename)
- outf.addfile(new_tarinfo, tf.extractfile(tarinfo))
-
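-# Example argv (names hypothetical): out.tar a.tar b.tar -no-merge report.exec
-# merges a.tar and b.tar; any member named 'report.exec' is stored as
-# 'report<uuid>.exec' so the copies don't collide.
-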
-
-if __name__ == '__main__':
- main(sys.argv[1:])
diff --git a/build/scripts/mkdir.py b/build/scripts/mkdir.py
deleted file mode 100755
index a326b29300..0000000000
--- a/build/scripts/mkdir.py
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env python
-import os
-import sys
-
-
-def mkdir_p(directory):
- if not os.path.exists(directory):
- os.makedirs(directory)
-
-if __name__ == "__main__":
- for directory in sys.argv[1:]:
- mkdir_p(directory)
diff --git a/build/scripts/mkdocs_builder_wrapper.py b/build/scripts/mkdocs_builder_wrapper.py
deleted file mode 100644
index 7e10dfac07..0000000000
--- a/build/scripts/mkdocs_builder_wrapper.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import os
-import subprocess
-import sys
-
-
-def main():
- cmd = []
- build_root = sys.argv[1]
- length = len(build_root)
- is_dep = False
- for arg in sys.argv[2:]:
- if is_dep:
- is_dep = False
- if not arg.endswith('.tar.gz'):
- continue
- basename = os.path.basename(arg)
- assert arg.startswith(build_root) and len(arg) > length + len(basename) and arg[length] in ('/', '\\')
- cmd.extend(['--dep', '{}:{}:{}'.format(build_root, os.path.dirname(arg[length+1:]), basename)])
- elif arg == '--dep':
- is_dep = True
- else:
- cmd.append(arg)
- assert not is_dep
- p = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = p.communicate()
- if p.returncode:
- if out:
- sys.stderr.write('stdout:\n{}\n'.format(out))
- if err:
- sys.stderr.write('stderr:\n{}\n'.format(err))
- sys.exit(p.returncode)
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/scripts/mkver.py b/build/scripts/mkver.py
deleted file mode 100755
index 321cdaade1..0000000000
--- a/build/scripts/mkver.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import sys
-
-if __name__ == '__main__':
- with open(sys.argv[1], 'r') as f:
- data = f.readline()
-
- beg = data.find('(') + 1
- end = data.find(')')
- version = data[beg:end]
-
- print '#pragma once'
- print '#define DEBIAN_VERSION "%s"' % version
diff --git a/build/scripts/move.py b/build/scripts/move.py
deleted file mode 100644
index 3f611fbc2e..0000000000
--- a/build/scripts/move.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import os
-import sys
-
-# build/scripts/move.py <src-1> <tgt-1> <src-2> <tgt-2> ... <src-n> <tgt-n>
-# renames src-1 to tgt-1, src-2 to tgt-2, ..., src-n to tgt-n.
-
-
-def main():
- assert len(sys.argv) % 2 == 1
- for index in range(1, len(sys.argv), 2):
- os.rename(sys.argv[index], sys.argv[index + 1])
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/scripts/pack_ios.py b/build/scripts/pack_ios.py
deleted file mode 100644
index 37c36d1f95..0000000000
--- a/build/scripts/pack_ios.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import argparse
-import os
-import shutil
-import subprocess
-import sys
-import tarfile
-
-
-def just_do_it():
- parser = argparse.ArgumentParser()
- parser.add_argument("--binary", required=True, help="executable file")
- parser.add_argument("--target", required=True, help="target archive path")
- parser.add_argument("--temp-dir", required=True, help="temp dir")
- parser.add_argument("peers", nargs='*')
- args = parser.parse_args()
- app_tar = [p for p in args.peers if p.endswith('.ios.interface')]
- if not app_tar:
-        print >> sys.stderr, 'No IOS_INTERFACE module found'
- shutil.copyfile(args.binary, os.path.join(args.temp_dir, 'bin'))
- if os.path.exists(args.target):
- os.remove(args.target)
- with tarfile.open(args.target, 'w') as tf:
- tf.add(os.path.join(args.temp_dir, 'bin'), arcname=os.path.join(os.path.basename(args.binary) + '.app', 'bin'))
- return
- if len(app_tar) > 1:
-        app_tar = [p for p in app_tar if not p.endswith('.default.ios.interface')]
-        if len(app_tar) > 1:
-            print >> sys.stderr, 'Multiple IOS_INTERFACE modules found; {} will be used'.format(app_tar[-1])
- app_tar = app_tar[-1]
- with tarfile.open(app_tar) as tf:
- tf.extractall(args.temp_dir)
- tar_suffix = '.default.ios.interface' if app_tar.endswith('.default.ios.interface') else '.ios.interface'
- app_unpacked_path = os.path.join(args.temp_dir, os.path.basename(app_tar)[:-len(tar_suffix)] + '.app')
- if not os.path.exists(app_unpacked_path):
- raise Exception('Bad IOS_INTERFACE resource: {}'.format(app_tar))
- shutil.copyfile(args.binary, os.path.join(app_unpacked_path, 'bin'))
- subprocess.check_call(['/usr/bin/codesign', '--force', '--sign', '-', app_unpacked_path])
- if os.path.exists(args.target):
- os.remove(args.target)
- binary_origin_name = os.path.basename(args.binary)
- while os.path.splitext(binary_origin_name)[1]:
- binary_origin_name = os.path.splitext(binary_origin_name)[0]
- with tarfile.open(args.target, 'w') as tf:
- tf.add(app_unpacked_path, arcname=binary_origin_name + '.app', recursive=True)
-
-
-if __name__ == '__main__':
- just_do_it()
diff --git a/build/scripts/pack_jcoverage_resources.py b/build/scripts/pack_jcoverage_resources.py
deleted file mode 100644
index f6e181067a..0000000000
--- a/build/scripts/pack_jcoverage_resources.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import sys
-import tarfile
-import os
-import subprocess
-
-
-def main(args):
- output_file = args[0]
- report_file = args[1]
-
- res = subprocess.call(args[args.index('-end') + 1:])
-
- if not os.path.exists(report_file):
-        print >> sys.stderr, "Can't find jacoco exec file"
- return res
-
- with tarfile.open(output_file, 'w') as outf:
- outf.add(report_file, arcname=os.path.basename(report_file))
-
- return res
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/build/scripts/perl_wrapper.py b/build/scripts/perl_wrapper.py
deleted file mode 100644
index cb4027f1d3..0000000000
--- a/build/scripts/perl_wrapper.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import os
-import sys
-import shutil
-
-if __name__ == '__main__':
- path = sys.argv[1]
- to = sys.argv[-1]
- fr = sys.argv[-2]
- to_dir = os.path.dirname(to)
-
- os.chdir(to_dir)
-
- f1 = os.path.basename(fr)
- fr_ = os.path.dirname(fr)
- f2 = os.path.basename(fr_)
- fr_ = os.path.dirname(fr_)
-
- os.makedirs(f2)
- shutil.copyfile(fr, os.path.join(f2, f1))
-
- if path[0] != '/':
- path = os.path.join(os.path.dirname(__file__), path)
-
- os.execv(path, [path] + sys.argv[2:])
diff --git a/build/scripts/postprocess_go_fbs.py b/build/scripts/postprocess_go_fbs.py
deleted file mode 100644
index 325fa07ea6..0000000000
--- a/build/scripts/postprocess_go_fbs.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import argparse
-import re
-import os
-
-
-# A very simple regexp to find Go import statements in the source code.
-# NOTE: only single-line (//) comments are handled.
-IMPORT_DECL = re.compile(r'''
- \bimport
- (
- \s+((\.|\w+)\s+)?"[^"]+" ( \s+//[^\n]* )?
- | \s* \( \s* ( ( \s+ ((\.|\w+)\s+)? "[^"]+" )? ( \s* //[^\n]* )? )* \s* \)
- )''', re.MULTILINE | re.DOTALL | re.VERBOSE)
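-
-# It matches both the single and the grouped form, e.g.:
-#   import fbs "a/b/c"  // trailing comment
-#   import (
-#       x "d/e"
-#   )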
-
-
-def parse_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('--input-dir', required=True)
- parser.add_argument('--map', nargs='*', default=None)
-
- return parser.parse_args()
-
-
-def process_go_file(file_name, import_map):
- content = ''
- with open(file_name, 'r') as f:
- content = f.read()
-
- start = -1
- end = -1
- for it in IMPORT_DECL.finditer(content):
- if start < 0:
- start = it.start()
- end = it.end()
-
- if start < 0:
- return
-
- imports = content[start:end]
- for namespace, path in import_map.iteritems():
- ns = namespace.split('.')
- name = '__'.join(ns)
- import_path = '/'.join(ns)
- imports = imports.replace('{} "{}"'.format(name, import_path), '{} "a.yandex-team.ru/{}"'.format(name, path))
-
- if imports != content[start:end]:
- with open(file_name, 'w') as f:
- f.write(content[:start])
- f.write(imports)
- f.write(content[end:])
-
-
-def main():
- args = parse_args()
-
- if not args.map:
- return
-
- raw_import_map = sorted(set(args.map))
- import_map = dict(z.split('=', 1) for z in raw_import_map)
- if len(raw_import_map) != len(import_map):
- for k, v in (z.split('=', 1) for z in raw_import_map):
- if v != import_map[k]:
- raise Exception('import map [{}] contains different values for key [{}]: [{}] and [{}].'.format(args.map, k, v, import_map[k]))
-
- for root, _, files in os.walk(args.input_dir):
- for src in (f for f in files if f.endswith('.go')):
- process_go_file(os.path.join(root, src), import_map)
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/scripts/process_whole_archive_option.py b/build/scripts/process_whole_archive_option.py
deleted file mode 100644
index a9c4ef676a..0000000000
--- a/build/scripts/process_whole_archive_option.py
+++ /dev/null
@@ -1,176 +0,0 @@
-import os
-import sys
-
-import process_command_files as pcf
-
-
-class ProcessWholeArchiveOption():
- def __init__(self, arch, peers=None, libs=None):
- self.arch = arch.upper()
- self.peers = { x : 0 for x in peers } if peers else None
- self.libs = { x : 0 for x in libs } if libs else None
- self.start_wa_marker = '--start-wa'
- self.end_wa_marker = '--end-wa'
-
- def _match_peer_lib(self, arg, ext):
- key = None
- if arg.endswith(ext):
- key = os.path.dirname(arg)
- return key if key and self.peers and key in self.peers else None
-
- def _match_lib(self, arg):
- return arg if self.libs and arg in self.libs else None
-
- def _process_arg(self, arg, ext='.a'):
- peer_key = self._match_peer_lib(arg, ext)
- lib_key = self._match_lib(arg)
- if peer_key:
- self.peers[peer_key] += 1
- if lib_key:
- self.libs[lib_key] += 1
- return peer_key if peer_key else lib_key
-
- def _check_peers(self):
- if self.peers:
- for key, value in self.peers.items():
- assert value > 0, '"{}" specified in WHOLE_ARCHIVE() macro is not used on link command'.format(key)
-
- def _construct_cmd_apple(self, args):
- force_load_flag = '-Wl,-force_load,'
- is_inside_wa_markers = False
-
- cmd = []
- for arg in args:
- if arg.startswith(force_load_flag):
- cmd.append(arg)
- elif arg == self.start_wa_marker:
- is_inside_wa_markers = True
- elif arg == self.end_wa_marker:
- is_inside_wa_markers = False
- elif is_inside_wa_markers:
- cmd.append(force_load_flag + arg)
- else:
- key = self._process_arg(arg)
- cmd.append(force_load_flag + arg if key else arg)
-
- self._check_peers()
-
- return cmd
-
- def _construct_cmd_win(self, args):
- whole_archive_prefix = '/WHOLEARCHIVE:'
- is_inside_wa_markers = False
-
- def add_prefix(arg, need_check_peers_and_libs):
- key = self._process_arg(arg, '.lib') if need_check_peers_and_libs else arg
- return whole_archive_prefix + arg if key else arg
-
- def add_whole_archive_prefix(arg, need_check_peers_and_libs):
- if not pcf.is_cmdfile_arg(arg):
- return add_prefix(arg, need_check_peers_and_libs)
-
- cmd_file_path = pcf.cmdfile_path(arg)
- cf_args = pcf.read_from_command_file(cmd_file_path)
- with open(cmd_file_path, 'w') as afile:
- for cf_arg in cf_args:
- afile.write(add_prefix(cf_arg, need_check_peers_and_libs) + "\n")
- return arg
-
- cmd = []
- for arg in args:
- if arg == self.start_wa_marker:
- is_inside_wa_markers = True
- elif arg == self.end_wa_marker:
- is_inside_wa_markers = False
- elif is_inside_wa_markers:
- cmd.append(add_whole_archive_prefix(arg, False))
- continue
- elif self.peers or self.libs:
- cmd.append(add_whole_archive_prefix(arg, True))
- else:
- cmd.append(arg)
-
- self._check_peers()
-
- return cmd
-
- def _construct_cmd_linux(self, args):
- whole_archive_flag = '-Wl,--whole-archive'
- no_whole_archive_flag = '-Wl,--no-whole-archive'
-
- def replace_markers(arg):
- if arg == self.start_wa_marker:
- return whole_archive_flag
- if arg == self.end_wa_marker:
- return no_whole_archive_flag
- return arg
-
- args = [replace_markers(arg) for arg in args]
-
- cmd = []
- is_inside_whole_archive = False
- is_whole_archive = False
-        # Avoid emitting excessive sequences of consecutive flags
-        # -Wl,--no-whole-archive -Wl,--whole-archive ('externally' specified
-        # -Wl,--[no-]whole-archive flags are intentionally ignored by this
-        # optimization)
- for arg in args:
- if arg == whole_archive_flag:
- is_inside_whole_archive = True
- is_whole_archive = False
- elif arg == no_whole_archive_flag:
- is_inside_whole_archive = False
- is_whole_archive = False
- else:
- key = self._process_arg(arg)
- if not is_inside_whole_archive:
- if key:
- if not is_whole_archive:
- cmd.append(whole_archive_flag)
- is_whole_archive = True
- elif is_whole_archive:
- cmd.append(no_whole_archive_flag)
- is_whole_archive = False
-
- cmd.append(arg)
-
- if is_whole_archive:
- cmd.append(no_whole_archive_flag)
-
- self._check_peers()
-
- return cmd
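-
-    # A sketch of the Linux rewriting, with a hypothetical peer dir 'a/b':
-    #   ['a/b/libx.a', 'c.a'] ->
-    #   ['-Wl,--whole-archive', 'a/b/libx.a', '-Wl,--no-whole-archive', 'c.a']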
-
- def construct_cmd(self, args):
- if self.arch in ('DARWIN', 'IOS', 'IOSSIM'):
- return self._construct_cmd_apple(args)
-
- if self.arch == 'WINDOWS':
- return self._construct_cmd_win(args)
-
- return self._construct_cmd_linux(args)
-
-
-def get_whole_archive_peers_and_libs(args):
- remaining_args = []
- peers = []
- libs = []
- peers_flag = '--whole-archive-peers'
- libs_flag = '--whole-archive-libs'
-
- next_is_peer = False
- next_is_lib = False
- for arg in args:
- if arg == peers_flag:
- next_is_peer = True
- elif arg == libs_flag:
- next_is_lib = True
- elif next_is_peer:
- peers.append(arg)
- next_is_peer = False
- elif next_is_lib:
- libs.append(arg)
- next_is_lib = False
- else:
- remaining_args.append(arg)
- return remaining_args, peers, libs
diff --git a/build/scripts/python_yndexer.py b/build/scripts/python_yndexer.py
deleted file mode 100644
index 3180665387..0000000000
--- a/build/scripts/python_yndexer.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import os
-import sys
-import threading
-import subprocess
-
-
-def _try_to_kill(process):
- try:
- process.kill()
- except Exception:
- pass
-
-
-def touch(path):
- if not os.path.exists(path):
- with open(path, 'w') as _:
- pass
-
-
-class Process(object):
- def __init__(self, args):
- self._process = subprocess.Popen(args)
- self._event = threading.Event()
- self._result = None
- thread = threading.Thread(target=self._run)
- thread.setDaemon(True)
- thread.start()
-
- def _run(self):
- self._process.communicate()
- self._result = self._process.returncode
- self._event.set()
-
- def wait(self, timeout):
- self._event.wait(timeout=timeout)
- _try_to_kill(self._process)
- return self._result
-
-
-if __name__ == '__main__':
- yndexer = sys.argv[1]
- timeout = int(sys.argv[2])
- output_file = sys.argv[3]
- input_file = sys.argv[4]
- partition_count = sys.argv[5]
- partition_index = sys.argv[6]
-
- process = Process([yndexer, '-f', input_file, '-y', output_file, '-c', partition_count, '-i', partition_index])
- result = process.wait(timeout=timeout)
-
- if result != 0:
- print >> sys.stderr, 'Yndexing process finished with code', result
- touch(output_file)
diff --git a/build/scripts/resolve_java_srcs.py b/build/scripts/resolve_java_srcs.py
deleted file mode 100644
index a2e6c20012..0000000000
--- a/build/scripts/resolve_java_srcs.py
+++ /dev/null
@@ -1,106 +0,0 @@
-import os
-import argparse
-import re
-import sys
-
-
-def list_all_files(directory, prefix='/', hidden_files=False):
- result = []
- if os.path.exists(directory):
- for i in os.listdir(directory):
- abs_path = os.path.join(directory, i)
-            result += list_all_files(abs_path, prefix + i + '/', hidden_files) \
-                if os.path.isdir(abs_path) else ([prefix + i] if (hidden_files or not i.startswith('.')) else [])
- return result
-
-
-def pattern_to_regexp(p):
- return '^' + \
- ('/' if not p.startswith('**') else '') + \
- re.escape(p).replace(
- r'\*\*\/', '[_DIR_]'
- ).replace(
- r'\*', '[_FILE_]'
- ).replace(
- '[_DIR_]', '(.*/)?'
- ).replace(
- '[_FILE_]', '([^/]*)'
- ) + '$'
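-
-# e.g. pattern_to_regexp('**/*.java') == r'^(.*/)?([^/]*)\.java$', which
-# matches paths like '/src/Main.java' as produced by list_all_files().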
-
-
-def resolve_java_srcs(srcdir, include_patterns, exclude_patterns, all_resources, resolve_kotlin=False, resolve_groovy=False):
- result = {'java': [], 'not_java': [], 'kotlin': [], 'groovy': []}
- include_patterns_normal, include_patterns_hidden, exclude_patterns_normal, exclude_patterns_hidden = [], [], [], []
- for vis, hid, patterns in ((include_patterns_normal, include_patterns_hidden, include_patterns), (exclude_patterns_normal, exclude_patterns_hidden, exclude_patterns),):
- for pattern in patterns:
- if (pattern if pattern.find('/') == -1 else pattern.rsplit('/', 1)[1]).startswith('.'):
- hid.append(pattern)
- else:
- vis.append(pattern)
- re_patterns = map(pattern_to_regexp, vis + hid)
- if sys.platform in ('win32', 'darwin'):
- re_patterns = [re.compile(i, re.IGNORECASE) for i in re_patterns]
- else:
- re_patterns = [re.compile(i) for i in re_patterns]
- vis[:], hid[:] = re_patterns[:len(vis)], re_patterns[len(vis):]
-
- for inc_patterns, exc_patterns, with_hidden_files in (
- (include_patterns_normal, exclude_patterns_normal, False),
- (include_patterns_hidden, exclude_patterns_hidden, True),
- ):
- for f in list_all_files(srcdir, hidden_files=with_hidden_files):
- excluded = False
-
- for exc_re in exc_patterns:
- if exc_re.match(f):
- excluded = True
- break
-
- if excluded:
- continue
-
- for inc_re in inc_patterns:
- if inc_re.match(f):
- s = os.path.normpath(f[1:])
- if all_resources or not (f.endswith('.java') or f.endswith('.kt') or f.endswith('.groovy')):
- result['not_java'].append(s)
- elif f.endswith('.java'):
- result['java'].append(os.path.join(srcdir, s))
- elif f.endswith('.kt') and resolve_kotlin:
- result['kotlin'].append(os.path.join(srcdir, s))
- elif f.endswith('.groovy') and resolve_groovy:
- result['groovy'].append(os.path.join(srcdir, s))
- else:
- result['not_java'].append(s)
- break
-
- return sorted(result['java']), sorted(result['not_java']), sorted(result['kotlin']), sorted(result['groovy'])
-
-
-def do_it(directory, sources_file, resources_file, kotlin_sources_file, groovy_sources_file, include_patterns, exclude_patterns, resolve_kotlin, resolve_groovy, append, all_resources):
- j, r, k, g = resolve_java_srcs(directory, include_patterns, exclude_patterns, all_resources, resolve_kotlin, resolve_groovy)
- mode = 'a' if append else 'w'
- open(sources_file, mode).writelines(i + '\n' for i in j)
- open(resources_file, mode).writelines(i + '\n' for i in r)
- if kotlin_sources_file:
- open(kotlin_sources_file, mode).writelines(i + '\n' for i in k + j)
- if groovy_sources_file:
- open(groovy_sources_file, mode).writelines(i + '\n' for i in g + j)
-
-
-if __name__ == '__main__':
- parser = argparse.ArgumentParser()
- parser.add_argument('-d', '--directory', required=True)
- parser.add_argument('-s', '--sources-file', required=True)
- parser.add_argument('-r', '--resources-file', required=True)
- parser.add_argument('-k', '--kotlin-sources-file', default=None)
- parser.add_argument('-g', '--groovy-sources-file', default=None)
- parser.add_argument('--append', action='store_true', default=False)
- parser.add_argument('--all-resources', action='store_true', default=False)
- parser.add_argument('--resolve-kotlin', action='store_true', default=False)
- parser.add_argument('--resolve-groovy', action='store_true', default=False)
- parser.add_argument('--include-patterns', nargs='*', default=[])
- parser.add_argument('--exclude-patterns', nargs='*', default=[])
- args = parser.parse_args()
-
- do_it(**vars(args))
diff --git a/build/scripts/retry.py b/build/scripts/retry.py
deleted file mode 100644
index d14170bfec..0000000000
--- a/build/scripts/retry.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import time
-import functools
-
-
-# Partly copy-pasted from contrib/python/retry
-def retry_func(f, exceptions=Exception, tries=-1, delay=1, max_delay=None, backoff=1):
- _tries, _delay = tries, delay
- while _tries:
- try:
- return f()
- except exceptions as e:
- _tries -= 1
- if not _tries:
- raise
-
- time.sleep(_delay)
- _delay *= backoff
-
- if max_delay is not None:
- _delay = min(_delay, max_delay)
-
-
-def retry(**retry_kwargs):
- def decorator(func):
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- return retry_func(lambda: func(*args, **kwargs), **retry_kwargs)
- return wrapper
- return decorator
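-
-
-# Usage sketch:
-#   @retry(tries=3, delay=0.5, backoff=2)
-#   def fetch_with_retries():
-#       ...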
diff --git a/build/scripts/rodata2cpp.py b/build/scripts/rodata2cpp.py
deleted file mode 100644
index be67d3af53..0000000000
--- a/build/scripts/rodata2cpp.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import argparse
-
-
-def main():
- parser = argparse.ArgumentParser(description='Convert rodata into C++ source with embedded file content')
- parser.add_argument('symbol', help='symbol name exported from generated file')
- parser.add_argument('rodata', type=argparse.FileType('rb'), help='input .rodata file path')
- parser.add_argument('cpp', type=argparse.FileType('w', encoding='UTF-8'), help='destination .cpp file path')
-
- args = parser.parse_args()
-    args.cpp.write('static_assert(sizeof(unsigned int) == 4, "oops, something went wrong");\n\n')
- args.cpp.write('extern "C" {\n')
- args.cpp.write(' extern const unsigned char ' + args.symbol + '[] = {\n')
-
- cnt = 0
-
- for ch in args.rodata.read():
- args.cpp.write('0x%02x, ' % ch)
-
- cnt += 1
-
- if cnt % 50 == 1:
- args.cpp.write('\n')
-
- args.cpp.write(' };\n')
- args.cpp.write(' extern const unsigned int ' + args.symbol + 'Size = sizeof(' + args.symbol + ');\n')
- args.cpp.write('}\n')
-
- args.rodata.close()
- args.cpp.close()
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/scripts/run_ios_simulator.py b/build/scripts/run_ios_simulator.py
deleted file mode 100644
index 052c855b77..0000000000
--- a/build/scripts/run_ios_simulator.py
+++ /dev/null
@@ -1,79 +0,0 @@
-import argparse
-import json
-import os
-import subprocess
-import sys
-
-
-def just_do_it():
- parser = argparse.ArgumentParser()
- parser.add_argument("--action", choices=["create", "spawn", "kill"])
- parser.add_argument("--simctl", help="simctl binary path")
- parser.add_argument("--profiles", help="profiles path")
- parser.add_argument("--device-dir", help="devices directory")
- parser.add_argument("--device-name", help="temp device name")
- args, tail = parser.parse_known_args()
- if args.action == 'create':
- action_create(args.simctl, args.profiles, args.device_dir, args.device_name, tail)
- elif args.action == "spawn":
- action_spawn(args.simctl, args.profiles, args.device_dir, args.device_name, tail)
- elif args.action == "kill":
- action_kill(args.simctl, args.profiles, args.device_dir, args.device_name)
-
-
-def action_create(simctl, profiles, device_dir, name, args):
- parser = argparse.ArgumentParser()
- parser.add_argument("--device-type", default="com.apple.CoreSimulator.SimDeviceType.iPhone-X")
- parser.add_argument("--device-runtime", default="com.apple.CoreSimulator.SimRuntime.iOS-12-1")
- args = parser.parse_args(args)
- all_devices = list(get_all_devices(simctl, profiles, device_dir))
- if filter(lambda x: x["name"] == name, all_devices):
- raise Exception("Device named {} already exists".format(name))
- subprocess.check_call([simctl, "--profiles", profiles, "--set", device_dir, "create", name, args.device_type, args.device_runtime])
- created = filter(lambda x: x["name"] == name, get_all_devices(simctl, profiles, device_dir))
- if not created:
- raise Exception("Creation error: temp device named {} not found".format(name))
- created = created[0]
- if created["availability"] != "(available)":
- raise Exception("Creation error: temp device {} status is {} ((available) expected)".format(name, created["availability"]))
-
-
-def action_spawn(simctl, profiles, device_dir, name, args):
- device = filter(lambda x: x["name"] == name, get_all_devices(simctl, profiles, device_dir))
- if not device:
- raise Exception("Can't spawn process: device named {} not found".format(name))
- if len(device) > 1:
- raise Exception("Can't spawn process: too many devices named {} found".format(name))
- device = device[0]
- os.execv(simctl, [simctl, "--profiles", profiles, "--set", device_dir, "spawn", device["udid"]] + args)
-
-
-def action_kill(simctl, profiles, device_dir, name):
- device = filter(lambda x: x["name"] == name, get_all_devices(simctl, profiles, device_dir))
- if not device:
- print >> sys.stderr, "Device named {} not found; do nothing".format(name)
- return
- if len(device) > 1:
-        raise Exception("Can't remove: too many devices named {}:\n{}".format(name, '\n'.join(str(i) for i in device)))
- device = device[0]
- os.execv(simctl, [simctl, "--profiles", profiles, "--set", device_dir, "delete", device["udid"]])
-
-
-def get_all_devices(simctl, profiles, device_dir):
- p = subprocess.Popen([simctl, "--profiles", profiles, "--set", device_dir, "list", "--json", "devices"], stdout=subprocess.PIPE)
- out, _ = p.communicate()
- rc = p.wait()
- if rc:
- raise Exception("Devices list command return code is {}\nstdout:\n{}".format(rc, out))
- raw_object = json.loads(out)
- if "devices" not in raw_object:
- raise Exception("Devices not found in\n{}".format(json.dumps(raw_object)))
- raw_object = raw_object["devices"]
- for os_name, devices in raw_object.items():
- for device in devices:
- device["os_name"] = os_name
- yield device
-
-
-if __name__ == '__main__':
- just_do_it()
diff --git a/build/scripts/run_javac.py b/build/scripts/run_javac.py
deleted file mode 100644
index c35546e0fe..0000000000
--- a/build/scripts/run_javac.py
+++ /dev/null
@@ -1,122 +0,0 @@
-import sys
-import subprocess
-import optparse
-import re
-
-
-def parse_args():
- parser = optparse.OptionParser()
- parser.disable_interspersed_args()
- parser.add_option('--sources-list')
- parser.add_option('--verbose', default=False, action='store_true')
- parser.add_option('--remove-notes', default=False, action='store_true')
- parser.add_option('--ignore-errors', default=False, action='store_true')
- parser.add_option('--kotlin', default=False, action='store_true')
- return parser.parse_args()
-
-
-COLORING = {
- r'^(?P<path>.*):(?P<line>\d*): error: (?P<msg>.*)': lambda m: '[[unimp]]{path}[[rst]]:[[alt2]]{line}[[rst]]: [[c:light-red]]error[[rst]]: [[bad]]{msg}[[rst]]'.format(
- path=m.group('path'),
- line=m.group('line'),
- msg=m.group('msg'),
- ),
- r'^(?P<path>.*):(?P<line>\d*): warning: (?P<msg>.*)': lambda m: '[[unimp]]{path}[[rst]]:[[alt2]]{line}[[rst]]: [[c:light-yellow]]warning[[rst]]: {msg}'.format(
- path=m.group('path'),
- line=m.group('line'),
- msg=m.group('msg'),
- ),
- r'^warning: ': lambda m: '[[c:light-yellow]]warning[[rst]]: ',
- r'^error: (?P<msg>.*)': lambda m: '[[c:light-red]]error[[rst]]: [[bad]]{msg}[[rst]]'.format(msg=m.group('msg')),
- r'^Note: ': lambda m: '[[c:light-cyan]]Note[[rst]]: ',
-}
-
-
-def colorize(err):
- for regex, sub in COLORING.iteritems():
- err = re.sub(regex, sub, err, flags=re.MULTILINE)
- return err
-
-
-def remove_notes(err):
- return '\n'.join([line for line in err.split('\n') if not line.startswith('Note:')])
-
-
-def find_javac(cmd):
- if not cmd:
- return None
- if cmd[0].endswith('javac') or cmd[0].endswith('javac.exe'):
- return cmd[0]
- if len(cmd) > 2 and cmd[1].endswith('build_java_with_error_prone.py'):
- for javas in ('java', 'javac'):
- if cmd[2].endswith(javas) or cmd[2].endswith(javas + '.exe'):
- return cmd[2]
- return None
-
-
-# temporary, for jdk8/jdk9+ compatibility
-def fix_cmd(cmd):
- if not cmd:
- return cmd
- javac = find_javac(cmd)
- if not javac:
- return cmd
- p = subprocess.Popen([javac, '-version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = p.communicate()
- out, err = out.strip(), err.strip()
- for prefix in ('javac 1.8', 'java version "1.8'):
- for raw_out in ((out or ''), (err or '')):
- for line in raw_out.split('\n'):
- if line.startswith(prefix):
- res = []
- i = 0
- while i < len(cmd):
- for option in ('--add-exports', '--add-modules'):
- if cmd[i] == option:
- i += 1
- break
- elif cmd[i].startswith(option + '='):
- break
- else:
- res.append(cmd[i])
- i += 1
- return res
- return cmd
-
-
-def main():
- opts, cmd = parse_args()
-
- with open(opts.sources_list) as f:
- input_files = f.read().strip().split()
-
- if opts.kotlin:
- input_files = [i for i in input_files if i.endswith('.kt')]
-
- if not input_files:
- if opts.verbose:
- sys.stderr.write('No files to compile, javac is not launched.\n')
-
- else:
- p = subprocess.Popen(fix_cmd(cmd), stderr=subprocess.PIPE)
- _, err = p.communicate()
- rc = p.wait()
-
- if opts.remove_notes:
- err = remove_notes(err)
-
- try:
- err = colorize(err)
-
- except Exception:
- pass
-
- if opts.ignore_errors and rc:
- sys.stderr.write('error: javac actually failed with exit code {}\n'.format(rc))
- rc = 0
- sys.stderr.write(err)
- sys.exit(rc)
-
-
-if __name__ == '__main__':
- main()
diff --git a/build/scripts/run_junit.py b/build/scripts/run_junit.py
deleted file mode 100644
index 089f149f72..0000000000
--- a/build/scripts/run_junit.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import os
-import sys
-
-SHUTDOWN_SIGNAL = 'SIGUSR1'
-
-
-class SignalInterruptionError(Exception):
- pass
-
-
-def on_shutdown(s, f):
- raise SignalInterruptionError()
-
-
-def main():
- args = sys.argv[1:]
-
- def execve():
- os.execve(args[0], args, os.environ)
-
- jar_binary = args[args.index('--jar-binary') + 1]
- java_bin_dir = os.path.dirname(jar_binary)
- jstack_binary = os.path.join(java_bin_dir, 'jstack')
-
- if not os.path.exists(jstack_binary):
- sys.stderr.write("jstack is missing: {}\n".format(jstack_binary))
- execve()
-
- import signal
-
- signum = getattr(signal, SHUTDOWN_SIGNAL, None)
-
- if signum is None:
- execve()
-
- import subprocess
-
- proc = subprocess.Popen(args)
- signal.signal(signum, on_shutdown)
- timeout = False
-
- try:
- proc.wait()
- except SignalInterruptionError:
- sys.stderr.write("\nGot {} signal: going to shutdown junit\n".format(signum))
- # Dump stack traces
- subprocess.call([jstack_binary, str(proc.pid)], stdout=sys.stderr)
- # Kill junit - for more info see DEVTOOLS-7636
- os.kill(proc.pid, signal.SIGKILL)
- proc.wait()
- timeout = True
-
- if proc.returncode:
- sys.stderr.write('java exit code: {}\n'.format(proc.returncode))
- if timeout:
- # In case of timeout return specific exit code
- # https://a.yandex-team.ru/arc/trunk/arcadia/devtools/ya/test/const/__init__.py?rev=r8578188#L301
- proc.returncode = 10
- sys.stderr.write('java exit code changed to {}\n'.format(proc.returncode))
-
- return proc.returncode
-
-
-if __name__ == '__main__':
- exit(main())
diff --git a/build/scripts/run_msvc_wine.py b/build/scripts/run_msvc_wine.py
deleted file mode 100644
index 9305db97ee..0000000000
--- a/build/scripts/run_msvc_wine.py
+++ /dev/null
@@ -1,584 +0,0 @@
-import sys
-import os
-import re
-import subprocess
-import signal
-import time
-import json
-import argparse
-import errno
-
-import process_command_files as pcf
-import process_whole_archive_option as pwa
-
-
-procs = []
-build_kekeke = 45
-
-
-def stringize(s):
- return s.encode('utf-8') if isinstance(s, unicode) else s
-
-
-def run_subprocess(*args, **kwargs):
- if 'env' in kwargs:
- kwargs['env'] = {stringize(k): stringize(v) for k, v in kwargs['env'].iteritems()}
-
- p = subprocess.Popen(*args, **kwargs)
-
- procs.append(p)
-
- return p
-
-
-def run_subprocess_with_timeout(timeout, args):
- attempts_remaining = 5
- delay = 1
- p = None
- while True:
- try:
- p = run_subprocess(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- stdout, stderr = p.communicate(timeout=timeout)
- return p, stdout, stderr
- except subprocess.TimeoutExpired as e:
- print >>sys.stderr, 'timeout running {0}, error {1}, delay {2} seconds'.format(args, str(e), delay)
- if p is not None:
- try:
- p.kill()
- p.wait(timeout=1)
- except Exception:
- pass
- attempts_remaining -= 1
- if attempts_remaining == 0:
- raise
- time.sleep(delay)
- delay = min(2 * delay, 4)
-
-
-def terminate_slaves():
- for p in procs:
- try:
- p.terminate()
- except Exception:
- pass
-
-
-def sig_term(sig, fr):
- terminate_slaves()
- sys.exit(sig)
-
-
-def subst_path(l):
- if len(l) > 3:
- if l[:3].lower() in ('z:\\', 'z:/'):
- return l[2:].replace('\\', '/')
-
- return l
-
-
-def call_wine_cmd_once(wine, cmd, env, mode):
- p = run_subprocess(wine + cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, close_fds=True, shell=False)
-
- output = find_cmd_out(cmd)
- error = None
- if output is not None and os.path.exists(output):
- try:
- os.remove(output)
- except OSError as e:
- if e.errno != errno.ENOENT:
- error = e
- except Exception as e:
- error = e
-
- if error is not None:
- print >> sys.stderr, 'Output {} already exists and we have failed to remove it: {}'.format(output, error)
-
- # print >>sys.stderr, cmd, env, wine
-
- stdout_and_stderr, _ = p.communicate()
-
- return_code = p.returncode
- if not stdout_and_stderr:
- if return_code != 0:
- raise Exception('wine did something strange')
-
- return return_code
- elif ' : fatal error ' in stdout_and_stderr:
- return_code = 1
- elif ' : error ' in stdout_and_stderr:
- return_code = 2
-
- lines = [x.strip() for x in stdout_and_stderr.split('\n')]
-
- prefixes = [
- 'Microsoft (R)',
- 'Copyright (C)',
- 'Application tried to create a window',
- 'The graphics driver is missing',
- 'Could not load wine-gecko',
- 'wine: configuration in',
- 'wine: created the configuration directory',
- 'libpng warning:'
- ]
-
- suffixes = [
- '.c',
- '.cxx',
- '.cc',
- '.cpp',
- '.masm',
- ]
-
- substrs = [
- 'Creating library Z:',
- 'err:heap',
- 'err:menubuilder:',
- 'err:msvcrt',
- 'err:ole:',
- 'err:wincodecs:',
- 'err:winediag:',
- ]
-
- def good_line(l):
- for x in prefixes:
- if l.startswith(x):
- return False
-
- for x in suffixes:
- if l.endswith(x):
- return False
-
- for x in substrs:
- if x in l:
- return False
-
- return True
-
- def filter_lines():
- for l in lines:
- if good_line(l):
- yield subst_path(l.strip())
-
- stdout_and_stderr = '\n'.join(filter_lines()).strip()
-
- if stdout_and_stderr:
- print >>sys.stderr, stdout_and_stderr
-
- return return_code
-
-
-def prepare_vc(fr, to):
- for p in os.listdir(fr):
- fr_p = os.path.join(fr, p)
- to_p = os.path.join(to, p)
-
- if not os.path.exists(to_p):
- print >>sys.stderr, 'install %s -> %s' % (fr_p, to_p)
-
- os.link(fr_p, to_p)
-
-
-def run_slave():
- args = json.loads(sys.argv[3])
- wine = sys.argv[1]
-
- signal.signal(signal.SIGTERM, sig_term)
-
- if args.get('tout', None):
- signal.signal(signal.SIGALRM, sig_term)
- signal.alarm(args['tout'])
-
- tout = 0.1
-
- while True:
- try:
- return call_wine_cmd_once([wine], args['cmd'], args['env'], args['mode'])
- except Exception as e:
- print >>sys.stderr, '%s, will retry in %s' % (str(e), tout)
-
- time.sleep(tout)
- tout = min(2 * tout, 4)
-
-
-def find_cmd_out(args):
- for arg in args:
- if arg.startswith('/Fo'):
- return arg[3:]
-
- if arg.startswith('/OUT:'):
- return arg[5:]
-
-
-def calc_zero_cnt(data):
- zero_cnt = 0
-
- for ch in data:
- if ch == chr(0):
- zero_cnt += 1
-
- return zero_cnt
-
-
-def is_good_file(p):
- if not os.path.isfile(p):
- return False
-
- if os.path.getsize(p) < 300:
- return False
-
- asm_pattern = re.compile(r'asm(\.\w+)?\.obj$')
- if asm_pattern.search(p):
- pass
- elif p.endswith('.obj'):
- with open(p, 'rb') as f:
- prefix = f.read(200)
-
- if ord(prefix[0]) != 0:
- return False
-
- if ord(prefix[1]) != 0:
- return False
-
- if ord(prefix[2]) != 0xFF:
- return False
-
- if ord(prefix[3]) != 0xFF:
- return False
-
- if calc_zero_cnt(prefix) > 195:
- return False
-
- f.seek(-100, os.SEEK_END)
- last = f.read(100)
-
- if calc_zero_cnt(last) > 95:
- return False
-
- if last[-1] != chr(0):
- return False
- elif p.endswith('.lib'):
- with open(p, 'rb') as f:
- if f.read(7) != '!<arch>':
- return False
-
- return True
-
-
-RED = '\x1b[31;1m'
-GRAY = '\x1b[30;1m'
-RST = '\x1b[0m'
-MGT = '\x1b[35m'
-YEL = '\x1b[33m'
-GRN = '\x1b[32m'
-CYA = '\x1b[36m'
-
-
-def colorize_strings(l):
- p = l.find("'")
-
- if p >= 0:
- yield l[:p]
-
- l = l[p + 1:]
-
- p = l.find("'")
-
- if p >= 0:
- yield CYA + "'" + subst_path(l[:p]) + "'" + RST
-
- for x in colorize_strings(l[p + 1:]):
- yield x
- else:
- yield "'" + l
- else:
- yield l
-
-
-def colorize_line(l):
- lll = l
-
- try:
- parts = []
-
- if l.startswith('(compiler file'):
- return ''.join(colorize_strings(l))
-
- if l.startswith('/'):
- p = l.find('(')
- parts.append(GRAY + l[:p] + RST)
- l = l[p:]
-
- if l and l.startswith('('):
- p = l.find(')')
- parts.append(':' + MGT + l[1:p] + RST)
- l = l[p + 1:]
-
- if l:
- if l.startswith(' : '):
- l = l[1:]
-
- if l.startswith(': error'):
- parts.append(': ' + RED + 'error' + RST)
- l = l[7:]
- elif l.startswith(': warning'):
- parts.append(': ' + YEL + 'warning' + RST)
- l = l[9:]
- elif l.startswith(': note'):
- parts.append(': ' + GRN + 'note' + RST)
- l = l[6:]
- elif l.startswith('fatal error'):
- parts.append(RED + 'fatal error' + RST)
- l = l[11:]
-
- if l:
- parts.extend(colorize_strings(l))
-
- return ''.join(parts)
- except Exception:
- return lll
-
-
-def colorize(out):
- return '\n'.join(colorize_line(l) for l in out.split('\n'))
-
-
-def trim_path(path, winepath):
- p1, p1_stdout, p1_stderr = run_subprocess_with_timeout(60, [winepath, '-w', path])
- win_path = p1_stdout.strip()
-
- if p1.returncode != 0 or not win_path:
- # Fall back to only winepath -s
- win_path = path
-
- p2, p2_stdout, p2_stderr = run_subprocess_with_timeout(60, [winepath, '-s', win_path])
- short_path = p2_stdout.strip()
-
- check_path = short_path
- if check_path.startswith(('Z:', 'z:')):
- check_path = check_path[2:]
-
- if not check_path[1:].startswith((path[1:4], path[1:4].upper())):
- raise Exception(
- 'Cannot trim path {}; 1st winepath exit code: {}, stdout:\n{}\n stderr:\n{}\n 2nd winepath exit code: {}, stdout:\n{}\n stderr:\n{}'.format(
- path, p1.returncode, p1_stdout, p1_stderr, p2.returncode, p2_stdout, p2_stderr
- ))
-
- return short_path
-
-
-def downsize_path(path, short_names):
- flag = ''
- if path.startswith('/Fo'):
- flag = '/Fo'
- path = path[3:]
-
- for full_name, short_name in short_names.items():
- if path.startswith(full_name):
- path = path.replace(full_name, short_name)
-
- return flag + path
-
-
-def make_full_path_arg(arg, bld_root, short_root):
- if arg[0] != '/' and len(os.path.join(bld_root, arg)) > 250:
- return os.path.join(short_root, arg)
- return arg
-
-
-def fix_path(p):
- topdirs = ['/%s/' % d for d in os.listdir('/')]
- def abs_path_start(path, pos):
- if pos < 0:
- return False
- return pos == 0 or path[pos - 1] == ':'
-
- pp = None
- for pr in topdirs:
- pp2 = p.find(pr)
- if abs_path_start(p, pp2) and (pp is None or pp > pp2):
- pp = pp2
- if pp is not None:
- return p[:pp] + 'Z:' + p[pp:].replace('/', '\\')
- if p.startswith('/Fo'):
- return '/Fo' + p[3:].replace('/', '\\')
- return p
-
-
-def process_free_args(args, wine, bld_root, mode):
- whole_archive_prefix = '/WHOLEARCHIVE:'
- short_names = {}
- winepath = os.path.join(os.path.dirname(wine), 'winepath')
- short_names[bld_root] = trim_path(bld_root, winepath)
- # Slow for no benefit.
- # arc_root = args.arcadia_root
- # short_names[arc_root] = trim_path(arc_root, winepath)
-
- free_args, wa_peers, wa_libs = pwa.get_whole_archive_peers_and_libs(pcf.skip_markers(args))
-
- process_link = lambda x: make_full_path_arg(x, bld_root, short_names[bld_root]) if mode in ('link', 'lib') else x
- def process_arg(arg):
- with_wa_prefix = arg.startswith(whole_archive_prefix)
- prefix = whole_archive_prefix if with_wa_prefix else ''
- without_prefix_arg = arg[len(prefix):]
- return prefix + fix_path(process_link(downsize_path(without_prefix_arg, short_names)))
-
- result = []
- for arg in free_args:
- if pcf.is_cmdfile_arg(arg):
- cmd_file_path = pcf.cmdfile_path(arg)
- cf_args = pcf.read_from_command_file(cmd_file_path)
- with open(cmd_file_path, 'w') as afile:
- for cf_arg in cf_args:
- afile.write(process_arg(cf_arg) + "\n")
- result.append(arg)
- else:
- result.append(process_arg(arg))
- return pwa.ProcessWholeArchiveOption('WINDOWS', wa_peers, wa_libs).construct_cmd(result)
-
-
-def run_main():
- parser = argparse.ArgumentParser()
- parser.add_argument('wine', action='store')
- parser.add_argument('-v', action='store', dest='version', default='120')
- parser.add_argument('-I', action='append', dest='incl_paths')
- parser.add_argument('mode', action='store')
- parser.add_argument('arcadia_root', action='store')
- parser.add_argument('arcadia_build_root', action='store')
- parser.add_argument('binary', action='store')
- parser.add_argument('free_args', nargs=argparse.REMAINDER)
- # For now, just unpack. Ideally we should fix the paths and pack the arguments back into the command file
- args = parser.parse_args()
-
- wine = args.wine
- mode = args.mode
- binary = args.binary
- version = args.version
- incl_paths = args.incl_paths
- bld_root = args.arcadia_build_root
- free_args = args.free_args
-
- wine_dir = os.path.dirname(os.path.dirname(wine))
- bin_dir = os.path.dirname(binary)
- tc_dir = os.path.dirname(os.path.dirname(os.path.dirname(bin_dir)))
- if not incl_paths:
- incl_paths = [tc_dir + '/VC/include', tc_dir + '/include']
-
- cmd_out = find_cmd_out(free_args)
-
- env = os.environ.copy()
-
- env.pop('DISPLAY', None)
-
- env['WINEDLLOVERRIDES'] = 'msvcr{}=n'.format(version)
- env['WINEDEBUG'] = 'fixme-all'
- env['INCLUDE'] = ';'.join(fix_path(p) for p in incl_paths)
- env['VSINSTALLDIR'] = fix_path(tc_dir)
- env['VCINSTALLDIR'] = fix_path(tc_dir + '/VC')
- env['WindowsSdkDir'] = fix_path(tc_dir)
- env['LIBPATH'] = fix_path(tc_dir + '/VC/lib/amd64')
- env['LIB'] = fix_path(tc_dir + '/VC/lib/amd64')
- env['LD_LIBRARY_PATH'] = ':'.join(wine_dir + d for d in ['/lib', '/lib64', '/lib64/wine'])
-
- cmd = [binary] + process_free_args(free_args, wine, bld_root, mode)
-
- for x in ('/NOLOGO', '/nologo', '/FD'):
- try:
- cmd.remove(x)
- except ValueError:
- pass
-
- def run_process(sleep, tout):
- if sleep:
- time.sleep(sleep)
-
- args = {
- 'cmd': cmd,
- 'env': env,
- 'mode': mode,
- 'tout': tout
- }
-
- slave_cmd = [sys.executable, sys.argv[0], wine, 'slave', json.dumps(args)]
- p = run_subprocess(slave_cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=False)
- out, _ = p.communicate()
- return p.wait(), out
-
- def print_err_log(log):
- if not log:
- return
- if mode == 'cxx':
- log = colorize(log)
- print >>sys.stderr, log
-
- tout = 200
-
- while True:
- rc, out = run_process(0, tout)
-
- if rc in (-signal.SIGALRM, signal.SIGALRM):
- print_err_log(out)
- print >>sys.stderr, '##append_tag##time out'
- elif out and ' stack overflow ' in out:
- print >>sys.stderr, '##append_tag##stack overflow'
- elif out and 'recvmsg: Connection reset by peer' in out:
- print >>sys.stderr, '##append_tag##wine gone'
- elif out and 'D8037' in out:
- print >>sys.stderr, '##append_tag##repair wine'
-
- try:
- os.unlink(os.path.join(os.environ['WINEPREFIX'], '.update-timestamp'))
- except Exception as e:
- print >>sys.stderr, e
-
- else:
- print_err_log(out)
-
- # non-zero return code - bad, return it immediately
- if rc:
- print >>sys.stderr, '##win_cmd##' + ' '.join(cmd)
- print >>sys.stderr, '##args##' + ' '.join(free_args)
- return rc
-
- # check that the output exists (if we expect it!) and looks like a real, non-truncated file
- if cmd_out:
- if is_good_file(cmd_out):
- return 0
- else:
- # retry!
- print >>sys.stderr, '##append_tag##no output'
- else:
- return 0
-
- tout *= 3
-
-
-def main():
- prefix_suffix = os.environ.pop('WINEPREFIX_SUFFIX', None)
- if prefix_suffix is not None:
- prefix = os.environ.pop('WINEPREFIX', None)
- if prefix is not None:
- os.environ['WINEPREFIX'] = os.path.join(prefix, prefix_suffix)
-
- # just in case
- signal.alarm(2000)
-
- if sys.argv[2] == 'slave':
- func = run_slave
- else:
- func = run_main
-
- try:
- try:
- sys.exit(func())
- finally:
- terminate_slaves()
- except KeyboardInterrupt:
- sys.exit(4)
- except Exception as e:
- print >>sys.stderr, str(e)
-
- sys.exit(3)
-
-
-if __name__ == '__main__':
- main()
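
A note on is_good_file above: the 00 00 FF FF prefix it requires on .obj files is, as far as I can tell, the anonymous-object signature MSVC emits for /GL and import objects (Sig1 = IMAGE_FILE_MACHINE_UNKNOWN, Sig2 = 0xFFFF). A standalone sketch of just that signature check, under that assumption:

    import os

    def looks_like_anon_coff(path):
        # Same test as is_good_file: plausible size plus the
        # 00 00 FF FF anonymous-object signature in the first bytes.
        if not os.path.isfile(path) or os.path.getsize(path) < 300:
            return False
        with open(path, 'rb') as f:
            prefix = bytearray(f.read(4))
        return list(prefix) == [0, 0, 0xFF, 0xFF]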
diff --git a/build/scripts/run_sonar.py b/build/scripts/run_sonar.py
deleted file mode 100644
index 761cc34b78..0000000000
--- a/build/scripts/run_sonar.py
+++ /dev/null
@@ -1,121 +0,0 @@
-import os
-import sys
-import zipfile
-import tarfile
-import subprocess as sp
-import optparse
-import shutil
-import xml.etree.ElementTree as et
-
-
-def parse_args():
- parser = optparse.OptionParser()
- parser.add_option(
- '--classes-jar-path',
- dest='classes_jar_paths',
- action='append',
- default=[],
- )
- parser.add_option('--sources-jar-path')
- parser.add_option('--sonar-scanner-jar-path')
- parser.add_option('--sonar-scanner-main-class')
- parser.add_option('--java-coverage-merged-tar')
- parser.add_option('--java-binary-path')
- parser.add_option('--log-path')
- parser.add_option('--gcov-report-path')
- parser.add_option('--source-root')
- parser.add_option('--java-args', action='append', default=[])
- return parser.parse_args()
-
-
-def extract_zip_file(zip_file_path, dest_dir):
- with zipfile.ZipFile(zip_file_path) as arch:
- arch.extractall(dest_dir)
-
-
-def get_source_real_path(source_root, path):
- parts = os.path.normpath(path).split(os.path.sep)
- for i in xrange(len(parts)):
- if os.path.exists(os.path.join(source_root, *parts[i:])):
- return os.path.join(*parts[i:])
- return None
-
-
-def collect_cpp_sources(report, source_root, destination):
- sources = set()
- with open(report) as f:
- root = et.fromstring(f.read())
- for f in root.findall('.//class[@filename]'):
- real_filename = get_source_real_path(source_root, f.attrib['filename'])
- if real_filename:
- f.attrib['filename'] = real_filename
- sources.add(real_filename)
- with open(report, 'w') as f:
- pref = '''<?xml version="1.0" ?>
-<!DOCTYPE coverage
- SYSTEM 'http://cobertura.sourceforge.net/xml/coverage-03.dtd'>\n'''
- f.write(pref + et.tostring(root, encoding='utf-8') + '\n\n')
- for src in sources:
- dst = os.path.join(destination, src)
- src = os.path.join(source_root, src)
- if os.path.isfile(src):
- if not os.path.exists(os.path.dirname(dst)):
- os.makedirs(os.path.dirname(dst))
- os.link(src, dst)
-
-
-def main(opts, props_args):
- sources_dir = os.path.abspath('src')
- base_props_args = ['-Dsonar.sources=' + sources_dir]
- os.mkdir(sources_dir)
- if opts.sources_jar_path:
- extract_zip_file(opts.sources_jar_path, sources_dir)
- if opts.gcov_report_path:
- collect_cpp_sources(opts.gcov_report_path, opts.source_root, sources_dir)
- base_props_args += ['-Dsonar.projectBaseDir=' + sources_dir, '-Dsonar.cxx.coverage.reportPath=' + opts.gcov_report_path]
-
- if opts.classes_jar_paths:
- classes_dir = os.path.abspath('cls')
- os.mkdir(classes_dir)
-
- for classes_jar_path in opts.classes_jar_paths:
- extract_zip_file(classes_jar_path, classes_dir)
-
- base_props_args.append('-Dsonar.java.binaries=' + classes_dir)
-
- if opts.java_coverage_merged_tar:
- jacoco_report_path = os.path.abspath('jacoco.exec')
- with open(jacoco_report_path, 'w') as dest:
- with tarfile.open(opts.java_coverage_merged_tar) as tar:
- for src in tar:
- extracted = tar.extractfile(src)
- if extracted is not None:
- shutil.copyfileobj(extracted, dest)
-
- base_props_args += [
- '-Dsonar.core.codeCoveragePlugin=jacoco',
- '-Dsonar.jacoco.reportPath=' + jacoco_report_path
- ]
- java_args = ['-{}'.format(i) for i in opts.java_args] + ['-Djava.net.preferIPv6Addresses=true', '-Djava.net.preferIPv4Addresses=false']
-
- sonar_cmd = [
- opts.java_binary_path,
- ] + java_args + [
- '-classpath',
- opts.sonar_scanner_jar_path,
- ] + base_props_args + props_args + [opts.sonar_scanner_main_class, '-X']
-
- p = sp.Popen(sonar_cmd, stdout=sp.PIPE, stderr=sp.STDOUT)
- out, _ = p.communicate()
-
- sys.stderr.write(out)
- with open(opts.log_path, 'a') as f:
- f.write(out)
-
- sys.exit(p.returncode)
-
-
-if __name__ == '__main__':
- opts, args = parse_args()
- props_args = ['-D' + arg for arg in args]
- main(opts, props_args)
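
For reference, a hypothetical run_sonar.py invocation (all paths illustrative): bare trailing arguments become -D properties and are appended after the option-derived ones:

    # python run_sonar.py --sources-jar-path src.jar \
    #     --sonar-scanner-jar-path scanner.jar \
    #     --sonar-scanner-main-class org.sonarsource.scanner.cli.Main \
    #     --java-binary-path java --log-path sonar.log \
    #     sonar.projectKey=my_project
    # -> props_args == ['-Dsonar.projectKey=my_project']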
diff --git a/build/scripts/setup_java_tmpdir.py b/build/scripts/setup_java_tmpdir.py
deleted file mode 100644
index e478d4aa96..0000000000
--- a/build/scripts/setup_java_tmpdir.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import os
-import sys
-import platform
-import subprocess
-
-
-def fix_tmpdir(cmd):
- if not cmd:
- return cmd
- java_id, option_name = None, None
- for i, java in enumerate(cmd):
- if java.endswith('java') or java.endswith('java.exe'):
- java_id = i
- option_name = '-Djava.io.tmpdir='
- break
- if java.endswith('javac') or java.endswith('javac.exe'):
- java_id = i
- option_name = '-J-Djava.io.tmpdir='
- break
- if java_id is None:
- return cmd
- for arg in cmd[java_id:]:
- if arg.startswith(option_name):
- return cmd
- tmpdir = os.environ.get('TMPDIR') or os.environ.get('TEMPDIR')
- if not tmpdir:
- return cmd
- return cmd[:java_id + 1] + ['{}{}'.format(option_name, tmpdir)] + cmd[java_id + 1:]
-
-
-def just_do_it():
- args = fix_tmpdir(sys.argv[1:])
- if platform.system() == 'Windows':
- sys.exit(subprocess.Popen(args).wait())
- else:
- os.execv(args[0], args)
-
-
-if __name__ == '__main__':
- just_do_it()
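
To make the rewrite concrete, assuming TMPDIR=/tmp/build (values hypothetical):

    # ['jdk/bin/java', '-jar', 'app.jar']
    #     -> ['jdk/bin/java', '-Djava.io.tmpdir=/tmp/build', '-jar', 'app.jar']
    # ['jdk/bin/javac', 'A.java']
    #     -> ['jdk/bin/javac', '-J-Djava.io.tmpdir=/tmp/build', 'A.java']
    # Commands with no java/javac, or that already set the option, pass through unchanged.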
diff --git a/build/scripts/sky.py b/build/scripts/sky.py
deleted file mode 100644
index b703af7ed1..0000000000
--- a/build/scripts/sky.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import logging
-import os
-import subprocess
-
-import fetch_from
-
-
-class UnsupportedProtocolException(Exception):
- pass
-
-
-def executable_path():
- return "/usr/local/bin/sky"
-
-
-def is_available():
- if not os.path.exists(executable_path()):
- return False
- try:
- subprocess.check_output([executable_path(), "--version"])
- return True
- except subprocess.CalledProcessError:
- return False
- except OSError:
- return False
-
-
-def fetch(skynet_id, file_name, timeout=None):
- if not is_available():
- raise UnsupportedProtocolException("Skynet is not available")
-
- target_dir = os.path.abspath(fetch_from.uniq_string_generator())
- os.mkdir(target_dir)
-
- cmd_args = [executable_path(), "get", "-N", "Backbone", "--user", "--wait", "--dir", target_dir, skynet_id]
- if timeout is not None:
- cmd_args += ["--timeout", str(timeout)]
-
- logging.info("Call skynet with args: %s", cmd_args)
- stdout = subprocess.check_output(cmd_args).strip()
- logging.debug("Skynet call with args %s is finished, result is %s", cmd_args, stdout)
-
- return os.path.join(target_dir, file_name)
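
A hedged usage sketch (the resource id and file name are made up):

    # local_path = fetch('rbtorrent:0123deadbeef', 'data.bin', timeout=600)
    # runs: sky get -N Backbone --user --wait --dir <uniq-dir> rbtorrent:0123deadbeef --timeout 600
    # and returns '<uniq-dir>/data.bin'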
diff --git a/build/scripts/stderr2stdout.py b/build/scripts/stderr2stdout.py
deleted file mode 100644
index 0e510da373..0000000000
--- a/build/scripts/stderr2stdout.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import subprocess
-import sys
-
-if __name__ == '__main__':
- assert len(sys.argv) > 1
- sys.exit(subprocess.Popen(sys.argv[1:], stderr=sys.stdout).wait())
diff --git a/build/scripts/symlink.py b/build/scripts/symlink.py
deleted file mode 100755
index 17bc8447f1..0000000000
--- a/build/scripts/symlink.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import os
-import platform
-from subprocess import call
-
-
-def symlink():
- if len(sys.argv) < 3:
- print >>sys.stderr, "Usage: symlink.py <source> <target>"
- sys.exit(1)
-
- source = sys.argv[1]
- target = sys.argv[2]
-
- print "Making a symbolic link from {0} to {1}".format(source, target)
-
- sysName = platform.system()
- if sysName == "Windows": # and not os.path.exists(target)
- if os.path.isdir(source):
- call(["mklink", "/D", target, source], shell=True)
- else:
- call(["mklink", target, source], shell=True)
- else:
- call(["ln", "-f", "-s", "-n", source, target])
-
-if __name__ == '__main__':
- symlink()
diff --git a/build/scripts/tar_directory.py b/build/scripts/tar_directory.py
deleted file mode 100644
index a91889fa22..0000000000
--- a/build/scripts/tar_directory.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import os
-import sys
-import tarfile
-
-
-def is_exe(fpath):
- return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
-
-
-def main(args):
- if len(args) < 2 or len(args) > 3:
- raise Exception("Illegal usage: `tar_directory.py archive.tar directory [skip prefix]` or `tar_directory.py archive.tar output_directory --extract`")
- tar, directory, prefix, extract = args[0], args[1], None, False
- if len(args) == 3:
- if args[2] == '--extract':
- extract = True
- else:
- prefix = args[2]
- for tar_exe in ('/usr/bin/tar', '/bin/tar'):
- if not is_exe(tar_exe):
- continue
- if extract:
- dest = os.path.abspath(directory)
- if not os.path.exists(dest):
- os.makedirs(dest)
- os.execv(tar_exe, [tar_exe, '-xf', tar, '-C', dest])
- else:
- source = os.path.relpath(directory, prefix) if prefix else directory
- os.execv(tar_exe, [tar_exe, '-cf', tar] + (['-C', prefix] if prefix else []) + [source])
- break
- else:
- if extract:
- dest = os.path.abspath(directory)
- if not os.path.exists(dest):
- os.makedirs(dest)
- with tarfile.open(tar, 'r') as tar_file:
- tar_file.extractall(dest)
- else:
- source = directory
- with tarfile.open(tar, 'w') as out:
- out.add(os.path.abspath(source), arcname=os.path.relpath(source, prefix) if prefix else source)
-
-
-if __name__ == '__main__':
- main(sys.argv[1:])
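
The two modes, restated from the usage message (names illustrative):

    # Pack:    python tar_directory.py archive.tar some/dir some      (strips the 'some' prefix)
    # Unpack:  python tar_directory.py archive.tar out_dir --extract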
diff --git a/build/scripts/unpacking_jtest_runner.py b/build/scripts/unpacking_jtest_runner.py
deleted file mode 100644
index 9730dcd711..0000000000
--- a/build/scripts/unpacking_jtest_runner.py
+++ /dev/null
@@ -1,148 +0,0 @@
-import io
-import json
-import optparse
-import os
-import sys
-import subprocess
-import time
-import zipfile
-import platform
-
-# This script changes the test-run classpath by unpacking tests.jar -> tests-dir. The goal
-# is to launch tests with the same classpath as Maven does.
-
-
-def parse_args():
- parser = optparse.OptionParser()
- parser.disable_interspersed_args()
- parser.add_option('--trace-file')
- parser.add_option('--jar-binary')
- parser.add_option('--tests-jar-path')
- parser.add_option('--classpath-option-type', choices=('manifest', 'command_file', 'list'), default='manifest')
- return parser.parse_args()
-
-
-# Temporary, for jdk8/jdk9+ compatibility: on JDK 8, strip the --add-exports/--add-modules options it does not understand
-def fix_cmd(cmd):
- if not cmd:
- return cmd
- java = cmd[0]
- if not java.endswith('java') and not java.endswith('java.exe'):
- return cmd
- p = subprocess.Popen([java, '-version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out, err = p.communicate()
- out, err = out.strip(), err.strip()
- if ((out or '').strip().startswith('java version "1.8') or (err or '').strip().startswith('java version "1.8')):
- res = []
- i = 0
- while i < len(cmd):
- for option in ('--add-exports', '--add-modules'):
- if cmd[i] == option:
- i += 1
- break
- elif cmd[i].startswith(option + '='):
- break
- else:
- res.append(cmd[i])
- i += 1
- return res
- return cmd
-
-
-def dump_event(etype, data, filename):
- event = {
- 'timestamp': time.time(),
- 'value': data,
- 'name': etype,
- }
-
- with io.open(filename, 'a', encoding='utf8') as afile:
- afile.write(unicode(json.dumps(event) + '\n'))
-
-
-def dump_chunk_event(data, filename):
- return dump_event('chunk-event', data, filename)
-
-
-def extract_jars(dest, archive):
- os.makedirs(dest)
- with zipfile.ZipFile(archive) as zf:
- zf.extractall(dest)
-
-
-def make_bfg_from_cp(class_path, out):
- class_path = ' '.join(
- map(lambda path: ('file:/' + path.lstrip('/')) if os.path.isabs(path) else path, class_path)
- )
- with zipfile.ZipFile(out, 'w') as zf:
- lines = []
- while class_path:
- lines.append(class_path[:60])
- class_path = class_path[60:]
- if lines:
- zf.writestr('META-INF/MANIFEST.MF', 'Manifest-Version: 1.0\nClass-Path: \n ' + '\n '.join(lines) + ' \n\n')
-
-
-def make_command_file_from_cp(class_path, out):
- with open(out, 'w') as cp_file:
- cp_file.write(os.pathsep.join(class_path))
-
-
-def main():
- s = time.time()
- opts, args = parse_args()
-
- # unpack tests jar
- try:
- build_root = args[args.index('--build-root') + 1]
- dest = os.path.join(build_root, 'test-classes')
- except Exception:
- build_root = ''
- dest = os.path.abspath('test-classes')
-
- extract_jars(dest, opts.tests_jar_path)
-
- metrics = {
- 'suite_jtest_extract_jars_(seconds)': time.time() - s,
- }
-
- s = time.time()
- # fix java classpath
- cp_idx = args.index('-classpath')
- if args[cp_idx + 1].startswith('@'):
- real_name = args[cp_idx + 1][1:]
- mf = os.path.join(os.path.dirname(real_name), 'fixed.bfg.jar')
- with open(real_name) as origin:
- class_path = [os.path.join(build_root, i.strip()) for i in origin]
- if opts.tests_jar_path in class_path:
- class_path.remove(opts.tests_jar_path)
- if opts.classpath_option_type == 'manifest':
- make_bfg_from_cp(class_path, mf)
- mf = os.pathsep.join([dest, mf])
- elif opts.classpath_option_type == 'command_file':
- mf = os.path.splitext(mf)[0] + '.txt'
- make_command_file_from_cp([dest] + class_path, mf)
- mf = "@" + mf
- elif opts.classpath_option_type == 'list':
- mf = os.pathsep.join([dest] + class_path)
- else:
- raise Exception("Unexpected classpath option type: " + opts.classpath_option_type)
- args = fix_cmd(args[:cp_idx + 1]) + [mf] + args[cp_idx + 2:]
- else:
- args[cp_idx + 1] = args[cp_idx + 1].replace(opts.tests_jar_path, dest)
- args = fix_cmd(args[:cp_idx]) + args[cp_idx:]
-
- metrics['suite_jtest_fix_classpath_(seconds)'] = time.time() - s
-
- if opts.trace_file:
- dump_chunk_event({'metrics': metrics}, opts.trace_file)
-
- # run java cmd
- if platform.system() == 'Windows':
- sys.exit(subprocess.Popen(args).wait())
- else:
- os.execv(args[0], args)
-
-
-if __name__ == '__main__':
- main()
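
The manifest route above exists because Windows command lines cannot hold long classpaths, while a jar's MANIFEST.MF Class-Path can, provided its lines are wrapped with leading-space continuations. A minimal sketch of the same wrapping rule make_bfg_from_cp applies (60-character chunks):

    def wrap_class_path(entries):
        # Join the entries, then split into 60-char chunks, each
        # continuation line starting with a single space.
        s = ' '.join(entries)
        lines = [s[i:i + 60] for i in range(0, len(s), 60)]
        return 'Manifest-Version: 1.0\nClass-Path: \n ' + '\n '.join(lines) + ' \n\n'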
diff --git a/build/scripts/with_coverage.py b/build/scripts/with_coverage.py
deleted file mode 100644
index d62435c3b8..0000000000
--- a/build/scripts/with_coverage.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# TODO prettyboy remove after ya-bin release
-
-import os
-import sys
-import subprocess
-import tarfile
-import random
-import shutil
-
-
-def mkdir_p(path):
- try:
- os.makedirs(path)
- except OSError:
- pass
-
-
-def main(args):
- coverage_path = os.path.abspath(args[0])
- coverage_dir = coverage_path + '.' + str(random.getrandbits(64))
-
- mkdir_p(coverage_dir)
-
- env = os.environ.copy()
- env['GCOV_PREFIX'] = coverage_dir
-
- subprocess.check_call(args[1:], env=env)
-
- arch_path = coverage_dir + '.archive'
-
- with tarfile.open(arch_path, 'w:') as tar:
- tar.add(coverage_dir, arcname='.')
-
- os.rename(arch_path, coverage_path)
-
- shutil.rmtree(coverage_dir)
-
-
-if __name__ == '__main__':
- main(sys.argv[1:])
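
Why GCOV_PREFIX: gcov-instrumented binaries write their .gcda counter files to the absolute paths recorded at compile time; setting GCOV_PREFIX reroots those writes, so this run's counters land under coverage_dir and can be tarred up afterwards. Illustration (paths hypothetical):

    # With GCOV_PREFIX=/work/cov.12345, a write that would go to
    # /build/obj/foo.gcda lands at /work/cov.12345/build/obj/foo.gcda.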
diff --git a/build/scripts/with_crash_on_timeout.py b/build/scripts/with_crash_on_timeout.py
deleted file mode 100644
index bde864ed29..0000000000
--- a/build/scripts/with_crash_on_timeout.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# TODO prettyboy remove after ya-bin release
-
-import os
-import sys
-import subprocess
-import json
-
-
-def main(args):
- meta_path = os.path.abspath(args[0])
- timeout_code = int(args[1])
- subprocess.check_call(args[2:])
- with open(meta_path) as f:
- meta_info = json.loads(f.read())
- if meta_info["exit_code"] == timeout_code:
- print >> sys.stderr, meta_info["project"], 'crashed by timeout, use --test-disable-timeout option'
- return 1
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
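
The meta file is assumed to carry at least an exit code and a project name (values illustrative):

    # meta.json: {"exit_code": 10, "project": "devtools/example"}
    # python with_crash_on_timeout.py meta.json 10 ./run_test ...
    # -> exits 1 and suggests --test-disable-timeout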
diff --git a/build/scripts/with_pathsep_resolve.py b/build/scripts/with_pathsep_resolve.py
deleted file mode 100644
index 37c8c598ae..0000000000
--- a/build/scripts/with_pathsep_resolve.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import sys
-import os
-import subprocess
-import platform
-
-
-def fix_args(args):
- just_replace_it = False
- for arg in args:
- if arg == '--fix-path-sep':
- just_replace_it = True
- continue
- if just_replace_it:
- arg = arg.replace('::', os.pathsep)
- just_replace_it = False
- yield arg
-
-if __name__ == '__main__':
- res = list(fix_args(sys.argv[1:]))
- if platform.system() == 'Windows':
- sys.exit(subprocess.Popen(res).wait())
- else:
- os.execv(res[0], res)
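
The marker consumes itself and rewrites only the argument immediately after it; on POSIX os.pathsep is ':' (values hypothetical):

    # ['prog', '--fix-path-sep', 'a::b::c', 'later::arg']
    #     -> ['prog', 'a:b:c', 'later::arg']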
diff --git a/build/scripts/wrap_groovyc.py b/build/scripts/wrap_groovyc.py
deleted file mode 100644
index 068b73fd87..0000000000
--- a/build/scripts/wrap_groovyc.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import platform
-import sys
-import os
-import subprocess
-
-
-def fix_windows(args):
- for arg in args:
- if os.path.basename(arg) == 'groovyc' and os.path.basename(os.path.dirname(arg)) == 'bin':
- yield arg + '.bat'
- else:
- yield arg
-
-
-if __name__ == '__main__':
- env = os.environ.copy()
- jdk = sys.argv[1]
- env['JAVA_HOME'] = jdk
- args = sys.argv[2:]
- if platform.system() == 'Windows':
- sys.exit(subprocess.Popen(list(fix_windows(args)), env=env).wait())
- else:
- os.execve(args[0], args, env)
diff --git a/build/scripts/wrapper.py b/build/scripts/wrapper.py
deleted file mode 100644
index 1e9d7955a5..0000000000
--- a/build/scripts/wrapper.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import os
-import sys
-
-
-if __name__ == '__main__':
- path = sys.argv[1]
-
- if path[0] != '/':
- path = os.path.join(os.path.dirname(__file__), path)
-
- os.execv(path, [path] + sys.argv[2:])
diff --git a/build/scripts/writer.py b/build/scripts/writer.py
deleted file mode 100644
index 21bb3006e5..0000000000
--- a/build/scripts/writer.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import sys
-import argparse
-
-import process_command_files as pcf
-
-
-def parse_args():
- args = pcf.get_args(sys.argv[1:])
- parser = argparse.ArgumentParser()
- parser.add_argument('-f', '--file', dest='file_path')
- parser.add_argument('-a', '--append', action='store_true', default=False)
- parser.add_argument('-Q', '--quote', action='store_true', default=False)
- parser.add_argument('-s', '--addspace', action='store_true', default=False)
- parser.add_argument('-c', '--content', action='append', dest='content')
- parser.add_argument('-m', '--content-multiple', nargs='*', dest='content')
- parser.add_argument('-P', '--path-list', action='store_true', default=False)
- return parser.parse_args(args)
-
-
-def smart_shell_quote(v):
- if v is None:
- return None
- if ' ' in v or '"' in v or "'" in v:
- return "\"{0}\"".format(v.replace('"', '\\"'))
- return v
-
-if __name__ == '__main__':
- args = parse_args()
- open_type = 'a' if args.append else 'w'
-
- content = args.content
- if args.quote:
- content = [smart_shell_quote(ln) for ln in content] if content is not None else None
- content = '\n'.join(content) if content is not None else None
-
- with open(args.file_path, open_type) as f:
- if args.addspace:
- f.write(' ')
- if content is not None:
- f.write(content)
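
A small usage sketch (file name hypothetical):

    # python writer.py -f out.txt -Q -m 'a b' c
    # out.txt then contains:
    # "a b"
    # c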
diff --git a/build/scripts/yndexer.py b/build/scripts/yndexer.py
deleted file mode 100644
index a38e28ba99..0000000000
--- a/build/scripts/yndexer.py
+++ /dev/null
@@ -1,79 +0,0 @@
-import sys
-import subprocess
-import threading
-import os
-import re
-
-
-rx_resource_dir = re.compile(r'libraries: =([^:]*)')
-
-
-def _try_to_kill(process):
- try:
- process.kill()
- except Exception:
- pass
-
-
-def touch(path):
- if not os.path.exists(path):
- with open(path, 'w'):
- pass
-
-
-class Process(object):
- def __init__(self, args):
- self._process = subprocess.Popen(args)
- self._event = threading.Event()
- self._result = None
- thread = threading.Thread(target=self._run)
- thread.setDaemon(True)
- thread.start()
-
- def _run(self):
- self._process.communicate()
- self._result = self._process.returncode
- self._event.set()
-
- def wait(self, timeout):
- self._event.wait(timeout=timeout)
- _try_to_kill(self._process)
- return self._result
-
-
-if __name__ == '__main__':
- args = sys.argv
-
- yndexer = args[1]
- timeout = int(args[2])
- arc_root = args[3]
- build_root = args[4]
- input_file = args[5]
- output_file = args[-1]
- tail_args = args[6:-1]
-
- subprocess.check_call(tail_args)
-
- clang = tail_args[0]
- out = subprocess.check_output([clang, '-print-search-dirs'])
- resource_dir = rx_resource_dir.search(out).group(1)
-
- yndexer_args = [
- yndexer, input_file,
- '-pb2',
- '-i', 'arc::{}'.format(arc_root),
- '-i', 'build::{}'.format(build_root),
- '-i', '.IGNORE::/',
- '-o', os.path.dirname(output_file),
- '-n', os.path.basename(output_file).rsplit('.ydx.pb2', 1)[0],
- '--'
- ] + tail_args + [
- '-resource-dir', resource_dir,
- ]
-
- process = Process(yndexer_args)
- result = process.wait(timeout=timeout)
-
- if result != 0:
- print >> sys.stderr, 'Yndexing process finished with code', result
- touch(output_file)
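
The resource dir is scraped from 'clang -print-search-dirs', whose output looks roughly like this (abridged; exact paths vary by clang version):

    # programs: =/usr/bin
    # libraries: =/usr/lib/clang/14.0.0:/usr/lib
    # rx_resource_dir captures '/usr/lib/clang/14.0.0' (everything up to the first ':').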