author     iaz1607 <iaz1607@yandex-team.com>  2023-11-30 12:16:39 +0300
committer  iaz1607 <iaz1607@yandex-team.com>  2023-11-30 12:56:46 +0300
commit     8951ddf780e02616cdb2ec54a02bc354e8507c0f (patch)
tree       478097488957d3b554e25868c972a959bb40d78e /build
parent     a5acb7aa4ca5a4603215e878eb0cad786793262b (diff)
download   ydb-8951ddf780e02616cdb2ec54a02bc354e8507c0f.tar.gz
`build/scripts` ya style --py
Diffstat (limited to 'build')
93 files changed, 871 insertions, 519 deletions
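The patch below is mechanical. Judging from the hunks, the styler (apparently Black-compatible settings, with string quoting left alone) explodes over-long calls one argument per line, adds a trailing comma when a call has several arguments, normalizes spacing around `=`, `>>` and inline comments, and separates top-level definitions with two blank lines. A hypothetical before/after in the same spirit; `d` and `ext` are made-up names, not from the patch:

    # Before: one call crammed onto a single over-long line.
    print("Requested to unpack '{}' which do not have required extension '{}'".format(d, ext), file=sys.stderr)

    # After: one argument per line, with a trailing comma so later
    # additions touch only a single line of the call.
    print(
        "Requested to unpack '{}' which do not have required extension '{}'".format(d, ext),
        file=sys.stderr,
    )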
diff --git a/build/scripts/append_file.py b/build/scripts/append_file.py
index 1413cec352..8efa38241d 100644
--- a/build/scripts/append_file.py
+++ b/build/scripts/append_file.py
@@ -2,7 +2,6 @@ import sys
 
 if __name__ == "__main__":
-
     file_path = sys.argv[1]
 
     with open(file_path, "a") as f:
         for text in sys.argv[2:]:
diff --git a/build/scripts/autotar_gendirs.py b/build/scripts/autotar_gendirs.py
index a1228108aa..7784ba18c7 100644
--- a/build/scripts/autotar_gendirs.py
+++ b/build/scripts/autotar_gendirs.py
@@ -15,7 +15,9 @@ def pack_dir(dir_path, dest_path):
     dir_path = os.path.abspath(dir_path)
     for tar_exe in ('/usr/bin/tar', '/bin/tar'):
         if is_exe(tar_exe):
-            subprocess.check_call([tar_exe, '-cf', dest_path, '-C', os.path.dirname(dir_path), os.path.basename(dir_path)])
+            subprocess.check_call(
+                [tar_exe, '-cf', dest_path, '-C', os.path.dirname(dir_path), os.path.basename(dir_path)]
+            )
             break
     else:
         with tarfile.open(dest_path, 'w') as out:
@@ -55,7 +57,10 @@ def main(args):
     elif args.unpack:
         for tared_dir in args.dirs:
             if not tared_dir.endswith(args.ext):
-                print("Requested to unpack '{}' which do not have required extension '{}'".format(tared_dir, args.ext), file=sys.stderr)
+                print(
+                    "Requested to unpack '{}' which do not have required extension '{}'".format(tared_dir, args.ext),
+                    file=sys.stderr,
+                )
                 return 1
             dest = os.path.dirname(tared_dir)
             unpack_dir(tared_dir, dest)
diff --git a/build/scripts/build_catboost.py b/build/scripts/build_catboost.py
index 78334fc5f7..81d4e795a0 100755
--- a/build/scripts/build_catboost.py
+++ b/build/scripts/build_catboost.py
@@ -1,9 +1,9 @@
 import sys
 import os
 import shutil
-import re
 import subprocess
 
+
 def get_value(val):
     dct = val.split('=', 1)
     if len(dct) > 1:
@@ -13,7 +13,6 @@ def get_value(val):
 
 class BuildCbBase(object):
     def run(self, cbmodel, cbname, cb_cpp_path):
-
         data_prefix = "CB_External_"
         data = data_prefix + cbname
         datasize = data + "Size"
@@ -33,17 +32,20 @@ class BuildCbBase(object):
         cb_cpp_tmp.write("    extern const ui32 {1}{0}Size;\n".format(cbname, data_prefix))
         cb_cpp_tmp.write("    }\n")
         cb_cpp_tmp.write("}\n")
-        archiverCall = subprocess.Popen([self.archiver, "-q", "-p", "-o", ro_data_path, cbmodel], stdout=None, stderr=subprocess.PIPE)
+        archiverCall = subprocess.Popen(
+            [self.archiver, "-q", "-p", "-o", ro_data_path, cbmodel], stdout=None, stderr=subprocess.PIPE
+        )
         archiverCall.wait()
         cb_cpp_tmp.write("extern {0} {1};\n".format(cbtype, cbname))
         cb_cpp_tmp.write("{0} {1}{2};".format(cbtype, cbname, cbload))
         cb_cpp_tmp.close()
         shutil.move(cb_cpp_tmp_path, cb_cpp_path)
 
+
 class BuildCb(BuildCbBase):
     def run(self, argv):
         if len(argv) < 5:
-            print >>sys.stderr, "BuildCb.Run(<ARCADIA_ROOT> <archiver> <mninfo> <mnname> <cppOutput> [params...])"
+            print >> sys.stderr, "BuildCb.Run(<ARCADIA_ROOT> <archiver> <mninfo> <mnname> <cppOutput> [params...])"
             sys.exit(1)
 
         self.SrcRoot = argv[0]
@@ -62,10 +64,10 @@ def build_cb_f(argv):
 
 if __name__ == '__main__':
     if len(sys.argv) < 2:
-        print >>sys.stderr, "Usage: build_cb.py <funcName> <args...>"
+        print >> sys.stderr, "Usage: build_cb.py <funcName> <args...>"
         sys.exit(1)
 
-    if (sys.argv[2:]):
+    if sys.argv[2:]:
         globals()[sys.argv[1]](sys.argv[2:])
     else:
         globals()[sys.argv[1]]()
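The pack_dir hunk in autotar_gendirs.py above keeps a for/else intact: the `else` belongs to the loop, not to the `if`, and runs only when the loop finishes without `break`, here meaning no tar binary was found. A minimal sketch of the construct; the paths stand in for the script's is_exe() candidates:

    import os

    for candidate in ('/usr/bin/tar', '/bin/tar'):
        if os.path.exists(candidate):  # stand-in for the script's is_exe() check
            print('packing with', candidate)
            break
    else:
        # reached only if the loop ran to completion without break
        print('no tar binary found, falling back to the tarfile module')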
diff --git a/build/scripts/build_info_gen.py b/build/scripts/build_info_gen.py
index ba59e50ac6..b357f9319e 100644
--- a/build/scripts/build_info_gen.py
+++ b/build/scripts/build_info_gen.py
@@ -46,8 +46,8 @@ def get_compiler_info(compiler):
     env['LOCALE'] = 'C'
     compiler_ver_out = (
         subprocess.Popen(compiler_ver_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env)
-        .stdout.read()
-        .decode('utf-8')
+        .stdout.read()
+        .decode('utf-8')
     )
     return "\n".join(
         # fmt: off
diff --git a/build/scripts/build_java_codenav_index.py b/build/scripts/build_java_codenav_index.py
index d7ac4f3213..8a4b297652 100644
--- a/build/scripts/build_java_codenav_index.py
+++ b/build/scripts/build_java_codenav_index.py
@@ -29,7 +29,11 @@ def just_do_it(argv):
         arcadia_root: fake_arcadia_root,
         build_root: fake_build_root,
     }
-    modify_sources_file(sources_file, os.path.join(os.path.dirname(sources_file), '_' + os.path.basename(sources_file)), fake_source_roots)
+    modify_sources_file(
+        sources_file,
+        os.path.join(os.path.dirname(sources_file), '_' + os.path.basename(sources_file)),
+        fake_source_roots,
+    )
     kindex_data_root = '{}/kindex'.format(os.path.join(build_root, os.path.dirname(corpus_name)))
     if not os.path.exists(kindex_data_root):
         os.makedirs(kindex_data_root)
@@ -45,5 +49,6 @@ def just_do_it(argv):
     os.unlink(fake_arcadia_root)
     os.unlink(fake_build_root)
 
+
 if __name__ == '__main__':
     just_do_it(sys.argv[1:])
diff --git a/build/scripts/build_java_with_error_prone.py b/build/scripts/build_java_with_error_prone.py
index 910443552e..66fc7daed7 100644
--- a/build/scripts/build_java_with_error_prone.py
+++ b/build/scripts/build_java_with_error_prone.py
@@ -14,7 +14,7 @@ JAVA10_EXPORTS = [
     '--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED',
     '--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED',
     '--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED',
-    '--add-exports=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED'
+    '--add-exports=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED',
 ]
 
 
@@ -27,9 +27,21 @@ def just_do_it(argv):
     for f in ERROR_PRONE_FLAGS:
         if f in javac_cmd:
             javac_cmd.remove(f)
-        os.execv(java, [java] + JAVA10_EXPORTS + ['-processorpath', error_prone_tool, '-XDcompilePolicy=byfile'] + [(' '.join(['-Xplugin:ErrorProne'] + ERROR_PRONE_FLAGS))] + javac_cmd)
+        os.execv(
+            java,
+            [java]
+            + JAVA10_EXPORTS
+            + ['-processorpath', error_prone_tool, '-XDcompilePolicy=byfile']
+            + [(' '.join(['-Xplugin:ErrorProne'] + ERROR_PRONE_FLAGS))]
+            + javac_cmd,
+        )
     else:
-        os.execv(java, [java, '-Xbootclasspath/p:' + error_prone_tool, 'com.google.errorprone.ErrorProneCompiler'] + ERROR_PRONE_FLAGS + javac_cmd)
+        os.execv(
+            java,
+            [java, '-Xbootclasspath/p:' + error_prone_tool, 'com.google.errorprone.ErrorProneCompiler']
+            + ERROR_PRONE_FLAGS
+            + javac_cmd,
+        )
 
 
 if __name__ == '__main__':
diff --git a/build/scripts/build_java_with_error_prone2.py b/build/scripts/build_java_with_error_prone2.py
index ddf1ccbfc1..fa65cf19b9 100644
--- a/build/scripts/build_java_with_error_prone2.py
+++ b/build/scripts/build_java_with_error_prone2.py
@@ -55,7 +55,7 @@ def parse_args(argv):
             parsed.append(argv[i])
             if len(parsed) >= 3:
                 break
-    return parsed + [argv[i + 1:]]
+    return parsed + [argv[i + 1 :]]
 
 
 def just_do_it(argv):
@@ -74,9 +74,19 @@ def just_do_it(argv):
         classpath = get_classpath(javac_cmd)
         if classpath:
             error_prone_tool = error_prone_tool + os.pathsep + classpath
-        cmd = [javac] + JAVA10_EXPORTS + ['-processorpath', error_prone_tool, '-XDcompilePolicy=byfile'] + [(' '.join(['-Xplugin:ErrorProne'] + ERROR_PRONE_FLAGS))] + javac_cmd
+        cmd = (
+            [javac]
+            + JAVA10_EXPORTS
+            + ['-processorpath', error_prone_tool, '-XDcompilePolicy=byfile']
+            + [(' '.join(['-Xplugin:ErrorProne'] + ERROR_PRONE_FLAGS))]
+            + javac_cmd
+        )
     else:
-        cmd = [java, '-Xbootclasspath/p:' + error_prone_tool, 'com.google.errorprone.ErrorProneCompiler'] + ERROR_PRONE_FLAGS + javac_cmd
+        cmd = (
+            [java, '-Xbootclasspath/p:' + error_prone_tool, 'com.google.errorprone.ErrorProneCompiler']
+            + ERROR_PRONE_FLAGS
+            + javac_cmd
+        )
     if platform.system() == 'Windows':
         sys.exit(subprocess.Popen(cmd).wait())
     else:
diff --git a/build/scripts/c_templates/ya.make b/build/scripts/c_templates/ya.make
index b395e11021..232f3b86de 100644
--- a/build/scripts/c_templates/ya.make
+++ b/build/scripts/c_templates/ya.make
@@ -1,7 +1,11 @@
 OWNER(g:ymake)
 
 LIBRARY(dummy-vcs)
+
 NO_PLATFORM()
 
-SRCS(svn_interface.c)
+SRCS(
+    svn_interface.c
+)
+
 END()
diff --git a/build/scripts/cgo1_wrapper.py b/build/scripts/cgo1_wrapper.py
index 986082f7e9..fbd573be5e 100644
--- a/build/scripts/cgo1_wrapper.py
+++ b/build/scripts/cgo1_wrapper.py
@@ -4,7 +4,7 @@ import subprocess
 import sys
 
 
-CGO1_SUFFIX='.cgo1.go'
+CGO1_SUFFIX = '.cgo1.go'
 
 
 def call(cmd, cwd, env=None):
diff --git a/build/scripts/clang_tidy.py b/build/scripts/clang_tidy.py
index 05c3ac33ca..e93b35774b 100644
--- a/build/scripts/clang_tidy.py
+++ b/build/scripts/clang_tidy.py
@@ -113,20 +113,20 @@ def find_header(p, h):
     raise Exception('can not find inc dir')
 
 
-def fix_cmd(l, bin):
+def fix_cmd(cmd, bin):
     sp = '--sysroot='
 
-    for x in l:
+    for x in cmd:
         if '-isystem' in x and '/share/include' in x:
             # reparent compiler headers dir into clang-tidy install path
             yield '-isystem' + find_header(os.path.dirname(os.path.dirname(bin)), 'stddef.h')
         elif x.startswith(sp):
             yield '-nostdinc'
-            sr = x[len(sp):]
+            sr = x[len(sp) :]
             yield '-isystem' + sr + '/usr/include'
             yield '-isystem' + sr + '/usr/include/x86_64-linux-gnu'
         elif x == '-nostdinc++':
-            if '.c.o' in str(l):
+            if '.c.o' in str(cmd):
                 pass
             else:
                 yield x
diff --git a/build/scripts/clang_wrapper.py b/build/scripts/clang_wrapper.py
index c40f097d69..cd12da6b2b 100644
--- a/build/scripts/clang_wrapper.py
+++ b/build/scripts/clang_wrapper.py
@@ -42,7 +42,7 @@ if __name__ == '__main__':
         path = fix_path(path)
         try:
             i = args.index('-emit-llvm')
-            args[i:i+1] = ['-Xclang', '-emit-llvm']
+            args[i : i + 1] = ['-Xclang', '-emit-llvm']
         except ValueError:
             pass
         args.append('-fms-compatibility-version=19')
diff --git a/build/scripts/collect_java_srcs.py b/build/scripts/collect_java_srcs.py
index 170002520a..f361f271d1 100644
--- a/build/scripts/collect_java_srcs.py
+++ b/build/scripts/collect_java_srcs.py
@@ -22,14 +22,14 @@ if __name__ == '__main__':
 
             else:
                 # Outside root
-                print>>sys.stderr, 'External src file "{}" is outside of srcdir {}, ignore'.format(
+                print('External src file "{}" is outside of srcdir {}, ignore'.format(
                     os.path.relpath(src, build_root),
                     os.path.relpath(root, build_root),
                 )
                 continue
 
             if os.path.exists(dst):
-                print>>sys.stderr, 'Duplicate external src file {}, choice is undefined'.format(
+                print >> sys.stderr, 'Duplicate external src file {}, choice is undefined'.format(
                     os.path.relpath(dst, root)
                 )
 
@@ -48,4 +48,4 @@ if __name__ == '__main__':
                     zf.extractall(dst)
 
             else:
-                print>>sys.stderr, 'Unrecognized file type', os.path.relpath(src, build_root)
+                print >> sys.stderr, 'Unrecognized file type', os.path.relpath(src, build_root)
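Several hunks above only normalize the spacing of Python 2 print-chevron statements, turning `print>>sys.stderr, ...` into `print >> sys.stderr, ...`; the statement form itself stays, since these scripts still run under Python 2. For reference, a sketch of the Python 3 equivalent, which names the stream explicitly (the message and file name are illustrative):

    import sys

    # Python 2 statement form, as in collect_java_srcs.py above:
    #     print >> sys.stderr, 'Duplicate external src file {}, choice is undefined'.format(name)
    # Python 3 function form, equivalent apart from the statement/function split:
    print('Duplicate external src file {}, choice is undefined'.format('a/b/C.java'), file=sys.stderr)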
diff --git a/build/scripts/compile_cuda.py b/build/scripts/compile_cuda.py
index f8e1fa2b6d..eadb4519d2 100644
--- a/build/scripts/compile_cuda.py
+++ b/build/scripts/compile_cuda.py
@@ -27,8 +27,8 @@ def main():
     if sys.argv[1] == '--mtime':
         mtime0 = sys.argv[2]
         cmd = 3
-    command = sys.argv[cmd: spl]
-    cflags = sys.argv[spl + 1:]
+    command = sys.argv[cmd:spl]
+    cflags = sys.argv[spl + 1 :]
 
     dump_args = False
     if '--y_dump_args' in command:
@@ -85,16 +85,20 @@ def main():
     for flag in cflags:
         if all(not flag.startswith(skip_prefix) for skip_prefix in skip_prefix_list):
             if flag.startswith('-fopenmp-version='):
-                new_cflags.append('-fopenmp-version=45')  # Clang 11 only supports OpenMP 4.5, but the default is 5.0, so we need to forcefully redefine it.
+                new_cflags.append(
+                    '-fopenmp-version=45'
+                )  # Clang 11 only supports OpenMP 4.5, but the default is 5.0, so we need to forcefully redefine it.
             else:
                 new_cflags.append(flag)
     cflags = new_cflags
 
     if not is_clang(command):
+
         def good(arg):
             if arg.startswith('--target='):
                 return False
             return True
+
         cflags = filter(good, cflags)
 
     cpp_args = []
@@ -108,7 +112,6 @@ def main():
 
     cflags_queue = collections.deque(cflags)
     while cflags_queue:
-
         arg = cflags_queue.popleft()
         if arg == '-mllvm':
             compiler_args.append(arg)
diff --git a/build/scripts/compile_java.py b/build/scripts/compile_java.py
index e2c12852ea..d0176a9a56 100644
--- a/build/scripts/compile_java.py
+++ b/build/scripts/compile_java.py
@@ -78,11 +78,28 @@ def main():
         ts.write(' '.join(ktsrcs + srcs))
         kt_classes_dir = 'kt_cls'
         mkdir_p(kt_classes_dir)
-        sp.check_call([opts.java_bin, '-Didea.max.content.load.filesize=30720', '-jar', opts.kotlin_compiler, '-classpath', classpath, '-d', kt_classes_dir] + ktc_opts + ['@' + temp_kt_sources_file])
+        sp.check_call(
+            [
+                opts.java_bin,
+                '-Didea.max.content.load.filesize=30720',
+                '-jar',
+                opts.kotlin_compiler,
+                '-classpath',
+                classpath,
+                '-d',
+                kt_classes_dir,
+            ]
+            + ktc_opts
+            + ['@' + temp_kt_sources_file]
+        )
         classpath = os.pathsep.join([kt_classes_dir, classpath])
 
     if srcs:
-        sp.check_call([opts.javac_bin, '-nowarn', '-g', '-classpath', classpath, '-encoding', 'UTF-8', '-d', classes_dir] + javac_opts + ['@' + temp_sources_file])
+        sp.check_call(
+            [opts.javac_bin, '-nowarn', '-g', '-classpath', classpath, '-encoding', 'UTF-8', '-d', classes_dir]
+            + javac_opts
+            + ['@' + temp_sources_file]
+        )
 
     for s in jsrcs:
         if s.endswith('-sources.jar'):
diff --git a/build/scripts/configure_file.py b/build/scripts/configure_file.py
index 193ad7ec9e..66f425ec8d 100755
--- a/build/scripts/configure_file.py
+++ b/build/scripts/configure_file.py
@@ -24,7 +24,7 @@ def replaceLine(l, varDict, define):
             val = '0'
         sPos = l.find(cmakeDef01)
         ePos = l.find(var) + len(var)
-        l = l[:sPos] + define + ' ' + var + ' ' + val + l[ePos + 1:] + '\n'
+        l = l[:sPos] + define + ' ' + var + ' ' + val + l[ePos + 1 :] + '\n'
 
     finder = re.compile(".*?(@[a-zA-Z0-9_]+@).*")
     while True:
@@ -32,7 +32,7 @@ def replaceLine(l, varDict, define):
         if not re_result:
             return l
         key = re_result.group(1)[1:-1]
-        l = l[:re_result.start(1)] + varDict.get(key, '') + l[re_result.end(1):]
+        l = l[: re_result.start(1)] + varDict.get(key, '') + l[re_result.end(1) :]
diff --git a/build/scripts/container.py b/build/scripts/container.py
index 27e6f921f3..3d50cb6fbb 100644
--- a/build/scripts/container.py
+++ b/build/scripts/container.py
@@ -8,18 +8,17 @@ class ContainerError(Exception):
 
 
 def join_layers(input_paths, output_path, squashfs_path):
-
     if len(input_paths) == 1:
         shutil.copy2(input_paths[0], output_path)
 
     else:
         # We cannot use appending here as it doesn't allow replacing files
         for input_path in input_paths:
-            unpack_cmd = [ os.path.join(squashfs_path, 'unsquashfs') ]
-            unpack_cmd.extend([ '-f', input_path ])
+            unpack_cmd = [os.path.join(squashfs_path, 'unsquashfs')]
+            unpack_cmd.extend(['-f', input_path])
             subprocess.run(unpack_cmd)
 
-        pack_cmd = [ os.path.join(squashfs_path, 'mksquashfs') ]
+        pack_cmd = [os.path.join(squashfs_path, 'mksquashfs')]
         pack_cmd.append(os.path.join(os.curdir, 'squashfs-root'))
         pack_cmd.append(output_path)
         pack_cmd.append('-all-root')
diff --git a/build/scripts/copy_clang_profile_rt.py b/build/scripts/copy_clang_profile_rt.py
index 7710615f32..f8058e9e64 100644
--- a/build/scripts/copy_clang_profile_rt.py
+++ b/build/scripts/copy_clang_profile_rt.py
@@ -8,7 +8,8 @@ import process_command_files as pcf
 # Remove after DTCC-1902
 CLANG_RT_VERSIONS = [14, 16]
 
-def copy_clang_rt_profile(cmd, build_root, arch) -> None:
+
+def copy_clang_rt_profile(cmd, build_root, arch):
     profile_rt_lib = None
     resource_dir = None
 
@@ -18,7 +19,7 @@ def copy_clang_rt_profile(cmd, build_root, arch) -> None:
             profile_rt_lib = arg
             break
         if arg.startswith('-resource-dir='):
-            resource_dir = arg[len('-resource-dir='):]
+            resource_dir = arg[len('-resource-dir=') :]
 
     profile_rt_path = os.path.join(build_root, profile_rt_lib)
     profile_name = os.path.basename(profile_rt_path)
diff --git a/build/scripts/copy_docs_files.py b/build/scripts/copy_docs_files.py
index c444dd509d..361932bfe2 100644
--- a/build/scripts/copy_docs_files.py
+++ b/build/scripts/copy_docs_files.py
@@ -94,7 +94,7 @@ def main():
             dst_file = os.path.join(dst_dir, f)
             if src_file == dst_file:
                 continue
-            rel_path = src_file[len(root):] if is_from_source_root else None
+            rel_path = src_file[len(root) :] if is_from_source_root else None
             copy_file(src_file, dst_file, overwrite=is_overwrite_existing, orig_path=rel_path)
diff --git a/build/scripts/copy_docs_files_to_dir.py b/build/scripts/copy_docs_files_to_dir.py
index 45a39c2ae7..3af3ee8992 100644
--- a/build/scripts/copy_docs_files_to_dir.py
+++ b/build/scripts/copy_docs_files_to_dir.py
@@ -98,7 +98,9 @@ def main():
                 file_src = os.path.normpath(os.path.join(root, f))
                 assert file_src.startswith(source_root)
                 file_dst = os.path.join(dst, os.path.relpath(root, abs_docs_dir), f)
-                copy_file(file_src, file_dst, overwrite=is_overwrite_existing, orig_path=file_src[len(source_root):])
+                copy_file(
+                    file_src, file_dst, overwrite=is_overwrite_existing, orig_path=file_src[len(source_root) :]
+                )
 
     if args.src_dirs:
         for item in args.src_dirs:
@@ -122,8 +124,8 @@ def main():
             for f in item[2:]:
                 file_src = os.path.normpath(f)
                 assert file_src.startswith(root)
-                rel_path = file_src[len(root):] if is_from_source_root else None
-                file_dst = os.path.join(dst, file_src[len(src_dir):])
+                rel_path = file_src[len(root) :] if is_from_source_root else None
+                file_dst = os.path.join(dst, file_src[len(src_dir) :])
                 copy_file(file_src, file_dst, overwrite=is_overwrite_existing, orig_path=rel_path)
 
     if args.bin_dir:
@@ -139,7 +141,7 @@ def main():
         for file_src in args.bin_dir[2:]:
             assert os.path.isfile(file_src)
             assert file_src.startswith(bin_dir)
-            file_dst = os.path.join(dst, file_src[len(bin_dir):])
+            file_dst = os.path.join(dst, file_src[len(bin_dir) :])
             copy_file(file_src, file_dst, overwrite=is_overwrite_existing, orig_path=None)
 
     for src in args.files:
@@ -148,10 +150,10 @@ def main():
         rel_path = file_src
         orig_path = None
         if file_src.startswith(source_root):
-            rel_path = file_src[len(source_root):]
+            rel_path = file_src[len(source_root) :]
             orig_path = rel_path
         elif file_src.startswith(build_root):
-            rel_path = file_src[len(build_root):]
+            rel_path = file_src[len(build_root) :]
         else:
             raise Exception('Unexpected file path [{}].'.format(file_src))
         assert not os.path.isabs(rel_path)
diff --git a/build/scripts/copy_files_to_dir.py b/build/scripts/copy_files_to_dir.py
index ead57ba16e..2403f0a683 100644
--- a/build/scripts/copy_files_to_dir.py
+++ b/build/scripts/copy_files_to_dir.py
@@ -43,7 +43,7 @@ def main():
         rel_dst = src
         for prefix in prefixes:
             if src.startswith(prefix):
-                rel_dst = src[len(prefix):]
+                rel_dst = src[len(prefix) :]
                 break
         assert not os.path.isabs(rel_dst)
         dst = os.path.join(args.dest_dir, rel_dst)
diff --git a/build/scripts/copy_to_dir.py b/build/scripts/copy_to_dir.py
index b53db8d066..ab71a3e1e2 100644
--- a/build/scripts/copy_to_dir.py
+++ b/build/scripts/copy_to_dir.py
@@ -52,12 +52,16 @@ def main():
         dest_arch = tarfile.open(opts.dest_arch, 'w:gz', dereference=True)
     else:
         # TODO: move check to graph generation stage
-        raise Exception('Unsopported archive type for {}. Use one of: tar, tar.gz, tgz.'.format(os.path.basename(opts.dest_arch)))
+        raise Exception(
+            'Unsopported archive type for {}. Use one of: tar, tar.gz, tgz.'.format(
+                os.path.basename(opts.dest_arch)
+            )
+        )
 
     for arg in opts.args:
         dst = arg
         if dst.startswith(opts.build_root):
-            dst = dst[len(opts.build_root) + 1:]
+            dst = dst[len(opts.build_root) + 1 :]
 
         if dest_arch and not arg.endswith('.pkg.fake'):
             dest_arch.add(arg, arcname=dst)
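The slice rewrites throughout this patch, for example `dst[len(opts.build_root) + 1:]` becoming `dst[len(opts.build_root) + 1 :]` just above, follow the PEP 8 rule that a slice colon acts like a binary operator and gets symmetric spacing when a bound is a computed expression. Whitespace around the colon never changes what the slice selects; a self-contained check with made-up paths:

    build_root = '/home/user/.ya/build'
    path = build_root + '/pkg/file.txt'

    # Both spellings denote the same slice; only the layout differs.
    assert path[len(build_root) + 1:] == path[len(build_root) + 1 :] == 'pkg/file.txt'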
diff --git a/build/scripts/coverage-info.py b/build/scripts/coverage-info.py
index 94491d9256..ddc5f275f8 100644
--- a/build/scripts/coverage-info.py
+++ b/build/scripts/coverage-info.py
@@ -49,7 +49,7 @@ def recast(in_file, out_file, probe_path, update_stat):
             if line.startswith('TN:'):
                 output.write(line + '\n')
             elif line.startswith(PREFIX):
-                path = line[len(PREFIX):]
+                path = line[len(PREFIX) :]
                 probed_path = probe_path(path)
                 if probed_path:
                     output.write(PREFIX + probed_path + '\n')
@@ -72,16 +72,28 @@ def print_stat(da, fnda, teamcity_stat_output):
     func_total = len(fnda.values())
     func_coverage = 100.0 * func_hit / func_total if func_total else 0
 
-    print >>sys.stderr, '[[imp]]Lines[[rst]]     {: >16} {: >16} {: >16.1f}%'.format(lines_hit, lines_total, lines_coverage)
-    print >>sys.stderr, '[[imp]]Functions[[rst]] {: >16} {: >16} {: >16.1f}%'.format(func_hit, func_total, func_coverage)
+    print >> sys.stderr, '[[imp]]Lines[[rst]]     {: >16} {: >16} {: >16.1f}%'.format(
+        lines_hit, lines_total, lines_coverage
+    )
+    print >> sys.stderr, '[[imp]]Functions[[rst]] {: >16} {: >16} {: >16.1f}%'.format(
+        func_hit, func_total, func_coverage
+    )
 
     if teamcity_stat_output:
         with open(teamcity_stat_output, 'w') as tc_file:
             tc_file.write("##teamcity[blockOpened name='Code Coverage Summary']\n")
-            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsLTotal\' value='{}']\n".format(lines_total))
-            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsLCovered\' value='{}']\n".format(lines_hit))
-            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsMTotal\' value='{}']\n".format(func_total))
-            tc_file.write("##teamcity[buildStatisticValue key=\'CodeCoverageAbsMCovered\' value='{}']\n".format(func_hit))
+            tc_file.write(
+                "##teamcity[buildStatisticValue key=\'CodeCoverageAbsLTotal\' value='{}']\n".format(lines_total)
+            )
+            tc_file.write(
+                "##teamcity[buildStatisticValue key=\'CodeCoverageAbsLCovered\' value='{}']\n".format(lines_hit)
+            )
+            tc_file.write(
+                "##teamcity[buildStatisticValue key=\'CodeCoverageAbsMTotal\' value='{}']\n".format(func_total)
+            )
+            tc_file.write(
+                "##teamcity[buildStatisticValue key=\'CodeCoverageAbsMCovered\' value='{}']\n".format(func_hit)
+            )
             tc_file.write("##teamcity[blockClosed name='Code Coverage Summary']\n")
@@ -93,7 +105,7 @@ def chunks(l, n):
     [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]
     """
     for i in xrange(0, len(l), n):
-        yield l[i:i + n]
+        yield l[i : i + n]
 
 
 def combine_info_files(lcov, files, out_file):
@@ -107,7 +119,7 @@ def combine_info_files(lcov, files, out_file):
         for trace in chunk:
             assert os.path.exists(trace), "Trace file does not exist: {} (cwd={})".format(trace, os.getcwd())
             combine_cmd += ["-a", os.path.abspath(trace)]
-        print >>sys.stderr, '## lcov', ' '.join(combine_cmd[1:])
+        print >> sys.stderr, '## lcov', ' '.join(combine_cmd[1:])
         out_file_tmp = "combined.tmp"
         with open(out_file_tmp, "w") as stdout:
             subprocess.check_call(combine_cmd, stdout=stdout)
@@ -121,7 +133,9 @@ def probe_path_global(path, source_root, prefix_filter, exclude_files):
         return None
 
     for suff in reversed(list(suffixes(path))):
-        if (not prefix_filter or suff.startswith(prefix_filter)) and (not exclude_files or not exclude_files.match(suff)):
+        if (not prefix_filter or suff.startswith(prefix_filter)) and (
+            not exclude_files or not exclude_files.match(suff)
+        ):
             full_path = source_root + os.sep + suff
             if os.path.isfile(full_path):
                 return full_path
@@ -131,11 +145,11 @@ def probe_path_global(path, source_root, prefix_filter, exclude_files):
 
 def update_stat_global(src_file, line, fnda, da):
     if line.startswith("FNDA:"):
-        visits, func_name = line[len("FNDA:"):].split(',')
+        visits, func_name = line[len("FNDA:") :].split(',')
         fnda[src_file + func_name] += int(visits)
 
     if line.startswith("DA"):
-        line_number, visits = line[len("DA:"):].split(',')
+        line_number, visits = line[len("DA:") :].split(',')
 
         if visits == '=====':
             visits = 0
@@ -143,25 +157,29 @@ def update_stat_global(src_file, line, fnda, da):
 
 def gen_info_global(cmd, cov_info, probe_path, update_stat, lcov_args):
-    print >>sys.stderr, '## geninfo', ' '.join(cmd)
+    print >> sys.stderr, '## geninfo', ' '.join(cmd)
     subprocess.check_call(cmd)
     if recast(cov_info + '.tmp', cov_info, probe_path, update_stat):
         lcov_args.append(cov_info)
 
 
-def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files):
+def init_all_coverage_files(
+    gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files
+):
     with tarfile.open(gcno_archive) as gcno_tf:
         for gcno_item in gcno_tf:
             if gcno_item.isfile() and gcno_item.name.endswith(GCNO_EXT):
                 gcno_tf.extract(gcno_item)
                 gcno_name = gcno_item.name
-                source_fname = gcno_name[:-len(GCNO_EXT)]
+                source_fname = gcno_name[: -len(GCNO_EXT)]
                 if prefix_filter and not source_fname.startswith(prefix_filter):
                     sys.stderr.write("Skipping {} (doesn't match prefix '{}')\n".format(source_fname, prefix_filter))
                     continue
                 if exclude_files and exclude_files.search(source_fname):
-                    sys.stderr.write("Skipping {} (matched exclude pattern '{}')\n".format(source_fname, exclude_files.pattern))
+                    sys.stderr.write(
+                        "Skipping {} (matched exclude pattern '{}')\n".format(source_fname, exclude_files.pattern)
+                    )
                     continue
 
                 fname2gcno[source_fname] = gcno_name
@@ -171,9 +189,12 @@ def init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_execut
                 fname2info[source_fname].append(coverage_info)
                 geninfo_cmd = [
                     geninfo_executable,
-                    '--gcov-tool', gcov_tool,
-                    '-i', gcno_name,
-                    '-o', coverage_info + '.tmp'
+                    '--gcov-tool',
+                    gcov_tool,
+                    '-i',
+                    gcno_name,
+                    '-o',
+                    coverage_info + '.tmp',
                 ]
                 gen_info(geninfo_cmd, coverage_info)
@@ -183,7 +204,7 @@ def process_all_coverage_files(gcda_archive, fname2gcno, fname2info, geninfo_exe
         for gcda_item in gcda_tf:
             if gcda_item.isfile() and gcda_item.name.endswith(GCDA_EXT):
                 gcda_name = gcda_item.name
-                source_fname = gcda_name[:-len(GCDA_EXT)]
+                source_fname = gcda_name[: -len(GCDA_EXT)]
                 for suff in suffixes(source_fname):
                     if suff in fname2gcno:
                         gcda_new_name = suff + GCDA_EXT
@@ -194,27 +215,38 @@ def process_all_coverage_files(gcda_archive, fname2gcno, fname2info, geninfo_exe
                         fname2info[suff].append(coverage_info)
                         geninfo_cmd = [
                             geninfo_executable,
-                            '--gcov-tool', gcov_tool,
+                            '--gcov-tool',
+                            gcov_tool,
                             gcda_new_name,
-                            '-o', coverage_info + '.tmp'
+                            '-o',
+                            coverage_info + '.tmp',
                         ]
                         gen_info(geninfo_cmd, coverage_info)
 
 
 def gen_cobertura(tool, output, combined_info):
-    cmd = [
-        tool,
-        combined_info,
-        '-b', '#hamster#',
-        '-o', output
-    ]
+    cmd = [tool, combined_info, '-b', '#hamster#', '-o', output]
     p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     out, err = p.communicate()
     if p.returncode:
-        raise Exception('lcov_cobertura failed with exit code {}\nstdout: {}\nstderr: {}'.format(p.returncode, out, err))
-
-
-def main(source_root, output, gcno_archive, gcda_archive, gcov_tool, prefix_filter, exclude_regexp, teamcity_stat_output, coverage_report_path, gcov_report, lcov_cobertura):
+        raise Exception(
+            'lcov_cobertura failed with exit code {}\nstdout: {}\nstderr: {}'.format(p.returncode, out, err)
+        )
+
+
+def main(
+    source_root,
+    output,
+    gcno_archive,
+    gcda_archive,
+    gcov_tool,
+    prefix_filter,
+    exclude_regexp,
+    teamcity_stat_output,
+    coverage_report_path,
+    gcov_report,
+    lcov_cobertura,
+):
     exclude_files = re.compile(exclude_regexp) if exclude_regexp else None
 
     fname2gcno = {}
@@ -234,7 +266,9 @@ def main(source_root, output, gcno_archive, gcda_archive, gcov_tool, prefix_filt
     def gen_info(cmd, cov_info):
         gen_info_global(cmd, cov_info, probe_path, update_stat, lcov_args)
 
-    init_all_coverage_files(gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files)
+    init_all_coverage_files(
+        gcno_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info, prefix_filter, exclude_files
+    )
     process_all_coverage_files(gcda_archive, fname2gcno, fname2info, geninfo_executable, gcov_tool, gen_info)
 
     if coverage_report_path:
@@ -253,8 +287,17 @@ def main(source_root, output, gcno_archive, gcda_archive, gcov_tool, prefix_filt
     if lcov_args:
         output_trace = "combined.info"
         combine_info_files(os.path.join(source_root, 'devtools', 'lcov', 'lcov'), lcov_args, output_trace)
-        cmd = [os.path.join(source_root, 'devtools', 'lcov', 'genhtml'), '-p', source_root, '--ignore-errors', 'source', '-o', output_dir, output_trace]
-        print >>sys.stderr, '## genhtml', ' '.join(cmd)
+        cmd = [
+            os.path.join(source_root, 'devtools', 'lcov', 'genhtml'),
+            '-p',
+            source_root,
+            '--ignore-errors',
+            'source',
+            '-o',
+            output_dir,
+            output_trace,
+        ]
+        print >> sys.stderr, '## genhtml', ' '.join(cmd)
         subprocess.check_call(cmd)
         if lcov_cobertura:
             gen_cobertura(lcov_cobertura, gcov_report, output_trace)
diff --git a/build/scripts/cpp_flatc_wrapper.py b/build/scripts/cpp_flatc_wrapper.py
index 9f74b65570..8497ff3528 100644
--- a/build/scripts/cpp_flatc_wrapper.py
+++ b/build/scripts/cpp_flatc_wrapper.py
@@ -8,8 +8,8 @@ def main():
     h_file = None
     try:
         index = cmd.index('-o')
-        h_file = cmd[index+1]
-        cmd[index+1] = os.path.dirname(h_file)
+        h_file = cmd[index + 1]
+        cmd[index + 1] = os.path.dirname(h_file)
     except (ValueError, IndexError):
         pass
     p = subprocess.run(cmd, capture_output=True, text=True)
diff --git a/build/scripts/create_jcoverage_report.py b/build/scripts/create_jcoverage_report.py
index 45083ff4f7..f24827d8ae 100644
--- a/build/scripts/create_jcoverage_report.py
+++ b/build/scripts/create_jcoverage_report.py
@@ -15,7 +15,6 @@ def mkdir_p(path):
 
 
 class Timer(object):
-
     def __init__(self):
         self.start = time.time()
 
@@ -24,7 +23,18 @@ class Timer(object):
         self.start = time.time()
 
 
-def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_format, tar_output, agent_disposition, runners_paths):
+def main(
+    source,
+    output,
+    java,
+    prefix_filter,
+    exclude_filter,
+    jars_list,
+    output_format,
+    tar_output,
+    agent_disposition,
+    runners_paths,
+):
     timer = Timer()
     reports_dir = 'jacoco_reports_dir'
     mkdir_p(reports_dir)
@@ -72,7 +82,7 @@ def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_
         timer.step("Jar files extracted")
 
     if not agent_disposition:
-        print>>sys.stderr, 'Can\'t find jacoco agent. Will not generate html report for java coverage.'
+        print >> sys.stderr, 'Can\'t find jacoco agent. Will not generate html report for java coverage.'
 
     if tar_output:
         report_dir = 'java.report.temp'
@@ -81,7 +91,17 @@ def main(source, output, java, prefix_filter, exclude_filter, jars_list, output_
     mkdir_p(report_dir)
 
     if agent_disposition:
-        agent_cmd = [java, '-jar', agent_disposition, src_dir, cls_dir, prefix_filter or '.', exclude_filter or '__no_exclude__', report_dir, output_format]
+        agent_cmd = [
+            java,
+            '-jar',
+            agent_disposition,
+            src_dir,
+            cls_dir,
+            prefix_filter or '.',
+            exclude_filter or '__no_exclude__',
+            report_dir,
+            output_format,
+        ]
         agent_cmd += reports
         subprocess.check_call(agent_cmd)
         timer.step("Jacoco finished")
diff --git a/build/scripts/custom_link_green_mysql.py b/build/scripts/custom_link_green_mysql.py
index 13bb9e4ac7..f754135b8f 100644
--- a/build/scripts/custom_link_green_mysql.py
+++ b/build/scripts/custom_link_green_mysql.py
@@ -14,6 +14,7 @@ SYMBOLS_TO_PATCH = (
     'sendto',
 )
 
+
 class Error(Exception):
     pass
diff --git a/build/scripts/decimal_md5.py b/build/scripts/decimal_md5.py
index e70ca80a09..684d39e767 100644
--- a/build/scripts/decimal_md5.py
+++ b/build/scripts/decimal_md5.py
@@ -9,9 +9,9 @@ import argparse
 
 
 def print_code(checksum, func_name):
-    if len(func_name) == 0: # safe fallback for old ya.make files
+    if len(func_name) == 0:  # safe fallback for old ya.make files
         func_name = "DecimalMD5"
-    print 'const char* ' + func_name + '() {return "' + checksum + '";}'
+    print('const char* ' + func_name + '() {return "' + checksum + '";}')
 
 
 def ensure_paths_exist(paths):
@@ -76,4 +76,3 @@ def main():
 
 if __name__ == "__main__":
     main()
-
diff --git a/build/scripts/error.py b/build/scripts/error.py
index f7d8ecb2cc..5d8702e282 100644
--- a/build/scripts/error.py
+++ b/build/scripts/error.py
@@ -32,6 +32,7 @@ def merge_exit_codes(exit_codes):
 
 def is_temporary_error(exc):
     import logging
+
     logger = logging.getLogger(__name__)
 
     if getattr(exc, 'temporary', False):
@@ -39,6 +40,7 @@ def is_temporary_error(exc):
         return True
 
     import errno
+
     err = getattr(exc, 'errno', None)
 
     if err == errno.ECONNREFUSED or err == errno.ENETUNREACH:
@@ -57,7 +59,7 @@ def is_temporary_error(exc):
 
     import urllib2
 
-    if isinstance(exc, urllib2.HTTPError) and exc.code in (429, ):
+    if isinstance(exc, urllib2.HTTPError) and exc.code in (429,):
         logger.debug("urllib2.HTTPError: %s", exc)
         return True
diff --git a/build/scripts/extract_docs.py b/build/scripts/extract_docs.py
index 20e8311346..5fb84a8dcf 100644
--- a/build/scripts/extract_docs.py
+++ b/build/scripts/extract_docs.py
@@ -27,7 +27,7 @@ def main():
         rel_dst = os.path.dirname(os.path.normpath(src))
         for prefix in prefixes:
             if src.startswith(prefix):
-                rel_dst = rel_dst[len(prefix):]
+                rel_dst = rel_dst[len(prefix) :]
                 continue
         assert not os.path.isabs(rel_dst)
         dest_dir = os.path.join(args.dest_dir, rel_dst)
diff --git a/build/scripts/f2c.py b/build/scripts/f2c.py
index 7021e1391f..878580e4d4 100644
--- a/build/scripts/f2c.py
+++ b/build/scripts/f2c.py
@@ -41,16 +41,19 @@ if __name__ == '__main__':
 
     # should parse includes, really
     p = subprocess.Popen(
         [args.tool, '-w', '-R', '-a', '-I' + os.path.dirname(args.input), '-T' + tmpdir],
-        stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        stdin=subprocess.PIPE,
+    )
     stdout, stderr = p.communicate(input=open(args.input).read())
     ret = p.wait()
 
     if ret:
-        print >>sys.stderr, 'f2c failed: %s, %s' % (stderr, ret)
+        print >> sys.stderr, 'f2c failed: %s, %s' % (stderr, ret)
         sys.exit(ret)
 
     if 'Error' in stderr:
-        print >>sys.stderr, stderr
+        print >> sys.stderr, stderr
 
     with open(args.output, 'w') as f:
         f.write(header)
diff --git a/build/scripts/fail_module_cmd.py b/build/scripts/fail_module_cmd.py
index fa14c0d851..a7af64da9b 100644
--- a/build/scripts/fail_module_cmd.py
+++ b/build/scripts/fail_module_cmd.py
@@ -3,5 +3,9 @@ import sys
 
 if __name__ == '__main__':
     assert len(sys.argv) == 2, 'Unexpected number of arguments...'
-    sys.stderr.write('Error: module command for target [[bad]]{}[[rst]] was not executed due to build graph configuration errors...\n'.format(sys.argv[1]))
+    sys.stderr.write(
+        'Error: module command for target [[bad]]{}[[rst]] was not executed due to build graph configuration errors...\n'.format(
+            sys.argv[1]
+        )
+    )
     sys.exit(1)
diff --git a/build/scripts/fetch_from.py b/build/scripts/fetch_from.py
index b6ea7cefa1..2ebe6f0e51 100755
--- a/build/scripts/fetch_from.py
+++ b/build/scripts/fetch_from.py
@@ -25,10 +25,18 @@ def add_common_arguments(parser):
     parser.add_argument('--rename-to')  # used by test_node in inject_mds_resource_to_graph
     parser.add_argument('--copy-to-dir')
     parser.add_argument('--untar-to')
-    parser.add_argument('--rename', action='append', default=[], metavar='FILE', help='rename FILE to the corresponding output')
+    parser.add_argument(
+        '--rename', action='append', default=[], metavar='FILE', help='rename FILE to the corresponding output'
+    )
     parser.add_argument('--executable', action='store_true', help='make outputs executable')
     parser.add_argument('--log-path')
-    parser.add_argument('-v', '--verbose', action='store_true', default=os.environ.get('YA_VERBOSE_FETCHER'), help='increase stderr verbosity')
+    parser.add_argument(
+        '-v',
+        '--verbose',
+        action='store_true',
+        default=os.environ.get('YA_VERBOSE_FETCHER'),
+        help='increase stderr verbosity',
+    )
     parser.add_argument('outputs', nargs='*', default=[])
 
@@ -50,7 +58,9 @@ def hardlink_or_copy(src, dst):
             if e.errno == errno.EEXIST:
                 return
             elif e.errno in (errno.EXDEV, errno.EMLINK, errno.EINVAL, errno.EACCES):
-                sys.stderr.write("Can't make hardlink (errno={}) - fallback to copy: {} -> {}\n".format(e.errno, src, dst))
+                sys.stderr.write(
+                    "Can't make hardlink (errno={}) - fallback to copy: {} -> {}\n".format(e.errno, src, dst)
+                )
                 shutil.copy(src, dst)
             else:
                 sys.stderr.write("src: {} dst: {}\n".format(src, dst))
@@ -111,7 +121,6 @@ def setup_logging(args, base_name):
 
 def is_temporary(e):
-
     def is_broken(e):
         return isinstance(e, urllib2.HTTPError) and e.code in (410, 404)
 
@@ -140,7 +149,11 @@ def report_to_snowden(value):
         urllib2.urlopen(
             'https://back-snowden.qloud.yandex-team.ru/report/add',
-            json.dumps([body, ]),
+            json.dumps(
+                [
+                    body,
+                ]
+            ),
             timeout=5,
         )
 
@@ -151,7 +164,7 @@ def report_to_snowden(value):
 
 def copy_stream(read, *writers, **kwargs):
-    chunk_size = kwargs.get('size', 1024*1024)
+    chunk_size = kwargs.get('size', 1024 * 1024)
     while True:
         data = read(chunk_size)
         if not data:
@@ -177,7 +190,7 @@ def git_like_hash_with_size(filepath):
     with open(filepath, 'rb') as f:
         while True:
-            block = f.read(2 ** 16)
+            block = f.read(2**16)
 
             if not block:
                 break
@@ -200,7 +213,9 @@ def size_printer(display_name, size):
         now = dt.datetime.now()
         if last_stamp[0] + dt.timedelta(seconds=10) < now:
             if size:
-                print >>sys.stderr, "##status##{} - [[imp]]{:.1f}%[[rst]]".format(display_name, 100.0 * sz[0] / size if size else 0)
+                print >> sys.stderr, "##status##{} - [[imp]]{:.1f}%[[rst]]".format(
+                    display_name, 100.0 * sz[0] / size if size else 0
+                )
             last_stamp[0] = now
 
     return printer
@@ -247,13 +262,7 @@ def fetch_url(url, unpack, resource_file_name, expected_md5=None, expected_sha1=
     logging.info('File sha1 %s (expected %s)', real_sha1, expected_sha1)
 
     if expected_md5 and real_md5 != expected_md5:
-        report_to_snowden(
-            {
-                'headers': req.headers.headers,
-                'expected_md5': expected_md5,
-                'real_md5': real_md5
-            }
-        )
+        report_to_snowden({'headers': req.headers.headers, 'expected_md5': expected_md5, 'real_md5': real_md5})
 
         raise BadChecksumFetchError(
             'Downloaded {}, but expected {} for {}'.format(
@@ -264,13 +273,7 @@ def fetch_url(url, unpack, resource_file_name, expected_md5=None, expected_sha1=
     if expected_sha1 and real_sha1 != expected_sha1:
-        report_to_snowden(
-            {
-                'headers': req.headers.headers,
-                'expected_sha1': expected_sha1,
-                'real_sha1': real_sha1
-            }
-        )
+        report_to_snowden({'headers': req.headers.headers, 'expected_sha1': expected_sha1, 'real_sha1': real_sha1})
 
         raise BadChecksumFetchError(
             'Downloaded {}, but expected {} for {}'.format(
@@ -305,13 +308,15 @@ def chmod(filename, mode):
         os.chmod(filename, mode)
     except OSError:
         import pwd
-        sys.stderr.write("{} st_mode: {} pwuid: {}\n".format(filename, stat.st_mode, pwd.getpwuid(os.stat(filename).st_uid)))
+
+        sys.stderr.write(
+            "{} st_mode: {} pwuid: {}\n".format(filename, stat.st_mode, pwd.getpwuid(os.stat(filename).st_uid))
+        )
         raise
 
 
 def process(fetched_file, file_name, args, remove=True):
-    assert len(args.rename) <= len(args.outputs), (
-        'too few outputs to rename', args.rename, 'into', args.outputs)
+    assert len(args.rename) <= len(args.outputs), ('too few outputs to rename', args.rename, 'into', args.outputs)
 
     fetched_file_is_dir = os.path.isdir(fetched_file)
     if fetched_file_is_dir and not args.untar_to:
@@ -325,7 +330,6 @@ def process(fetched_file, file_name, args, remove=True):
     else:
         chmod(fetched_file, 0o444)
 
-
     if args.copy_to:
         hardlink_or_copy(fetched_file, args.copy_to)
         if not args.outputs:
@@ -341,7 +345,7 @@ def process(fetched_file, file_name, args, remove=True):
 
     if args.untar_to:
         ensure_dir(args.untar_to)
-        inputs = set(map(os.path.normpath, args.rename + args.outputs[len(args.rename):]))
+        inputs = set(map(os.path.normpath, args.rename + args.outputs[len(args.rename) :]))
         if fetched_file_is_dir:
             for member in inputs:
                 base, name = member.split('/', 1)
@@ -349,10 +353,12 @@ def process(fetched_file, file_name, args, remove=True):
                 dst = os.path.normpath(os.path.join(args.untar_to, member))
                 hardlink_or_copy(src, dst)
         else:
-            # Extract only requested files
+            # Extract only requested files
             try:
                 with tarfile.open(fetched_file, mode='r:*') as tar:
-                    members = [entry for entry in tar if os.path.normpath(os.path.join(args.untar_to, entry.name)) in inputs]
+                    members = [
+                        entry for entry in tar if os.path.normpath(os.path.join(args.untar_to, entry.name)) in inputs
+                    ]
                     tar.extractall(args.untar_to, members=members)
             except tarfile.ReadError as e:
                 logging.exception(e)
@@ -360,9 +366,8 @@ def process(fetched_file, file_name, args, remove=True):
 
     # Forbid changes to the loaded resource data
     for root, _, files in os.walk(args.untar_to):
-        for filename in files:
-            chmod(os.path.join(root, filename), 0o444)
-
+        for filename in files:
+            chmod(os.path.join(root, filename), 0o444)
 
     for src, dst in zip(args.rename, args.outputs):
         if src == 'RESOURCE':
diff --git a/build/scripts/fetch_from_archive.py b/build/scripts/fetch_from_archive.py
index 57aff91b5e..e7bbe23362 100644
--- a/build/scripts/fetch_from_archive.py
+++ b/build/scripts/fetch_from_archive.py
@@ -14,6 +14,7 @@ def parse_args():
 
     return parser.parse_args()
 
+
 def main(args):
     archive = args.archive
     file_name = args.file_name.rstrip('-')
@@ -29,8 +30,9 @@ if __name__ == '__main__':
         main(args)
     except Exception as e:
         logging.exception(e)
-        print >>sys.stderr, open(args.abs_log_path).read()
+        print >> sys.stderr, open(args.abs_log_path).read()
         sys.stderr.flush()
 
         import error
+
         sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fetch_from_mds.py b/build/scripts/fetch_from_mds.py
index e9c838a7a8..3fd264ebb1 100644
--- a/build/scripts/fetch_from_mds.py
+++ b/build/scripts/fetch_from_mds.py
@@ -43,8 +43,9 @@ if __name__ == '__main__':
         main(args)
     except Exception as e:
         logging.exception(e)
-        print >>sys.stderr, open(args.abs_log_path).read()
+        print >> sys.stderr, open(args.abs_log_path).read()
         sys.stderr.flush()
 
         import error
+
         sys.exit(error.ExitCodes.INFRASTRUCTURE_ERROR if fetch_from.is_temporary(e) else 1)
diff --git a/build/scripts/fix_java_command_file_cp.py b/build/scripts/fix_java_command_file_cp.py
index fc87048c32..1313bba0f9 100644
--- a/build/scripts/fix_java_command_file_cp.py
+++ b/build/scripts/fix_java_command_file_cp.py
@@ -22,7 +22,7 @@ def fix_files(args):
             fixed_name = ''.join(fixed_name)
             with open(fixed_name[1:], 'w') as f:
                 f.write(fixed)
-            tail[idx:idx + 1] = [fixed_name]
+            tail[idx : idx + 1] = [fixed_name]
     return tail
diff --git a/build/scripts/gen_aar_gradle_script.py b/build/scripts/gen_aar_gradle_script.py
index 35431c083b..9a997c0c73 100644
--- a/build/scripts/gen_aar_gradle_script.py
+++ b/build/scripts/gen_aar_gradle_script.py
@@ -2,9 +2,9 @@ import argparse
 import os
 import tarfile
 
-FLAT_DIRS_REPO_TEMPLATE='flatDir {{ dirs {dirs} }}\n'
-MAVEN_REPO_TEMPLATE='maven {{ url "{repo}" }}\n'
-KEYSTORE_TEMLATE='signingConfigs {{ debug {{ storeFile file("{keystore}") }} }}\n'
+FLAT_DIRS_REPO_TEMPLATE = 'flatDir {{ dirs {dirs} }}\n'
+MAVEN_REPO_TEMPLATE = 'maven {{ url "{repo}" }}\n'
+KEYSTORE_TEMLATE = 'signingConfigs {{ debug {{ storeFile file("{keystore}") }} }}\n'
 
 DO_NOT_STRIP = '''\
     packagingOptions {
@@ -191,7 +191,6 @@ android {{
 
 def gen_build_script(args):
-
     def wrap(items):
         return ',\n            '.join('"{}"'.format(x) for x in items)
 
@@ -199,9 +198,9 @@ def gen_build_script(args):
     bundles_dirs = set(args.flat_repos)
     for bundle in args.bundles:
         dir_name, base_name = os.path.split(bundle)
-        assert(len(dir_name) > 0 and len(base_name) > 0)
+        assert len(dir_name) > 0 and len(base_name) > 0
         name, ext = os.path.splitext(base_name)
-        assert(len(name) > 0 and ext == '.aar')
+        assert len(name) > 0 and ext == '.aar'
         bundles_dirs.add(dir_name)
         bundles.append('com.yandex:{}@aar'.format(name))
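The gen_aar_gradle_script.py hunk above also drops the parentheses from `assert(...)`. For a bare condition the parentheses are only noise, but the form is worth removing because adding a message later turns it into `assert (cond, msg)`, an assertion on a two-element tuple, which is always truthy and therefore never fires. A small sketch with made-up values:

    name, ext = 'mylib', '.aar'

    # Correct: condition plus message.
    assert len(name) > 0 and ext == '.aar', 'bad bundle name'

    # Broken: asserts a non-empty tuple, so it passes even when the condition
    # is false (modern CPython emits a SyntaxWarning for exactly this):
    #     assert (len(name) > 0 and ext == '.aar', 'bad bundle name')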
diff --git a/build/scripts/gen_java_codenav_entry.py b/build/scripts/gen_java_codenav_entry.py
index ff0a5c737d..2959dc4843 100644
--- a/build/scripts/gen_java_codenav_entry.py
+++ b/build/scripts/gen_java_codenav_entry.py
@@ -22,26 +22,43 @@ def just_do_it(java, kythe, kythe_to_proto, out_name, binding_only, kindexes):
     for kindex in kindex_inputs:
         print >> sys.stderr, '[INFO] Processing:', kindex
         indexer_start = datetime.datetime.now()
-        p = subprocess.Popen([java, '-jar', os.path.join(kythe, 'indexers/java_indexer.jar'), kindex], stdout=subprocess.PIPE)
+        p = subprocess.Popen(
+            [java, '-jar', os.path.join(kythe, 'indexers/java_indexer.jar'), kindex], stdout=subprocess.PIPE
+        )
         indexer_out, _ = p.communicate()
-        print >> sys.stderr, '[INFO] Indexer execution time:', (datetime.datetime.now() - indexer_start).total_seconds(), 'seconds'
+        print >> sys.stderr, '[INFO] Indexer execution time:', (
+            datetime.datetime.now() - indexer_start
+        ).total_seconds(), 'seconds'
         if p.returncode:
             raise Exception('java_indexer failed with exit code {}'.format(p.returncode))
         dedup_start = datetime.datetime.now()
         p = subprocess.Popen([os.path.join(kythe, 'tools/dedup_stream')], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
         dedup_out, _ = p.communicate(indexer_out)
-        print >> sys.stderr, '[INFO] Dedup execution time:', (datetime.datetime.now() - dedup_start).total_seconds(), 'seconds'
+        print >> sys.stderr, '[INFO] Dedup execution time:', (
+            datetime.datetime.now() - dedup_start
+        ).total_seconds(), 'seconds'
         if p.returncode:
             raise Exception('dedup_stream failed with exit code {}'.format(p.returncode))
         entrystream_start = datetime.datetime.now()
-        p = subprocess.Popen([os.path.join(kythe, 'tools/entrystream'), '--write_json'], stdin=subprocess.PIPE, stdout=open(temp_out_name, 'a'))
+        p = subprocess.Popen(
+            [os.path.join(kythe, 'tools/entrystream'), '--write_json'],
+            stdin=subprocess.PIPE,
+            stdout=open(temp_out_name, 'a'),
+        )
         p.communicate(dedup_out)
         if p.returncode:
             raise Exception('entrystream failed with exit code {}'.format(p.returncode))
-        print >> sys.stderr, '[INFO] Entrystream execution time:', (datetime.datetime.now() - entrystream_start).total_seconds(), 'seconds'
+        print >> sys.stderr, '[INFO] Entrystream execution time:', (
+            datetime.datetime.now() - entrystream_start
+        ).total_seconds(), 'seconds'
     preprocess_start = datetime.datetime.now()
-    subprocess.check_call([kythe_to_proto, '--preprocess-entry', '--entries', temp_out_name, '--out', out_name] + (['--only-binding-data'] if binding_only else []))
-    print >> sys.stderr, '[INFO] Preprocessing execution time:', (datetime.datetime.now() - preprocess_start).total_seconds(), 'seconds'
+    subprocess.check_call(
+        [kythe_to_proto, '--preprocess-entry', '--entries', temp_out_name, '--out', out_name]
+        + (['--only-binding-data'] if binding_only else [])
+    )
+    print >> sys.stderr, '[INFO] Preprocessing execution time:', (
+        datetime.datetime.now() - preprocess_start
+    ).total_seconds(), 'seconds'
     print >> sys.stderr, '[INFO] Total execution time:', (datetime.datetime.now() - start).total_seconds(), 'seconds'
diff --git a/build/scripts/gen_java_codenav_protobuf.py b/build/scripts/gen_java_codenav_protobuf.py
index aee8cfe6c3..94df0861b2 100644
--- a/build/scripts/gen_java_codenav_protobuf.py
+++ b/build/scripts/gen_java_codenav_protobuf.py
@@ -7,7 +7,19 @@ def just_do_it(kythe_to_proto, entries, out_name, build_file, source_root):
        classpath = os.pathsep.join([line.strip() for line in f])
     os.execv(
         kythe_to_proto,
-        [kythe_to_proto, '--sources-rel-root', 'fake_arcadia_root', '--entries', entries, '--out', out_name, '--classpath', classpath, '--arcadia-root', source_root]
+        [
+            kythe_to_proto,
+            '--sources-rel-root',
+            'fake_arcadia_root',
+            '--entries',
+            entries,
+            '--out',
+            out_name,
+            '--classpath',
+            classpath,
+            '--arcadia-root',
+            source_root,
+        ],
     )
diff --git a/build/scripts/gen_py3_reg.py b/build/scripts/gen_py3_reg.py
index 149c094898..ff6bf0de56 100644
--- a/build/scripts/gen_py3_reg.py
+++ b/build/scripts/gen_py3_reg.py
@@ -21,10 +21,11 @@ def mangle(name):
         return name
     return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))
 
+
 if __name__ == '__main__':
     if len(sys.argv) != 3:
-        print >>sys.stderr, 'Usage: <path/to/gen_py_reg.py> <python_module_name> <output_file>'
-        print >>sys.stderr, 'Passed: ' + ' '.join(sys.argv)
+        print >> sys.stderr, 'Usage: <path/to/gen_py_reg.py> <python_module_name> <output_file>'
+        print >> sys.stderr, 'Passed: ' + ' '.join(sys.argv)
         sys.exit(1)
 
     with open(sys.argv[2], 'w') as f:
diff --git a/build/scripts/gen_py_protos.py b/build/scripts/gen_py_protos.py
index 606f50dfb6..818139e587 100644
--- a/build/scripts/gen_py_protos.py
+++ b/build/scripts/gen_py_protos.py
@@ -2,7 +2,6 @@ import os
 from os import path
 import shutil
 import subprocess
-import sys
 import tempfile
 import argparse
 import re
@@ -12,7 +11,7 @@ OUT_DIR_ARG = '--python_out='
 
 def _noext(fname):
-    return fname[:fname.rfind('.')]
+    return fname[: fname.rfind('.')]
 
 
 def main():
@@ -34,7 +33,7 @@ def main():
     for i in range(len(args)):
         if args[i].startswith(OUT_DIR_ARG):
             assert not out_dir_orig, 'Duplicate "{0}" param'.format(OUT_DIR_ARG)
-            out_dir_orig = args[i][len(OUT_DIR_ARG):]
+            out_dir_orig = args[i][len(OUT_DIR_ARG) :]
             out_dir_temp = tempfile.mkdtemp(dir=out_dir_orig)
             args[i] = OUT_DIR_ARG + out_dir_temp
             continue
@@ -44,8 +43,10 @@ def main():
             plugin_out_dir_arg = match.group(1)
             plugin = match.group(2)
             assert plugin not in plugin_out_dirs_orig, 'Duplicate "{0}" param'.format(plugin_out_dir_arg)
-            plugin_out_dirs_orig[plugin] = args[i][len(plugin_out_dir_arg):]
-            assert plugin_out_dirs_orig[plugin] == out_dir_orig, 'Params "{0}" and "{1}" expected to have the same value'.format(OUT_DIR_ARG, plugin_out_dir_arg)
+            plugin_out_dirs_orig[plugin] = args[i][len(plugin_out_dir_arg) :]
+            assert (
+                plugin_out_dirs_orig[plugin] == out_dir_orig
+            ), 'Params "{0}" and "{1}" expected to have the same value'.format(OUT_DIR_ARG, plugin_out_dir_arg)
             args[i] = plugin_out_dir_arg + out_dir_temp
 
     assert out_dir_temp, 'Param "{0}" not found'.format(OUT_DIR_ARG)
@@ -55,21 +56,21 @@ def main():
         temp_name = out_dir_temp
         orig_name = out_dir_orig
-        dir_name, file_name = path.split(script_args.input[len(script_args.ns) - 1:])
+        dir_name, file_name = path.split(script_args.input[len(script_args.ns) - 1 :])
         for part in dir_name.split('/'):
-            temp_part = part.replace('-','_')
+            temp_part = part.replace('-', '_')
             temp_name = path.join(temp_name, temp_part)
-            assert(path.exists(temp_name))
+            assert path.exists(temp_name)
 
             orig_name = path.join(orig_name, part)
             if not path.exists(orig_name):
                 os.mkdir(orig_name)
 
         orig_base_name = _noext(file_name)
-        temp_base_name = orig_base_name.replace('-','_')
+        temp_base_name = orig_base_name.replace('-', '_')
         for suf in script_args.suffixes:
             temp_file_name = path.join(temp_name, temp_base_name + suf)
-            assert(path.exists(temp_file_name))
+            assert path.exists(temp_file_name)
             orig_file_name = path.join(orig_name, orig_base_name + '__int__' + suf)
             os.rename(temp_file_name, orig_file_name)
diff --git a/build/scripts/gen_py_reg.py b/build/scripts/gen_py_reg.py
index 1560135ae8..0f38dfffe3 100644
--- a/build/scripts/gen_py_reg.py
+++ b/build/scripts/gen_py_reg.py
@@ -19,10 +19,11 @@ def mangle(name):
         return name
     return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))
 
+
 if __name__ == '__main__':
     if len(sys.argv) != 3:
-        print >>sys.stderr, 'Usage: <path/to/gen_py_reg.py> <python_module_name> <output_file>'
-        print >>sys.stderr, 'Passed: ' + ' '.join(sys.argv)
+        print >> sys.stderr, 'Usage: <path/to/gen_py_reg.py> <python_module_name> <output_file>'
+        print >> sys.stderr, 'Passed: ' + ' '.join(sys.argv)
         sys.exit(1)
 
     with open(sys.argv[2], 'w') as f:
diff --git a/build/scripts/gen_swiftc_output_map.py b/build/scripts/gen_swiftc_output_map.py
index 01ce85f256..6811827ecd 100644
--- a/build/scripts/gen_swiftc_output_map.py
+++ b/build/scripts/gen_swiftc_output_map.py
@@ -4,12 +4,13 @@ import sys
 
 def just_do_it(args):
     source_root, build_root, out_file, srcs = args[0], args[1], args[2], args[3:]
-    assert(len(srcs))
+    assert len(srcs)
     result_obj = {}
     for src in srcs:
         result_obj[src] = {'object': src.replace(source_root, build_root) + '.o'}
     with open(out_file, 'w') as of:
         of.write(json.dumps(result_obj))
 
+
 if __name__ == '__main__':
     just_do_it(sys.argv[1:])
diff --git a/build/scripts/gen_tasklet_reg.py b/build/scripts/gen_tasklet_reg.py
index 0f7f66ad51..b768ff0d3b 100644
--- a/build/scripts/gen_tasklet_reg.py
+++ b/build/scripts/gen_tasklet_reg.py
@@ -35,10 +35,7 @@ def parse_args():
 if __name__ == '__main__':
     args = parse_args()
 
-    includes = ''.join(
-        '#include <{}>\n'.format(include)
-        for include in args.includes
-    )
+    includes = ''.join('#include <{}>\n'.format(include) for include in args.includes)
 
     code = TEMPLATE.format(
         includes=includes,
diff --git a/build/scripts/gen_test_apk_gradle_script.py b/build/scripts/gen_test_apk_gradle_script.py
index 737091e155..28609ef118 100644
--- a/build/scripts/gen_test_apk_gradle_script.py
+++ b/build/scripts/gen_test_apk_gradle_script.py
@@ -3,9 +3,9 @@ import os
 import tarfile
 import xml.etree.ElementTree as etree
 
-FLAT_DIRS_REPO_TEMPLATE='flatDir {{ dirs {dirs} }}\n'
-MAVEN_REPO_TEMPLATE='maven {{ url "{repo}" }}\n'
-KEYSTORE_TEMLATE='signingConfigs {{ debug {{ storeFile file("{keystore}") }} }}\n'
+FLAT_DIRS_REPO_TEMPLATE = 'flatDir {{ dirs {dirs} }}\n'
+MAVEN_REPO_TEMPLATE = 'maven {{ url "{repo}" }}\n'
+KEYSTORE_TEMLATE = 'signingConfigs {{ debug {{ storeFile file("{keystore}") }} }}\n'
 
 TEST_APK_TEMPLATE = """\
 ext.jniLibsDirs = [
@@ -119,9 +119,9 @@ def gen_build_script(args):
     bundles_dirs = set(args.flat_repos)
     for bundle in args.bundles:
         dir_name, base_name = os.path.split(bundle)
-        assert(len(dir_name) > 0 and len(base_name) > 0)
+        assert len(dir_name) > 0 and len(base_name) > 0
         name, ext = os.path.splitext(base_name)
-        assert(len(name) > 0 and ext == '.aar')
+        assert len(name) > 0 and ext == '.aar'
         bundles_dirs.add(dir_name)
         bundles.append('com.yandex:{}@aar'.format(name))
@@ -184,9 +184,11 @@ if __name__ == '__main__':
         f.write(content)
 
     with open(args.gradle_properties, 'w') as f:
-        f.write('''android.enableJetifier=true
+        f.write(
+            '''android.enableJetifier=true
 android.useAndroidX=true
-            org.gradle.jvmargs=-Xmx8192m -XX:MaxPermSize=512m''')
+            org.gradle.jvmargs=-Xmx8192m -XX:MaxPermSize=512m'''
+        )
 
     if args.bundle_name:
         with open(args.settings_gradle, 'w') as f:
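The gen_tasklet_reg.py hunk above collapses a parenthesized generator expression into the call itself: a generator expression that is the sole argument of a call needs no parentheses of its own, so the multi-line form carried no extra information. A runnable sketch with hypothetical include names:

    includes = ['util/generic/string.h', 'tasklet/runtime.h']  # made-up values

    # Sole-argument generator expression: the call's parentheses double as
    # the generator's own, so one compact line suffices.
    header = ''.join('#include <{}>\n'.format(include) for include in includes)
    print(header)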
diff --git a/build/scripts/gen_yql_python_udf.py b/build/scripts/gen_yql_python_udf.py
index 127b4b8867..ac58de3d8f 100644
--- a/build/scripts/gen_yql_python_udf.py
+++ b/build/scripts/gen_yql_python_udf.py
@@ -1,6 +1,6 @@
 import sys
 
-TEMPLATE="""
+TEMPLATE = """
 #include <yql/udfs/common/python/python_udf/python_udf.h>
 
 #include <contrib/ydb/library/yql/public/udf/udf_registrator.h>
@@ -41,8 +41,7 @@ def main():
     flavor, module_name, package_name, path, libra_flag = sys.argv[1:]
     with open(path, 'w') as f:
         f.write(
-            TEMPLATE
-            .strip()
+            TEMPLATE.strip()
             .replace('@MODULE_NAME@', module_name)
             .replace('@PACKAGE_NAME@', package_name)
             .replace('@FLAVOR@', flavor)
diff --git a/build/scripts/generate_mf.py b/build/scripts/generate_mf.py
index a44a969980..447215c723 100644
--- a/build/scripts/generate_mf.py
+++ b/build/scripts/generate_mf.py
@@ -1,12 +1,14 @@
 import json
-import logging
 import optparse
 import os
 import sys
 import io
 
+import six
+
 import process_command_files as pcf
 
+
 class BadMfError(Exception):
     pass
 
@@ -46,7 +48,12 @@ def parse_args():
     parser.add_option('-c', '--credits-output')
     parser.add_option('-t', '--type')
     opts, _ = parser.parse_args(free_args)
-    return lics, peers, credits, opts,
+    return (
+        lics,
+        peers,
+        credits,
+        opts,
+    )
 
 
 def generate_header(meta):
@@ -61,7 +68,7 @@ def generate_mf():
         'path': os.path.dirname(options.output),
         'licenses': lics,
         'dependencies': [],
-        'license_texts': ''
+        'license_texts': '',
     }
 
     build_root = options.build_root
@@ -91,7 +98,7 @@ def generate_mf():
             texts = data.get('license_texts')
             if texts:
                 candidate_text = generate_header(data) + '\n' + texts
-                if isinstance(candidate_text, unicode):
+                if isinstance(candidate_text, six.text_type):
                     candidate_text = candidate_text.encode('utf-8')
                 final_credits.append(candidate_text)
diff --git a/build/scripts/generate_pom.py b/build/scripts/generate_pom.py
index d91bce6249..e8fa65e0fa 100644
--- a/build/scripts/generate_pom.py
+++ b/build/scripts/generate_pom.py
@@ -132,7 +132,7 @@ def build_pom_and_export_to_maven(**kwargs):
 
     project = et.Element(
         '{}{}{}project'.format('{', DEFAULT_NAMESPACE, '}'),
-        attrib={'{}{}{}schemaLocation'.format('{', XSI_NAMESPACE, '}'): SCHEMA_LOCATION}
+        attrib={'{}{}{}schemaLocation'.format('{', XSI_NAMESPACE, '}'): SCHEMA_LOCATION},
     )
 
     group_id, artifact_id, version = target.split(':')
@@ -185,7 +185,9 @@ def build_pom_and_export_to_maven(**kwargs):
     if test_resource_dirs:
         test_resource_element = et.SubElement(build, 'testResources')
         for test_resource_dir in test_resource_dirs:
-            et.SubElement(et.SubElement(test_resource_element, 'testResource'), 'directory').text = '${basedir}' + (('/' + test_resource_dir) if test_resource_dir != '.' else '')
+            et.SubElement(et.SubElement(test_resource_element, 'testResource'), 'directory').text = '${basedir}' + (
+                ('/' + test_resource_dir) if test_resource_dir != '.' else ''
+            )
 
     plugins = et.SubElement(build, 'plugins')
@@ -294,7 +296,9 @@ def build_pom_and_export_to_maven(**kwargs):
         et.SubElement(surefire_plugin, 'groupId').text = MAVEN_SUREFIRE_GROUP_ID
         et.SubElement(surefire_plugin, 'artifactId').text = MAVEN_SUREFIRE_ARTIFACT_ID
         et.SubElement(surefire_plugin, 'version').text = MAVEN_SUREFIRE_VERSION
-        classpath_excludes = et.SubElement(et.SubElement(surefire_plugin, 'configuration'), 'classpathDependencyExcludes')
+        classpath_excludes = et.SubElement(
+            et.SubElement(surefire_plugin, 'configuration'), 'classpathDependencyExcludes'
+        )
         for classpath_exclude in test_target_dependencies_exclude:
             et.SubElement(classpath_excludes, 'classpathDependencyExclude').text = classpath_exclude
diff --git a/build/scripts/go_proto_wrapper.py b/build/scripts/go_proto_wrapper.py
index 6a5841d5d2..a8d856db6f 100644
--- a/build/scripts/go_proto_wrapper.py
+++ b/build/scripts/go_proto_wrapper.py
@@ -50,10 +50,12 @@ def main(args):
         m = re.match(OUT_DIR_FLAG_PATTERN, args[i])
         if m:
             out_dir_flag = m.group(1)
-            index = max(len(out_dir_flag), args[i].rfind(':')+1)
+            index = max(len(out_dir_flag), args[i].rfind(':') + 1)
             out_dir = args[i][index:]
             if out_dir_orig:
-                assert out_dir_orig == out_dir, 'Output directories do not match: [{}] and [{}]'.format(out_dir_orig, out_dir)
+                assert out_dir_orig == out_dir, 'Output directories do not match: [{}] and [{}]'.format(
+                    out_dir_orig, out_dir
+                )
             else:
                 out_dir_orig = out_dir
                 out_dir_temp = tempfile.mkdtemp(dir=out_dir_orig)
@@ -63,7 +65,9 @@ def main(args):
     try:
         subprocess.check_output(args, stdin=None, stderr=subprocess.STDOUT)
     except subprocess.CalledProcessError as e:
-        sys.stderr.write('{} returned non-zero exit code {}.\n{}\n'.format(' '.join(e.cmd), e.returncode, e.output.decode('utf-8')))
+        sys.stderr.write(
+            '{} returned non-zero exit code {}.\n{}\n'.format(' '.join(e.cmd), e.returncode, e.output.decode('utf-8'))
+        )
         return e.returncode
 
     # All Arcadia GO projects should have 'a.yandex-team.ru/' namespace prefix.
@@ -71,9 +75,7 @@ def main(args):
     # project is from vendor directory under the root of Arcadia.
     out_dir_src = os.path.normpath(os.path.join(out_dir_temp, arcadia_prefix, proto_namespace))
     out_dir_dst = out_dir_orig
-    is_from_contrib = False
     if not os.path.isdir(out_dir_src):
-        is_from_contrib = True
         out_dir_src = out_dir_temp
         out_dir_dst = os.path.join(out_dir_orig, contrib_prefix)
 
@@ -89,7 +91,9 @@ def main(args):
                 package_name = m.group(1).split(';')[-1].split('/')[-1]
                 break
     with open(check_output, 'w') as fout:
-        fout.write('// Code generated by go_proto_wrapper.py script. DO NOT EDIT.\n\npackage {}\n'.format(package_name))
+        fout.write(
DO NOT EDIT.\n\npackage {}\n'.format(package_name) + ) shutil.rmtree(out_dir_temp) diff --git a/build/scripts/go_tool.py b/build/scripts/go_tool.py index b99a333866..05db991498 100644 --- a/build/scripts/go_tool.py +++ b/build/scripts/go_tool.py @@ -23,9 +23,9 @@ vendor_prefix = 'vendor/' vet_info_ext = '.vet.out' vet_report_ext = '.vet.txt' -FIXED_CGO1_SUFFIX='.fixed.cgo1.go' +FIXED_CGO1_SUFFIX = '.fixed.cgo1.go' -COMPILE_OPTIMIZATION_FLAGS=('-N',) +COMPILE_OPTIMIZATION_FLAGS = ('-N',) def get_trimpath_args(args): @@ -99,7 +99,7 @@ def preprocess_args(args): # compute root relative module dir path assert args.output is None or args.output_root == os.path.dirname(args.output) assert args.output_root.startswith(args.build_root_dir) - args.module_path = args.output_root[len(args.build_root_dir):] + args.module_path = args.output_root[len(args.build_root_dir) :] args.source_module_dir = os.path.join(args.source_root, args.test_import_path or args.module_path) + os.path.sep assert len(args.module_path) > 0 args.import_path, args.is_std = get_import_path(args.module_path) @@ -109,7 +109,7 @@ def preprocess_args(args): srcs = [] for f in args.srcs: if f.endswith(FIXED_CGO1_SUFFIX) and f.startswith(args.build_root_dir): - path = os.path.join(args.output_root, '{}.cgo1.go'.format(os.path.basename(f[:-len(FIXED_CGO1_SUFFIX)]))) + path = os.path.join(args.output_root, '{}.cgo1.go'.format(os.path.basename(f[: -len(FIXED_CGO1_SUFFIX)]))) srcs.append(path) preprocess_cgo1(f, path, args.source_root) else: @@ -127,8 +127,8 @@ def compare_versions(version1, version2): index = version.find('beta') return len(version) if index < 0 else index - v1 = tuple(x.zfill(8) for x in version1[:last_index(version1)].split('.')) - v2 = tuple(x.zfill(8) for x in version2[:last_index(version2)].split('.')) + v1 = tuple(x.zfill(8) for x in version1[: last_index(version1)].split('.')) + v2 = tuple(x.zfill(8) for x in version2[: last_index(version2)].split('.')) if v1 == v2: return 0 return 1 if v1 < v2 else -1 @@ -159,9 +159,9 @@ def get_import_path(module_path): import_path = module_path.replace('\\', '/') is_std_module = import_path.startswith(std_lib_prefix) if is_std_module: - import_path = import_path[len(std_lib_prefix):] + import_path = import_path[len(std_lib_prefix) :] elif import_path.startswith(vendor_prefix): - import_path = import_path[len(vendor_prefix):] + import_path = import_path[len(vendor_prefix) :] else: import_path = arc_project_prefix + import_path assert len(import_path) > 0 @@ -224,7 +224,9 @@ def create_embed_config(args): } for info in args.embed: embed_dir = os.path.normpath(info[0]) - assert embed_dir == args.source_module_dir[:-1] or embed_dir.startswith((args.source_module_dir, args.build_root)) + assert embed_dir == args.source_module_dir[:-1] or embed_dir.startswith( + (args.source_module_dir, args.build_root) + ) pattern = info[1] if pattern.endswith('/**/*'): pattern = pattern[:-3] @@ -258,7 +260,7 @@ def gen_vet_info(args): # does't make any harm (it needs to be revised later) import_map['unsafe'] = 'unsafe' - for (key, _) in info['packagefile']: + for key, _ in info['packagefile']: if key not in import_map: import_map[key] = key @@ -275,7 +277,7 @@ def gen_vet_info(args): 'PackageVetx': dict((key, vet_info_output_name(value)) for key, value in info['packagefile']), 'VetxOnly': False, 'VetxOutput': vet_info_output_name(args.output), - 'SucceedOnTypecheckFailure': False + 'SucceedOnTypecheckFailure': False, } # sys.stderr.write('{}\n'.format(json.dumps(data, indent=4))) return data @@ 
-360,7 +362,9 @@ def _do_compile_go(args): compiling_runtime = False if is_std_module: cmd.append('-std') - if import_path in ('runtime', 'internal/abi', 'internal/bytealg', 'internal/cpu') or import_path.startswith('runtime/internal/'): + if import_path in ('runtime', 'internal/abi', 'internal/bytealg', 'internal/cpu') or import_path.startswith( + 'runtime/internal/' + ): cmd.append('-+') compiling_runtime = True import_config_name = create_import_config(args.peers, True, args.import_map, args.module_map) @@ -399,7 +403,6 @@ def _do_compile_go(args): class VetThread(threading.Thread): - def __init__(self, target, args): super(VetThread, self).__init__(target=target, args=args) self.exc_info = None @@ -431,11 +434,14 @@ def do_compile_go(args): def do_compile_asm(args): def need_compiling_runtime(import_path): - return import_path in ('runtime', 'reflect', 'syscall') or \ - import_path.startswith('runtime/internal/') or \ - compare_versions('1.17', args.goversion) >= 0 and import_path == 'internal/bytealg' - - assert(len(args.srcs) == 1 and len(args.asm_srcs) == 1) + return ( + import_path in ('runtime', 'reflect', 'syscall') + or import_path.startswith('runtime/internal/') + or compare_versions('1.17', args.goversion) >= 0 + and import_path == 'internal/bytealg' + ) + + assert len(args.srcs) == 1 and len(args.asm_srcs) == 1 cmd = [args.go_asm] cmd += get_trimpath_args(args) cmd += ['-I', args.output_root, '-I', os.path.join(args.pkg_root, 'include')] @@ -485,7 +491,9 @@ def do_link_exe(args): do_link_lib(compile_args) cmd = [args.go_link, '-o', args.output] - import_config_name = create_import_config(args.peers + args.non_local_peers, False, args.import_map, args.module_map) + import_config_name = create_import_config( + args.peers + args.non_local_peers, False, args.import_map, args.module_map + ) if import_config_name: cmd += ['-importcfg', import_config_name] if args.link_flags: @@ -522,7 +530,7 @@ def do_link_exe(args): cgo_peers.append('-Wl,--end-group') try: index = extldflags.index('--cgo-peers') - extldflags = extldflags[:index] + cgo_peers + extldflags[index+1:] + extldflags = extldflags[:index] + cgo_peers + extldflags[index + 1 :] except ValueError: extldflags.extend(cgo_peers) if len(extldflags) > 0: @@ -533,20 +541,27 @@ def do_link_exe(args): def gen_cover_info(args): lines = [] - lines.extend([ - """ + lines.extend( + [ + """ var ( coverCounters = make(map[string][]uint32) coverBlocks = make(map[string][]testing.CoverBlock) ) """, - 'func init() {', - ]) + 'func init() {', + ] + ) for var, file in (x.split(':') for x in args.cover_info): - lines.append(' coverRegisterFile("{file}", _cover0.{var}.Count[:], _cover0.{var}.Pos[:], _cover0.{var}.NumStmt[:])'.format(file=file, var=var)) - lines.extend([ - '}', - """ + lines.append( + ' coverRegisterFile("{file}", _cover0.{var}.Count[:], _cover0.{var}.Pos[:], _cover0.{var}.NumStmt[:])'.format( + file=file, var=var + ) + ) + lines.extend( + [ + '}', + """ func coverRegisterFile(fileName string, counter []uint32, pos []uint32, numStmts []uint16) { if 3*len(counter) != len(pos) || len(counter) != len(numStmts) { panic("coverage: mismatched sizes") @@ -569,7 +584,8 @@ func coverRegisterFile(fileName string, counter []uint32, pos []uint32, numStmts coverBlocks[fileName] = block } """, - ]) + ] + ) return lines @@ -706,18 +722,22 @@ def gen_test_main(args, test_lib_args, xtest_lib_args): lines.append('func main() {') if is_cover: - lines.extend([ - ' testing.RegisterCover(testing.Cover{', - ' Mode: "set",', - ' Counters: 
coverCounters,', - ' Blocks: coverBlocks,', - ' CoveredPackages: "",', - ' })', - ]) - lines.extend([ - ' m := testing.MainStart(testdeps.TestDeps{{}}, {})'.format(', '.join(var_names)), - '', - ]) + lines.extend( + [ + ' testing.RegisterCover(testing.Cover{', + ' Mode: "set",', + ' Counters: coverCounters,', + ' Blocks: coverBlocks,', + ' CoveredPackages: "",', + ' })', + ] + ) + lines.extend( + [ + ' m := testing.MainStart(testdeps.TestDeps{{}}, {})'.format(', '.join(var_names)), + '', + ] + ) if test_main_package: lines.append(' {}.TestMain(m)'.format(test_main_package)) @@ -746,6 +766,7 @@ def do_link_test(args): xtest_ydx_file_name = None need_append_ydx = test_lib_args and xtest_lib_args and args.ydx_file and args.vet_flags if need_append_ydx: + def find_ydx_file_name(name, flags): for i, elem in enumerate(flags): if elem.endswith(name): @@ -873,12 +894,7 @@ if __name__ == '__main__': # We are going to support only 'lib', 'exe' and 'cgo' build modes currently # and as a result we are going to generate only one build node per module # (or program) - dispatch = { - 'exe': do_link_exe, - 'dll': do_link_exe, - 'lib': do_link_lib, - 'test': do_link_test - } + dispatch = {'exe': do_link_exe, 'dll': do_link_exe, 'lib': do_link_lib, 'test': do_link_test} exit_code = 1 try: diff --git a/build/scripts/ios_wrapper.py b/build/scripts/ios_wrapper.py index d3aa48387a..eeb0a78d26 100644 --- a/build/scripts/ios_wrapper.py +++ b/build/scripts/ios_wrapper.py @@ -86,7 +86,8 @@ def just_do_it(args): if not signs: sign_file = os.path.join(module_dir, app_name + '.xcent') with open(sign_file, 'w') as f: - f.write('''<?xml version="1.0" encoding="UTF-8"?> + f.write( + '''<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> @@ -94,7 +95,8 @@ def just_do_it(args): <true/> </dict> </plist> - ''') + ''' + ) else: sign_file = signs[0] sign_application(sign_file, app_dir) @@ -132,6 +134,7 @@ def make_main_plist(inputs, out, replaced_parameters): else: if root[k] in replaced_parameters: root[k] = replaced_parameters[root[k]] + scan_n_replace(united_data) plistlib.writePlist(united_data, out) subprocess.check_call(['/usr/bin/plutil', '-convert', 'binary1', out]) @@ -145,16 +148,20 @@ def link_storyboards(ibtool, archives, app_name, app_dir, flags): with tarfile.open(arc) as a: a.extractall(path=unpacked[-1]) flags += [ - '--module', app_name, - '--link', app_dir, + '--module', + app_name, + '--link', + app_dir, ] - subprocess.check_call([ibtool] + flags + - ['--errors', '--warnings', '--notices', '--output-format', 'human-readable-text'] + - unpacked) + subprocess.check_call( + [ibtool] + flags + ['--errors', '--warnings', '--notices', '--output-format', 'human-readable-text'] + unpacked + ) def sign_application(xcent, app_dir): - subprocess.check_call(['/usr/bin/codesign', '--force', '--sign', '-', '--entitlements', xcent, '--timestamp=none', app_dir]) + subprocess.check_call( + ['/usr/bin/codesign', '--force', '--sign', '-', '--entitlements', xcent, '--timestamp=none', app_dir] + ) def extract_resources(resources, app_dir, strings=False, sign=False): @@ -163,17 +170,23 @@ def extract_resources(resources, app_dir, strings=False, sign=False): for tfinfo in tf: tf.extract(tfinfo.name, app_dir) if strings: - subprocess.check_call(['/usr/bin/plutil', '-convert', 'binary1', os.path.join(app_dir, tfinfo.name)]) + subprocess.check_call( + ['/usr/bin/plutil', '-convert', 'binary1', os.path.join(app_dir, 
tfinfo.name)] + ) if sign: - subprocess.check_call(['/usr/bin/codesign', '--force', '--sign', '-', os.path.join(app_dir, tfinfo.name)]) + subprocess.check_call( + ['/usr/bin/codesign', '--force', '--sign', '-', os.path.join(app_dir, tfinfo.name)] + ) def make_archive(app_dir, output): with tarfile.open(output, "w") as tar_handle: for root, _, files in os.walk(app_dir): for f in files: - tar_handle.add(os.path.join(root, f), arcname=os.path.join(os.path.basename(app_dir), - os.path.relpath(os.path.join(root, f), app_dir))) + tar_handle.add( + os.path.join(root, f), + arcname=os.path.join(os.path.basename(app_dir), os.path.relpath(os.path.join(root, f), app_dir)), + ) if __name__ == '__main__': diff --git a/build/scripts/jni_swig.py b/build/scripts/jni_swig.py index 4b2220430b..71a5174da5 100644 --- a/build/scripts/jni_swig.py +++ b/build/scripts/jni_swig.py @@ -4,10 +4,13 @@ import re import os import tarfile + def parse_args(): parser = argparse.ArgumentParser(description='Wrapper script to invoke swig.') parser.add_argument('--swig', help='path to the swig executable') - parser.add_argument('--default-module', type=str, help='swig -module argument value for inputs without %module statement') + parser.add_argument( + '--default-module', type=str, help='swig -module argument value for inputs without %module statement' + ) parser.add_argument('--package-by-file', help='path to file which dir must be converted to swig -package argument') parser.add_argument('--jsrc', help='jsrc output archive filename') parser.add_argument('--src', help='input .swg file path') @@ -29,7 +32,11 @@ def main(args): outdir_abs = os.path.join(os.path.dirname(args.jsrc), outdir) if not os.path.exists(outdir_abs): os.makedirs(outdir_abs) - cmd = [args.swig, '-c++', '-java', '-package', package] + (['-outdir', outdir_abs] if outdir is not None else []) + args.args + cmd = ( + [args.swig, '-c++', '-java', '-package', package] + + (['-outdir', outdir_abs] if outdir is not None else []) + + args.args + ) if '-module' not in args.args and args.default_module: with open(args.src, 'r') as f: if not re.search(r'(?m)^%module\b', f.read()): diff --git a/build/scripts/link_asrc.py b/build/scripts/link_asrc.py index eec5fe09a8..ec8d92c74d 100644 --- a/build/scripts/link_asrc.py +++ b/build/scripts/link_asrc.py @@ -44,12 +44,12 @@ def main(): if args.input and len(args.input) > 0: for x in args.input: if x in DELIMS: - assert(len(parts) == 0 or len(parts[-1]) > 1) + assert len(parts) == 0 or len(parts[-1]) > 1 parts.append([x]) else: - assert(len(parts) > 0) + assert len(parts) > 0 parts[-1].append(x) - assert(len(parts[-1]) > 1) + assert len(parts[-1]) > 1 if args.jsrcs and len(args.jsrcs): src_dir = os.path.join(args.work, DESTS[DELIM_JAVA]) diff --git a/build/scripts/link_dyn_lib.py b/build/scripts/link_dyn_lib.py index 00215dfcae..1049089054 100644 --- a/build/scripts/link_dyn_lib.py +++ b/build/scripts/link_dyn_lib.py @@ -11,10 +11,7 @@ from process_whole_archive_option import ProcessWholeArchiveOption def shlex_join(cmd): # equivalent to shlex.join() in python 3 - return ' '.join( - pipes.quote(part) - for part in cmd - ) + return ' '.join(pipes.quote(part) for part in cmd) def parse_export_file(p): @@ -37,12 +34,12 @@ def parse_export_file(p): def to_c(sym): symbols = collections.deque(sym.split('::')) c_prefixes = [ # demangle prefixes for c++ symbols - '_ZN', # namespace - '_ZTIN', # typeinfo for - '_ZTSN', # typeinfo name for - '_ZTTN', # VTT for - '_ZTVN', # vtable for - '_ZNK', # const methods + '_ZN', # namespace + 
'_ZTIN', # typeinfo for + '_ZTSN', # typeinfo name for + '_ZTTN', # VTT for + '_ZTVN', # vtable for + '_ZNK', # const methods ] c_sym = '' while symbols: @@ -142,7 +139,7 @@ CUDA_LIBRARIES = { '-lnvinfer_static': '-lnvinfer', '-lnvinfer_plugin_static': '-lnvinfer_plugin', '-lnvonnxparser_static': '-lnvonnxparser', - '-lnvparsers_static': '-lnvparsers' + '-lnvparsers_static': '-lnvparsers', } @@ -159,7 +156,7 @@ def fix_cmd(arch, c): def do_fix(p): if p.startswith(prefix) and p.endswith('.exports'): - fname = p[len(prefix):] + fname = p[len(prefix) :] return list(f(list(parse_export_file(fname)))) @@ -249,8 +246,8 @@ if __name__ == '__main__': proc.communicate() if proc.returncode: - print >>sys.stderr, 'linker has failed with retcode:', proc.returncode - print >>sys.stderr, 'linker command:', shlex_join(cmd) + print >> sys.stderr, 'linker has failed with retcode:', proc.returncode + print >> sys.stderr, 'linker command:', shlex_join(cmd) sys.exit(proc.returncode) if opts.fix_elf: @@ -259,8 +256,8 @@ if __name__ == '__main__': proc.communicate() if proc.returncode: - print >>sys.stderr, 'fix_elf has failed with retcode:', proc.returncode - print >>sys.stderr, 'fix_elf command:', shlex_join(cmd) + print >> sys.stderr, 'fix_elf has failed with retcode:', proc.returncode + print >> sys.stderr, 'fix_elf command:', shlex_join(cmd) sys.exit(proc.returncode) if opts.soname and opts.soname != opts.target: @@ -272,6 +269,7 @@ if __name__ == '__main__': # -----------------Test---------------- # def write_temp_file(content): import yatest.common as yc + filename = yc.output_path('test.exports') with open(filename, 'w') as f: f.write(content) @@ -304,7 +302,7 @@ C++ geobase5::hardcoded_service def run_fix_gnu_param(export_file_content): filename = write_temp_file(export_file_content) result = fix_gnu_param('LINUX', list(parse_export_file(filename)))[0] - version_script_path = result[len('-Wl,--version-script='):] + version_script_path = result[len('-Wl,--version-script=') :] with open(version_script_path) as f: content = f.read() return content @@ -315,7 +313,9 @@ def test_fix_gnu_param(): C++ geobase5::details::lookup_impl::* C getFactoryMap """ - assert run_fix_gnu_param(export_file_content) == """{ + assert ( + run_fix_gnu_param(export_file_content) + == """{ global: extern "C" { _ZN8geobase57details11lookup_impl*; @@ -329,6 +329,7 @@ global: local: *; }; """ + ) def test_fix_gnu_param_with_linux_version(): @@ -337,7 +338,9 @@ C++ geobase5::details::lookup_impl::* linux_version ver1.0 C getFactoryMap """ - assert run_fix_gnu_param(export_file_content) == """ver1.0 { + assert ( + run_fix_gnu_param(export_file_content) + == """ver1.0 { global: extern "C" { _ZN8geobase57details11lookup_impl*; @@ -351,3 +354,4 @@ global: local: *; }; """ + ) diff --git a/build/scripts/link_exe.py b/build/scripts/link_exe.py index 6d1839e575..96fc83e7f6 100644 --- a/build/scripts/link_exe.py +++ b/build/scripts/link_exe.py @@ -36,7 +36,7 @@ CUDA_LIBRARIES = { '-lnvinfer_static': '-lnvinfer', '-lnvinfer_plugin_static': '-lnvinfer_plugin', '-lnvonnxparser_static': '-lnvonnxparser', - '-lnvparsers_static': '-lnvparsers' + '-lnvparsers_static': '-lnvparsers', } @@ -65,7 +65,7 @@ def fix_sanitize_flag(cmd): '-fsanitize=memory': CLANG_RT + 'msan', '-fsanitize=leak': CLANG_RT + 'lsan', '-fsanitize=undefined': CLANG_RT + 'ubsan', - '-fsanitize=thread': CLANG_RT + 'tsan' + '-fsanitize=thread': CLANG_RT + 'tsan', } used_sanitize_libs = [] diff --git a/build/scripts/link_fat_obj.py b/build/scripts/link_fat_obj.py index 
19f2982a42..ea143ec902 100644 --- a/build/scripts/link_fat_obj.py +++ b/build/scripts/link_fat_obj.py @@ -38,7 +38,7 @@ def get_args(): elif arg == '--with-global-srcs': groups['default'].append(arg) elif arg.startswith(YA_ARG_PREFIX): - group_name = arg[len(YA_ARG_PREFIX):] + group_name = arg[len(YA_ARG_PREFIX) :] args_list = groups.setdefault(group_name, []) else: args_list.append(arg) @@ -52,7 +52,7 @@ def strip_suppression_files(srcs): def strip_forceload_prefix(srcs): force_load_prefix = '-Wl,-force_load,' - return list(map(lambda lib: lib[lib.startswith(force_load_prefix) and len(force_load_prefix):], srcs)) + return list(map(lambda lib: lib[lib.startswith(force_load_prefix) and len(force_load_prefix) :], srcs)) def main(): @@ -81,7 +81,9 @@ def main(): else: no_pie = '-Wl,-no-pie' - do_link = linker + ['-o', obj_output, '-Wl,-r', '-nodefaultlibs', '-nostartfiles', no_pie] + global_srcs + auto_input + do_link = ( + linker + ['-o', obj_output, '-Wl,-r', '-nodefaultlibs', '-nostartfiles', no_pie] + global_srcs + auto_input + ) do_archive = archiver + [lib_output] + peers do_globals = None if args.globals_lib: diff --git a/build/scripts/link_lib.py b/build/scripts/link_lib.py index e73c02027a..1247e67cb6 100644 --- a/build/scripts/link_lib.py +++ b/build/scripts/link_lib.py @@ -27,8 +27,8 @@ class Opts(object): self.modify_flags = ['-M'] self.need_modify = any(item.endswith('.a') for item in auto_input) if self.need_modify: - self.objs = list( filter(lambda x: x.endswith('.o'), auto_input) ) - self.libs = list( filter(lambda x: x.endswith('.a'), auto_input) ) + self.objs = list(filter(lambda x: x.endswith('.o'), auto_input)) + self.libs = list(filter(lambda x: x.endswith('.a'), auto_input)) else: self.objs = auto_input self.libs = [] @@ -40,8 +40,8 @@ class Opts(object): self.output_opts = ['-o', self.output] elif self.arch_type == 'LIB': self.create_flags = [] - self.extra_args = list( filter(lambda x: x.startswith('/'), auto_input) ) - self.objs = list( filter(lambda x: not x.startswith('/'), auto_input) ) + self.extra_args = list(filter(lambda x: x.startswith('/'), auto_input)) + self.objs = list(filter(lambda x: not x.startswith('/'), auto_input)) self.libs = [] self.output_opts = ['/OUT:' + self.output] diff --git a/build/scripts/make_container.py b/build/scripts/make_container.py index a485baffdd..2c07177fdb 100644 --- a/build/scripts/make_container.py +++ b/build/scripts/make_container.py @@ -5,7 +5,7 @@ import struct import subprocess import sys -import container # 1 +import container # 1 def main(output_path, entry_path, input_paths, squashfs_path): @@ -29,7 +29,7 @@ def main(output_path, entry_path, input_paths, squashfs_path): program_container_path = os.path.basename(program_path) os.symlink(program_container_path, 'entry') - add_cmd = [ os.path.join(squashfs_path, 'mksquashfs') ] + add_cmd = [os.path.join(squashfs_path, 'mksquashfs')] add_cmd.extend([program_path, 'entry', 'program_layer']) subprocess.run(add_cmd) @@ -59,7 +59,6 @@ def main(output_path, entry_path, input_paths, squashfs_path): if len(data) == 0: break - output.write(struct.pack('<Q', size)) os.rename(output_tmp_path, output_path) diff --git a/build/scripts/make_container_layer.py b/build/scripts/make_container_layer.py index 4f61f5a2e5..4bf0f8127a 100644 --- a/build/scripts/make_container_layer.py +++ b/build/scripts/make_container_layer.py @@ -1,6 +1,6 @@ import sys -import container # 1 +import container # 1 class UserError(Exception): diff --git a/build/scripts/make_java_classpath_file.py 
b/build/scripts/make_java_classpath_file.py index c70a7876d7..fbd9eb1358 100644 --- a/build/scripts/make_java_classpath_file.py +++ b/build/scripts/make_java_classpath_file.py @@ -8,10 +8,11 @@ def make_cp_file(args): source = args[0] destination = args[1] with open(source) as src: - lines = [l.strip() for l in src if l.strip()] + lines = [line.strip() for line in src if line.strip()] with open(destination, 'w') as dst: dst.write(os.pathsep.join(lines)) + def make_cp_file_from_args(args): destination = args[0] with open(destination, 'w') as dst: diff --git a/build/scripts/make_java_srclists.py b/build/scripts/make_java_srclists.py index 65174bafd7..ccf4027b0d 100644 --- a/build/scripts/make_java_srclists.py +++ b/build/scripts/make_java_srclists.py @@ -43,7 +43,7 @@ def main(): SRCDIR_ARG = 3 JSOURCES_DIR_ARG = 4 - next_arg=FILE_ARG + next_arg = FILE_ARG for src in remaining_args: if next_arg == RESOURCES_DIR_ARG: diff --git a/build/scripts/make_manifest_from_bf.py b/build/scripts/make_manifest_from_bf.py index bfea3ba3de..d9b1264ae4 100644 --- a/build/scripts/make_manifest_from_bf.py +++ b/build/scripts/make_manifest_from_bf.py @@ -1,7 +1,6 @@ import sys import zipfile import os -import re def prepare_path(path): diff --git a/build/scripts/mangle_typeinfo_names.py b/build/scripts/mangle_typeinfo_names.py index 8f30a46552..32ff5d05cc 100755 --- a/build/scripts/mangle_typeinfo_names.py +++ b/build/scripts/mangle_typeinfo_names.py @@ -2,10 +2,8 @@ import base64 import hashlib -import io import os import struct -import subprocess import sys from collections import namedtuple @@ -116,8 +114,9 @@ def read_elf_sections(elf_data, elf64): # Elf32_Half e_shstrndx; # } Elf32_Ehdr; - section_header_offset, section_header_entry_size, section_header_entries_number,\ - section_name_string_table_index = unpack('< Q 10x 3H', elf_data, 40) if elf64 else unpack('< L 10x 3H', elf_data, 32) + section_header_offset, section_header_entry_size, section_header_entries_number, section_name_string_table_index = ( + unpack('< Q 10x 3H', elf_data, 40) if elf64 else unpack('< L 10x 3H', elf_data, 32) + ) # https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.sheader.html # If the number of sections is greater than or equal to SHN_LORESERVE (0xff00), e_shnum has the value SHN_UNDEF (0) @@ -126,17 +125,22 @@ def read_elf_sections(elf_data, elf64): if section_header_entries_number == 0: section_header_entries_number = unpack_section_header(elf_data, section_header_offset, elf64).size - sections = [unpack_section_header(elf_data, section_header_offset + i * section_header_entry_size, elf64) - for i in range(section_header_entries_number)] + sections = [ + unpack_section_header(elf_data, section_header_offset + i * section_header_entry_size, elf64) + for i in range(section_header_entries_number) + ] # section names data section_names_section = sections[section_name_string_table_index] - section_names_data = elf_data[section_names_section.data_offset : section_names_section.data_offset + section_names_section.size] + section_names_data = elf_data[ + section_names_section.data_offset : section_names_section.data_offset + section_names_section.size + ] # read section names for i, section in enumerate(sections): sections[i] = section._replace( - name=section_names_data[section.name : section_names_data.find(b'\x00', section.name)].decode()) + name=section_names_data[section.name : section_names_data.find(b'\x00', section.name)].decode() + ) return sections @@ -161,7 +165,7 @@ def mangle_elf_typeinfo_names(elf_data, elf64, 
sections): else: elf_data[section.header_offset + 20 : section.header_offset + 24] = struct.pack('< L', len(mangled)) - symbol_sizes[section.name[len('.rodata.'):]] = len(mangled) + symbol_sizes[section.name[len('.rodata.') :]] = len(mangled) return symbol_sizes @@ -215,9 +219,9 @@ def mangle_elf(elf_data): ei_mag, ei_class = unpack('4s B', elf_data) assert ei_mag == b'\x7fELF' - if ei_class == 1: # ELFCLASS32 + if ei_class == 1: # ELFCLASS32 elf64 = False - elif ei_class == 2: # ELFCLASS64 + elif ei_class == 2: # ELFCLASS64 elf64 = True else: raise Exception('unknown ei_class: ' + str(ei_class)) @@ -269,8 +273,6 @@ def mangle_ar_impl(ar, out): out.write(ar_magic) - string_table = None - while True: obj = read_ar_object(ar) if not obj: diff --git a/build/scripts/merge_coverage_data.py b/build/scripts/merge_coverage_data.py index b7fa3c6a86..22cca75745 100644 --- a/build/scripts/merge_coverage_data.py +++ b/build/scripts/merge_coverage_data.py @@ -13,7 +13,7 @@ def main(args): split_i = args.index('-no-merge') except ValueError: split_i = len(args) - files, expendables = args[:split_i], args[split_i + 1:] + files, expendables = args[:split_i], args[split_i + 1 :] with tarfile.open(output_file, 'w') as outf: for x in files: diff --git a/build/scripts/mkdir.py b/build/scripts/mkdir.py index a326b29300..e947bf081b 100755 --- a/build/scripts/mkdir.py +++ b/build/scripts/mkdir.py @@ -7,6 +7,7 @@ def mkdir_p(directory): if not os.path.exists(directory): os.makedirs(directory) + if __name__ == "__main__": for directory in sys.argv[1:]: mkdir_p(directory) diff --git a/build/scripts/mkdocs_builder_wrapper.py b/build/scripts/mkdocs_builder_wrapper.py index 7a0df04190..e782c97d94 100644 --- a/build/scripts/mkdocs_builder_wrapper.py +++ b/build/scripts/mkdocs_builder_wrapper.py @@ -16,7 +16,7 @@ def main(): continue basename = os.path.basename(arg) assert arg.startswith(build_root) and len(arg) > length + len(basename) and arg[length] in ('/', '\\') - cmd.extend([str('--dep'), str('{}:{}:{}'.format(build_root, os.path.dirname(arg[length+1:]), basename))]) + cmd.extend([str('--dep'), str('{}:{}:{}'.format(build_root, os.path.dirname(arg[length + 1 :]), basename))]) elif arg == '--dep': is_dep = True else: diff --git a/build/scripts/mkver.py b/build/scripts/mkver.py index 321cdaade1..7bdbb88514 100755 --- a/build/scripts/mkver.py +++ b/build/scripts/mkver.py @@ -8,5 +8,5 @@ if __name__ == '__main__': end = data.find(')') version = data[beg:end] - print '#pragma once' - print '#define DEBIAN_VERSION "%s"' % version + print('#pragma once') + print('#define DEBIAN_VERSION "%s"' % version) diff --git a/build/scripts/pack_ios.py b/build/scripts/pack_ios.py index 37c36d1f95..350220be7b 100644 --- a/build/scripts/pack_ios.py +++ b/build/scripts/pack_ios.py @@ -20,7 +20,9 @@ def just_do_it(): if os.path.exists(args.target): os.remove(args.target) with tarfile.open(args.target, 'w') as tf: - tf.add(os.path.join(args.temp_dir, 'bin'), arcname=os.path.join(os.path.basename(args.binary) + '.app', 'bin')) + tf.add( + os.path.join(args.temp_dir, 'bin'), arcname=os.path.join(os.path.basename(args.binary) + '.app', 'bin') + ) return if len(app_tar) > 1: app_tar = [p for p in args.peers if not p.endswith('.default.ios.interface')] @@ -30,7 +32,7 @@ def just_do_it(): with tarfile.open(app_tar) as tf: tf.extractall(args.temp_dir) tar_suffix = '.default.ios.interface' if app_tar.endswith('.default.ios.interface') else '.ios.interface' - app_unpacked_path = os.path.join(args.temp_dir, 
os.path.basename(app_tar)[:-len(tar_suffix)] + '.app') + app_unpacked_path = os.path.join(args.temp_dir, os.path.basename(app_tar)[: -len(tar_suffix)] + '.app') if not os.path.exists(app_unpacked_path): raise Exception('Bad IOS_INTERFACE resource: {}'.format(app_tar)) shutil.copyfile(args.binary, os.path.join(app_unpacked_path, 'bin')) diff --git a/build/scripts/pack_jcoverage_resources.py b/build/scripts/pack_jcoverage_resources.py index f6e181067a..5881d90153 100644 --- a/build/scripts/pack_jcoverage_resources.py +++ b/build/scripts/pack_jcoverage_resources.py @@ -8,10 +8,10 @@ def main(args): output_file = args[0] report_file = args[1] - res = subprocess.call(args[args.index('-end') + 1:]) + res = subprocess.call(args[args.index('-end') + 1 :]) if not os.path.exists(report_file): - print>>sys.stderr, 'Can\'t find jacoco exec file' + print >> sys.stderr, 'Can\'t find jacoco exec file' return res with tarfile.open(output_file, 'w') as outf: diff --git a/build/scripts/postprocess_go_fbs.py b/build/scripts/postprocess_go_fbs.py index ffc2f7cb8f..10882413cf 100644 --- a/build/scripts/postprocess_go_fbs.py +++ b/build/scripts/postprocess_go_fbs.py @@ -5,12 +5,15 @@ import os # very simple regexp to find go import statement in the source code # NOTE! only one-line comments are somehow considered -IMPORT_DECL=re.compile(r''' +IMPORT_DECL = re.compile( + r''' \bimport ( \s+((\.|\w+)\s+)?"[^"]+" ( \s+//[^\n]* )? | \s* \( \s* ( ( \s+ ((\.|\w+)\s+)? "[^"]+" )? ( \s* //[^\n]* )? )* \s* \) - )''', re.MULTILINE | re.DOTALL | re.VERBOSE) + )''', + re.MULTILINE | re.DOTALL | re.VERBOSE, +) def parse_args(): @@ -62,7 +65,11 @@ def main(): if len(raw_import_map) != len(import_map): for k, v in (z.split('=', 1) for z in raw_import_map): if v != import_map[k]: - raise Exception('import map [{}] contains different values for key [{}]: [{}] and [{}].'.format(args.map, k, v, import_map[k])) + raise Exception( + 'import map [{}] contains different values for key [{}]: [{}] and [{}].'.format( + args.map, k, v, import_map[k] + ) + ) for root, _, files in os.walk(args.input_dir): for src in (f for f in files if f.endswith('.go')): diff --git a/build/scripts/preprocess.py b/build/scripts/preprocess.py index 4657bef732..012617fdfc 100644 --- a/build/scripts/preprocess.py +++ b/build/scripts/preprocess.py @@ -9,15 +9,15 @@ def load_file(p): def step(base, data, hh): def flt(): - for l in data.split('\n'): - if l in hh: - pp = os.path.join(base, hh[l]) + for line in data.split('\n'): + if line in hh: + pp = os.path.join(base, hh[line]) yield '\n\n' + load_file(pp) + '\n\n' os.unlink(pp) else: - yield l + yield line return '\n'.join(flt()) diff --git a/build/scripts/process_command_files.py b/build/scripts/process_command_files.py index e24bdf3e2b..4ca9caa649 100644 --- a/build/scripts/process_command_files.py +++ b/build/scripts/process_command_files.py @@ -1,33 +1,35 @@ -import sys
-
-
-def is_cmdfile_arg(arg):
- return arg.startswith('@')
-
-def cmdfile_path(arg):
- return arg[1:]
-
-def read_from_command_file(arg):
- with open(arg) as afile:
- return afile.read().splitlines()
-
-def skip_markers(args):
- res = []
- for arg in args:
- if arg == '--ya-start-command-file' or arg == '--ya-end-command-file':
- continue
- res.append(arg)
- return res
-
-def iter_args(args):
- for arg in args:
- if not is_cmdfile_arg(arg):
- if arg == '--ya-start-command-file' or arg == '--ya-end-command-file':
- continue
- yield arg
- else:
- for cmdfile_arg in read_from_command_file(cmdfile_path(arg)):
- yield cmdfile_arg
-
-def get_args(args):
- return list(iter_args(args))
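The block removed above is process_command_files.py in its entirety; the added block that follows is the same module reflowed to two blank lines between top-level defs, with the unused import sys dropped. Functionally the module expands response files: any @file argument is replaced by the file's lines, and bare --ya-start-command-file/--ya-end-command-file markers are dropped. A minimal usage sketch of those visible semantics (args.txt is a hypothetical file; run from build/scripts so the module imports):

import process_command_files as pcf

# hypothetical response file, one argument per line
with open('args.txt', 'w') as f:
    f.write('-O2\n-Wall\n')

# '@args.txt' is expanded in place; the bare marker flags are skipped
print(pcf.get_args(['cc', '@args.txt', '--ya-start-command-file', '-c', '--ya-end-command-file']))
# prints ['cc', '-O2', '-Wall', '-c']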
+def is_cmdfile_arg(arg):
+    return arg.startswith('@')
+
+
+def cmdfile_path(arg):
+    return arg[1:]
+
+
+def read_from_command_file(arg):
+    with open(arg) as afile:
+        return afile.read().splitlines()
+
+
+def skip_markers(args):
+    res = []
+    for arg in args:
+        if arg == '--ya-start-command-file' or arg == '--ya-end-command-file':
+            continue
+        res.append(arg)
+    return res
+
+
+def iter_args(args):
+    for arg in args:
+        if not is_cmdfile_arg(arg):
+            if arg == '--ya-start-command-file' or arg == '--ya-end-command-file':
+                continue
+            yield arg
+        else:
+            for cmdfile_arg in read_from_command_file(cmdfile_path(arg)):
+                yield cmdfile_arg
+
+
+def get_args(args):
+    return list(iter_args(args))
diff --git a/build/scripts/process_whole_archive_option.py b/build/scripts/process_whole_archive_option.py
index 84d29869e9..3e4458a8c7 100644
--- a/build/scripts/process_whole_archive_option.py
+++ b/build/scripts/process_whole_archive_option.py
@@ -3,11 +3,11 @@ import os
 import process_command_files as pcf


-class ProcessWholeArchiveOption():
+class ProcessWholeArchiveOption:
     def __init__(self, arch, peers=None, libs=None):
         self.arch = arch.upper()
-        self.peers = { x : 0 for x in peers } if peers else None
-        self.libs = { x : 0 for x in libs } if libs else None
+        self.peers = {x: 0 for x in peers} if peers else None
+        self.libs = {x: 0 for x in libs} if libs else None
         self.start_wa_marker = '--start-wa'
         self.end_wa_marker = '--end-wa'

diff --git a/build/scripts/resolve_java_srcs.py b/build/scripts/resolve_java_srcs.py
index a2e6c20012..8d3d6285e7 100644
--- a/build/scripts/resolve_java_srcs.py
+++ b/build/scripts/resolve_java_srcs.py
@@ -9,29 +9,36 @@ def list_all_files(directory, prefix='/', hidden_files=False):
     if os.path.exists(directory):
         for i in os.listdir(directory):
             abs_path = os.path.join(directory, i)
-            result += list_all_files(os.path.join(directory, abs_path), prefix + i + '/', hidden_files) \
-                if os.path.isdir(abs_path) else ([prefix + i] if (hidden_files or not i.startswith('.')) else [])
+            result += (
+                list_all_files(os.path.join(directory, abs_path), prefix + i + '/', hidden_files)
+                if os.path.isdir(abs_path)
+                else ([prefix + i] if (hidden_files or not i.startswith('.')) else [])
+            )
     return result


 def pattern_to_regexp(p):
-    return '^' + \
-        ('/' if not p.startswith('**') else '') + \
-        re.escape(p).replace(
-            r'\*\*\/', '[_DIR_]'
-        ).replace(
-            r'\*', '[_FILE_]'
-        ).replace(
-            '[_DIR_]', '(.*/)?'
- ).replace( - '[_FILE_]', '([^/]*)' - ) + '$' - - -def resolve_java_srcs(srcdir, include_patterns, exclude_patterns, all_resources, resolve_kotlin=False, resolve_groovy=False): + return ( + '^' + + ('/' if not p.startswith('**') else '') + + re.escape(p) + .replace(r'\*\*\/', '[_DIR_]') + .replace(r'\*', '[_FILE_]') + .replace('[_DIR_]', '(.*/)?') + .replace('[_FILE_]', '([^/]*)') + + '$' + ) + + +def resolve_java_srcs( + srcdir, include_patterns, exclude_patterns, all_resources, resolve_kotlin=False, resolve_groovy=False +): result = {'java': [], 'not_java': [], 'kotlin': [], 'groovy': []} include_patterns_normal, include_patterns_hidden, exclude_patterns_normal, exclude_patterns_hidden = [], [], [], [] - for vis, hid, patterns in ((include_patterns_normal, include_patterns_hidden, include_patterns), (exclude_patterns_normal, exclude_patterns_hidden, exclude_patterns),): + for vis, hid, patterns in ( + (include_patterns_normal, include_patterns_hidden, include_patterns), + (exclude_patterns_normal, exclude_patterns_hidden, exclude_patterns), + ): for pattern in patterns: if (pattern if pattern.find('/') == -1 else pattern.rsplit('/', 1)[1]).startswith('.'): hid.append(pattern) @@ -42,7 +49,7 @@ def resolve_java_srcs(srcdir, include_patterns, exclude_patterns, all_resources, re_patterns = [re.compile(i, re.IGNORECASE) for i in re_patterns] else: re_patterns = [re.compile(i) for i in re_patterns] - vis[:], hid[:] = re_patterns[:len(vis)], re_patterns[len(vis):] + vis[:], hid[:] = re_patterns[: len(vis)], re_patterns[len(vis) :] for inc_patterns, exc_patterns, with_hidden_files in ( (include_patterns_normal, exclude_patterns_normal, False), @@ -77,8 +84,22 @@ def resolve_java_srcs(srcdir, include_patterns, exclude_patterns, all_resources, return sorted(result['java']), sorted(result['not_java']), sorted(result['kotlin']), sorted(result['groovy']) -def do_it(directory, sources_file, resources_file, kotlin_sources_file, groovy_sources_file, include_patterns, exclude_patterns, resolve_kotlin, resolve_groovy, append, all_resources): - j, r, k, g = resolve_java_srcs(directory, include_patterns, exclude_patterns, all_resources, resolve_kotlin, resolve_groovy) +def do_it( + directory, + sources_file, + resources_file, + kotlin_sources_file, + groovy_sources_file, + include_patterns, + exclude_patterns, + resolve_kotlin, + resolve_groovy, + append, + all_resources, +): + j, r, k, g = resolve_java_srcs( + directory, include_patterns, exclude_patterns, all_resources, resolve_kotlin, resolve_groovy + ) mode = 'a' if append else 'w' open(sources_file, mode).writelines(i + '\n' for i in j) open(resources_file, mode).writelines(i + '\n' for i in r) @@ -96,8 +117,8 @@ if __name__ == '__main__': parser.add_argument('-k', '--kotlin-sources-file', default=None) parser.add_argument('-g', '--groovy-sources-file', default=None) parser.add_argument('--append', action='store_true', default=False) - parser.add_argument('--all-resources', action='store_true', default=False) - parser.add_argument('--resolve-kotlin', action='store_true', default=False) + parser.add_argument('--all-resources', action='store_true', default=False) + parser.add_argument('--resolve-kotlin', action='store_true', default=False) parser.add_argument('--resolve-groovy', action='store_true', default=False) parser.add_argument('--include-patterns', nargs='*', default=[]) parser.add_argument('--exclude-patterns', nargs='*', default=[]) diff --git a/build/scripts/retry.py b/build/scripts/retry.py index d14170bfec..e5ae8edae4 100644 --- 
a/build/scripts/retry.py +++ b/build/scripts/retry.py @@ -8,7 +8,7 @@ def retry_func(f, exceptions=Exception, tries=-1, delay=1, max_delay=None, backo while _tries: try: return f() - except exceptions as e: + except exceptions: _tries -= 1 if not _tries: raise @@ -25,5 +25,7 @@ def retry(**retry_kwargs): @functools.wraps(func) def wrapper(*args, **kwargs): return retry_func(lambda: func(*args, **kwargs), **retry_kwargs) + return wrapper + return decorator diff --git a/build/scripts/run_ios_simulator.py b/build/scripts/run_ios_simulator.py index 052c855b77..b69ef81ccb 100644 --- a/build/scripts/run_ios_simulator.py +++ b/build/scripts/run_ios_simulator.py @@ -29,13 +29,17 @@ def action_create(simctl, profiles, device_dir, name, args): all_devices = list(get_all_devices(simctl, profiles, device_dir)) if filter(lambda x: x["name"] == name, all_devices): raise Exception("Device named {} already exists".format(name)) - subprocess.check_call([simctl, "--profiles", profiles, "--set", device_dir, "create", name, args.device_type, args.device_runtime]) + subprocess.check_call( + [simctl, "--profiles", profiles, "--set", device_dir, "create", name, args.device_type, args.device_runtime] + ) created = filter(lambda x: x["name"] == name, get_all_devices(simctl, profiles, device_dir)) if not created: raise Exception("Creation error: temp device named {} not found".format(name)) created = created[0] if created["availability"] != "(available)": - raise Exception("Creation error: temp device {} status is {} ((available) expected)".format(name, created["availability"])) + raise Exception( + "Creation error: temp device {} status is {} ((available) expected)".format(name, created["availability"]) + ) def action_spawn(simctl, profiles, device_dir, name, args): @@ -60,7 +64,9 @@ def action_kill(simctl, profiles, device_dir, name): def get_all_devices(simctl, profiles, device_dir): - p = subprocess.Popen([simctl, "--profiles", profiles, "--set", device_dir, "list", "--json", "devices"], stdout=subprocess.PIPE) + p = subprocess.Popen( + [simctl, "--profiles", profiles, "--set", device_dir, "list", "--json", "devices"], stdout=subprocess.PIPE + ) out, _ = p.communicate() rc = p.wait() if rc: diff --git a/build/scripts/run_msvc_wine.py b/build/scripts/run_msvc_wine.py index eb5ed3eba5..38ffa1ffb0 100644 --- a/build/scripts/run_msvc_wine.py +++ b/build/scripts/run_msvc_wine.py @@ -41,7 +41,7 @@ def run_subprocess_with_timeout(timeout, args): stdout, stderr = p.communicate(timeout=timeout) return p, stdout, stderr except subprocess.TimeoutExpired as e: - print >>sys.stderr, 'timeout running {0}, error {1}, delay {2} seconds'.format(args, str(e), delay) + print >> sys.stderr, 'timeout running {0}, error {1}, delay {2} seconds'.format(args, str(e), delay) if p is not None: try: p.kill() @@ -77,7 +77,9 @@ def subst_path(l): def call_wine_cmd_once(wine, cmd, env, mode): - p = run_subprocess(wine + cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, close_fds=True, shell=False) + p = run_subprocess( + wine + cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, close_fds=True, shell=False + ) output = find_cmd_out(cmd) error = None @@ -118,7 +120,7 @@ def call_wine_cmd_once(wine, cmd, env, mode): 'Could not load wine-gecko', 'wine: configuration in', 'wine: created the configuration directory', - 'libpng warning:' + 'libpng warning:', ] suffixes = [ @@ -162,7 +164,7 @@ def call_wine_cmd_once(wine, cmd, env, mode): stdout_and_stderr = '\n'.join(filter_lines()).strip() if stdout_and_stderr: - print 
>>sys.stderr, stdout_and_stderr + print >> sys.stderr, stdout_and_stderr return return_code @@ -173,7 +175,7 @@ def prepare_vc(fr, to): to_p = os.path.join(to, p) if not os.path.exists(to_p): - print >>sys.stderr, 'install %s -> %s' % (fr_p, to_p) + print >> sys.stderr, 'install %s -> %s' % (fr_p, to_p) os.link(fr_p, to_p) @@ -194,7 +196,7 @@ def run_slave(): try: return call_wine_cmd_once([wine], args['cmd'], args['env'], args['mode']) except Exception as e: - print >>sys.stderr, '%s, will retry in %s' % (str(e), tout) + print >> sys.stderr, '%s, will retry in %s' % (str(e), tout) time.sleep(tout) tout = min(2 * tout, 4) @@ -279,14 +281,14 @@ def colorize_strings(l): if p >= 0: yield l[:p] - l = l[p + 1:] + l = l[p + 1 :] p = l.find("'") if p >= 0: yield CYA + "'" + subst_path(l[:p]) + "'" + RST - for x in colorize_strings(l[p + 1:]): + for x in colorize_strings(l[p + 1 :]): yield x else: yield "'" + l @@ -311,7 +313,7 @@ def colorize_line(l): if l and l.startswith('('): p = l.find(')') parts.append(':' + MGT + l[1:p] + RST) - l = l[p + 1:] + l = l[p + 1 :] if l: if l.startswith(' : '): @@ -360,8 +362,9 @@ def trim_path(path, winepath): if not check_path[1:].startswith((path[1:4], path[1:4].upper())): raise Exception( 'Cannot trim path {}; 1st winepath exit code: {}, stdout:\n{}\n stderr:\n{}\n 2nd winepath exit code: {}, stdout:\n{}\n stderr:\n{}'.format( - path, p1.returncode, p1_stdout, p1_stderr, p2.returncode, p2_stdout, p2_stderr - )) + path, p1.returncode, p1_stdout, p1_stderr, p2.returncode, p2_stdout, p2_stderr + ) + ) return short_path @@ -421,7 +424,7 @@ def process_free_args(args, wine, bld_root, mode): def process_arg(arg): with_wa_prefix = arg.startswith(whole_archive_prefix) prefix = whole_archive_prefix if with_wa_prefix else '' - without_prefix_arg = arg[len(prefix):] + without_prefix_arg = arg[len(prefix) :] return prefix + fix_path(process_link(downsize_path(without_prefix_arg, short_names))) result = [] @@ -493,12 +496,7 @@ def run_main(): if sleep: time.sleep(sleep) - args = { - 'cmd': cmd, - 'env': env, - 'mode': mode, - 'tout': tout - } + args = {'cmd': cmd, 'env': env, 'mode': mode, 'tout': tout} slave_cmd = [sys.executable, sys.argv[0], wine, 'slave', json.dumps(args)] p = run_subprocess(slave_cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=False) @@ -510,7 +508,7 @@ def run_main(): return if mode == 'cxx': log = colorize(log) - print >>sys.stderr, log + print >> sys.stderr, log tout = 200 @@ -519,26 +517,26 @@ def run_main(): if rc in (-signal.SIGALRM, signal.SIGALRM): print_err_log(out) - print >>sys.stderr, '##append_tag##time out' + print >> sys.stderr, '##append_tag##time out' elif out and ' stack overflow ' in out: - print >>sys.stderr, '##append_tag##stack overflow' + print >> sys.stderr, '##append_tag##stack overflow' elif out and 'recvmsg: Connection reset by peer' in out: - print >>sys.stderr, '##append_tag##wine gone' + print >> sys.stderr, '##append_tag##wine gone' elif out and 'D8037' in out: - print >>sys.stderr, '##append_tag##repair wine' + print >> sys.stderr, '##append_tag##repair wine' try: os.unlink(os.path.join(os.environ['WINEPREFIX'], '.update-timestamp')) except Exception as e: - print >>sys.stderr, e + print >> sys.stderr, e else: print_err_log(out) # non-zero return code - bad, return it immediately if rc: - print >>sys.stderr, '##win_cmd##' + ' '.join(cmd) - print >>sys.stderr, '##args##' + ' '.join(free_args) + print >> sys.stderr, '##win_cmd##' + ' '.join(cmd) + print >> sys.stderr, '##args##' + ' '.join(free_args) return 
rc # check for output existence(if we expect it!) and real length @@ -547,7 +545,7 @@ def run_main(): return 0 else: # retry! - print >>sys.stderr, '##append_tag##no output' + print >> sys.stderr, '##append_tag##no output' else: return 0 @@ -577,7 +575,7 @@ def main(): except KeyboardInterrupt: sys.exit(4) except Exception as e: - print >>sys.stderr, str(e) + print >> sys.stderr, str(e) sys.exit(3) diff --git a/build/scripts/run_sonar.py b/build/scripts/run_sonar.py index 761cc34b78..e3bf086084 100644 --- a/build/scripts/run_sonar.py +++ b/build/scripts/run_sonar.py @@ -72,7 +72,10 @@ def main(opts, props_args): extract_zip_file(opts.sources_jar_path, sources_dir) if opts.gcov_report_path: collect_cpp_sources(opts.gcov_report_path, opts.source_root, sources_dir) - base_props_args += ['-Dsonar.projectBaseDir=' + sources_dir, '-Dsonar.cxx.coverage.reportPath=' + opts.gcov_report_path] + base_props_args += [ + '-Dsonar.projectBaseDir=' + sources_dir, + '-Dsonar.cxx.coverage.reportPath=' + opts.gcov_report_path, + ] if opts.classes_jar_paths: classes_dir = os.path.abspath('cls') @@ -92,18 +95,25 @@ def main(opts, props_args): if extracted is not None: shutil.copyfileobj(extracted, dest) - base_props_args += [ - '-Dsonar.core.codeCoveragePlugin=jacoco', - '-Dsonar.jacoco.reportPath=' + jacoco_report_path + base_props_args += ['-Dsonar.core.codeCoveragePlugin=jacoco', '-Dsonar.jacoco.reportPath=' + jacoco_report_path] + java_args = ['-{}'.format(i) for i in opts.java_args] + [ + '-Djava.net.preferIPv6Addresses=true', + '-Djava.net.preferIPv4Addresses=false', + ] + + sonar_cmd = ( + [ + opts.java_binary_path, + ] + + java_args + + [ + '-classpath', + opts.sonar_scanner_jar_path, ] - java_args = ['-{}'.format(i) for i in opts.java_args] + ['-Djava.net.preferIPv6Addresses=true', '-Djava.net.preferIPv4Addresses=false'] - - sonar_cmd = [ - opts.java_binary_path, - ] + java_args + [ - '-classpath', - opts.sonar_scanner_jar_path, - ] + base_props_args + props_args + [opts.sonar_scanner_main_class, '-X'] + + base_props_args + + props_args + + [opts.sonar_scanner_main_class, '-X'] + ) p = sp.Popen(sonar_cmd, stdout=sp.PIPE, stderr=sp.STDOUT) out, _ = p.communicate() diff --git a/build/scripts/run_tool.py b/build/scripts/run_tool.py index 00e3ff6f1e..d9e5706300 100755 --- a/build/scripts/run_tool.py +++ b/build/scripts/run_tool.py @@ -6,4 +6,4 @@ import os if __name__ == '__main__': env = os.environ.copy() env['ASAN_OPTIONS'] = 'detect_leaks=0' - subprocess.check_call(sys.argv[sys.argv.index('--') + 1:], env=env) + subprocess.check_call(sys.argv[sys.argv.index('--') + 1 :], env=env) diff --git a/build/scripts/setup_java_tmpdir.py b/build/scripts/setup_java_tmpdir.py index e478d4aa96..c0d4ae7cfb 100644 --- a/build/scripts/setup_java_tmpdir.py +++ b/build/scripts/setup_java_tmpdir.py @@ -25,7 +25,7 @@ def fix_tmpdir(cmd): tmpdir = os.environ.get('TMPDIR') or os.environ.get('TEMPDIR') if not tmpdir: return cmd - return cmd[:java_id + 1] + ['{}{}'.format(option_name, tmpdir)] + cmd[java_id + 1:] + return cmd[: java_id + 1] + ['{}{}'.format(option_name, tmpdir)] + cmd[java_id + 1 :] def just_do_it(): diff --git a/build/scripts/stdout2stderr.py b/build/scripts/stdout2stderr.py index a67eb0afb3..ee26c73282 100644 --- a/build/scripts/stdout2stderr.py +++ b/build/scripts/stdout2stderr.py @@ -1,13 +1,13 @@ import subprocess import sys -FILE_PARAM='--file=' +FILE_PARAM = '--file=' if __name__ == '__main__': i = 1 stdout = sys.stderr if len(sys.argv) > i and sys.argv[i].startswith(FILE_PARAM): - file_name = 
sys.argv[i][len(FILE_PARAM):] + file_name = sys.argv[i][len(FILE_PARAM) :] stdout = open(file_name, "w") i += 1 assert len(sys.argv) > i and not sys.argv[i].startswith(FILE_PARAM) diff --git a/build/scripts/symlink.py b/build/scripts/symlink.py index 9e30a25065..9636bf3929 100755 --- a/build/scripts/symlink.py +++ b/build/scripts/symlink.py @@ -8,7 +8,7 @@ from subprocess import call def symlink(): if len(sys.argv) < 3: - print >>sys.stderr, "Usage: symlink.py <source> <target>" + print >> sys.stderr, "Usage: symlink.py <source> <target>" sys.exit(1) source = sys.argv[1] @@ -25,5 +25,6 @@ def symlink(): else: call(["ln", "-f", "-s", "-n", source, target]) + if __name__ == '__main__': symlink() diff --git a/build/scripts/tar_directory.py b/build/scripts/tar_directory.py index a91889fa22..16a62bd5fa 100644 --- a/build/scripts/tar_directory.py +++ b/build/scripts/tar_directory.py @@ -9,7 +9,9 @@ def is_exe(fpath): def main(args): if len(args) < 2 or len(args) > 3: - raise Exception("Illegal usage: `tar_directory.py archive.tar directory [skip prefix]` or `tar_directory.py archive.tar output_directory --extract`") + raise Exception( + "Illegal usage: `tar_directory.py archive.tar directory [skip prefix]` or `tar_directory.py archive.tar output_directory --extract`" + ) tar, directory, prefix, extract = args[0], args[1], None, False if len(args) == 3: if args[2] == '--extract': diff --git a/build/scripts/tar_sources.py b/build/scripts/tar_sources.py index 33555e3f20..71a665843f 100644 --- a/build/scripts/tar_sources.py +++ b/build/scripts/tar_sources.py @@ -37,7 +37,9 @@ def main(): with open(fname, 'rb') as fin: tarinfo = out.gettarinfo(fname, arcname) tarinfo.mode = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH if tarinfo.mode | stat.S_IXUSR else 0 - tarinfo.mode = tarinfo.mode | stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH + tarinfo.mode = ( + tarinfo.mode | stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH + ) tarinfo.mtime = 0 tarinfo.uid = 0 tarinfo.gid = 0 diff --git a/build/scripts/tared_protoc.py b/build/scripts/tared_protoc.py index 48ced0c479..ce3f5cb5ba 100644 --- a/build/scripts/tared_protoc.py +++ b/build/scripts/tared_protoc.py @@ -1,7 +1,6 @@ import os import optparse import tarfile -import contextlib import stat import subprocess as sp @@ -32,7 +31,9 @@ def main(): with open(fname, 'rb') as fin: tarinfo = tf.gettarinfo(fname, os.path.relpath(fname, opts.protoc_out_dir)) tarinfo.mode = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH if tarinfo.mode | stat.S_IXUSR else 0 - tarinfo.mode = tarinfo.mode | stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH + tarinfo.mode = ( + tarinfo.mode | stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH + ) tarinfo.mtime = 0 tarinfo.uid = 0 tarinfo.gid = 0 diff --git a/build/scripts/touch.py b/build/scripts/touch.py index e01ba7f86b..16be77630e 100755 --- a/build/scripts/touch.py +++ b/build/scripts/touch.py @@ -10,8 +10,7 @@ def main(argv): parser = optparse.OptionParser(add_help_option=False) parser.disable_interspersed_args() - parser.add_option('-?', '--help', dest='help', - action='store_true', default=None, help='print help') + parser.add_option('-?', '--help', dest='help', action='store_true', default=None, help='print help') parser.add_option('-t', dest='t', action='store', default=None) opts, argv_rest = parser.parse_args(argv) diff --git a/build/scripts/unpacking_jtest_runner.py b/build/scripts/unpacking_jtest_runner.py index 9730dcd711..dbcbd4a7d7 
100644 --- a/build/scripts/unpacking_jtest_runner.py +++ b/build/scripts/unpacking_jtest_runner.py @@ -32,7 +32,7 @@ def fix_cmd(cmd): p = subprocess.Popen([java, '-version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() out, err = out.strip(), err.strip() - if ((out or '').strip().startswith('java version "1.8') or (err or '').strip().startswith('java version "1.8')): + if (out or '').strip().startswith('java version "1.8') or (err or '').strip().startswith('java version "1.8'): res = [] i = 0 while i < len(cmd): @@ -71,9 +71,7 @@ def extract_jars(dest, archive): def make_bfg_from_cp(class_path, out): - class_path = ' '.join( - map(lambda path: ('file:/' + path.lstrip('/')) if os.path.isabs(path) else path, class_path) - ) + class_path = ' '.join(map(lambda path: ('file:/' + path.lstrip('/')) if os.path.isabs(path) else path, class_path)) with zipfile.ZipFile(out, 'w') as zf: lines = [] while class_path: @@ -127,7 +125,7 @@ def main(): mf = os.pathsep.join([dest] + class_path) else: raise Exception("Unexpected classpath option type: " + opts.classpath_option_type) - args = fix_cmd(args[:cp_idx + 1]) + [mf] + args[cp_idx + 2:] + args = fix_cmd(args[: cp_idx + 1]) + [mf] + args[cp_idx + 2 :] else: args[cp_idx + 1] = args[cp_idx + 1].replace(opts.tests_jar_path, dest) args = fix_cmd(args[:cp_idx]) + args[cp_idx:] diff --git a/build/scripts/vcs_info.py b/build/scripts/vcs_info.py index 88ad700579..fd1f6b3c50 100644 --- a/build/scripts/vcs_info.py +++ b/build/scripts/vcs_info.py @@ -45,8 +45,11 @@ class _Formatting(object): def escaped_define(strkey, val): name = "#define " + strkey + " " if _Formatting.is_str(val): - define = "\"" + _Formatting.escape_line_feed( - _Formatting.escape_trigraphs(_Formatting.escape_special_symbols(val))) + "\"" + define = ( + "\"" + + _Formatting.escape_line_feed(_Formatting.escape_trigraphs(_Formatting.escape_special_symbols(val))) + + "\"" + ) else: define = str(val) return name + define @@ -60,7 +63,8 @@ class _Formatting(object): def get_default_json(): - return json.loads('''{ + return json.loads( + '''{ "ARCADIA_SOURCE_HG_HASH": "0000000000000000000000000000000000000000", "ARCADIA_SOURCE_LAST_AUTHOR": "<UNKNOWN>", "ARCADIA_SOURCE_LAST_CHANGE": -1, @@ -78,7 +82,8 @@ def get_default_json(): "VCS": "arc", "ARCADIA_PATCH_NUMBER": 0, "ARCADIA_TAG": "" -}''') +}''' + ) def get_json(file_name): @@ -99,10 +104,10 @@ def get_json(file_name): def print_c(json_file, output_file, argv): - """ params: - json file - output file - $(SOURCE_ROOT)/build/scripts/c_templates/svn_interface.c""" + """params: + json file + output file + $(SOURCE_ROOT)/build/scripts/c_templates/svn_interface.c""" interface = argv[0] with open(interface) as c: @@ -188,10 +193,9 @@ def merge_java_mf(json_file, out_manifest, input): def print_java_mf(info): - wrapper = textwrap.TextWrapper(subsequent_indent=' ', - break_long_words=True, - replace_whitespace=False, - drop_whitespace=False) + wrapper = textwrap.TextWrapper( + subsequent_indent=' ', break_long_words=True, replace_whitespace=False, drop_whitespace=False + ) names = set() def wrap(key, val): @@ -222,15 +226,17 @@ def print_java_mf(info): if 'BUILD_TIMESTAMP' in info: lines += wrap('Build-Timestamp: ', str(info['BUILD_TIMESTAMP'])) if 'CUSTOM_VERSION' in info: - lines += wrap('Custom-Version-String: ', base64.b64encode(info['CUSTOM_VERSION'].encode('utf-8')).decode('utf-8')) + lines += wrap( + 'Custom-Version-String: ', base64.b64encode(info['CUSTOM_VERSION'].encode('utf-8')).decode('utf-8') + ) return lines, 
names def print_java(json_file, output_file, argv): - """ params: - json file - output file - file""" + """params: + json file + output file + file""" input = argv[0] if argv else os.curdir merge_java_mf(json_file, output_file, input) @@ -243,15 +249,21 @@ def print_go(json_file, output_file, arc_project_prefix): return lines with open(output_file, 'w') as f: - f.write('\n'.join([ - '// Code generated by vcs_info.py; DO NOT EDIT.', - '', - 'package main', - 'import "{}library/go/core/buildinfo"'.format(arc_project_prefix), - 'func init() {', - ' buildinfo.InitBuildInfo(map[string]string {'] + gen_map(json_file) + ['})', - '}'] - ) + '\n') + f.write( + '\n'.join( + [ + '// Code generated by vcs_info.py; DO NOT EDIT.', + '', + 'package main', + 'import "{}library/go/core/buildinfo"'.format(arc_project_prefix), + 'func init() {', + ' buildinfo.InitBuildInfo(map[string]string {', + ] + + gen_map(json_file) + + ['})', '}'] + ) + + '\n' + ) def print_json(json_file, output_file): @@ -291,12 +303,12 @@ def print_json(json_file, output_file): ext_json[MANDATOTRY_FIELDS_MAP[k]] = json_file[k] if SVN_REVISION in json_file: - for k in SVN_FIELDS_MAP: - ext_json[SVN_FIELDS_MAP[k]] = json_file[k] + for k in SVN_FIELDS_MAP: + ext_json[SVN_FIELDS_MAP[k]] = json_file[k] for k in OPTIONAL_FIELDS_MAP: if k in json_file and json_file[k]: - ext_json[OPTIONAL_FIELDS_MAP[k]] = json_file[k] + ext_json[OPTIONAL_FIELDS_MAP[k]] = json_file[k] with open(output_file, 'w') as f: json.dump(ext_json, f, sort_keys=True, indent=4) diff --git a/build/scripts/with_kapt_args.py b/build/scripts/with_kapt_args.py index eb7438a4c9..5055d2390e 100644 --- a/build/scripts/with_kapt_args.py +++ b/build/scripts/with_kapt_args.py @@ -10,12 +10,17 @@ def parse_args(args): parser = argparse.ArgumentParser() parser.add_argument('--ap-classpath', nargs='*', type=str, dest='classpath') cmd_start = args.index('--') - return parser.parse_args(args[:cmd_start]), args[cmd_start+1:] + return parser.parse_args(args[:cmd_start]), args[cmd_start + 1 :] def get_ap_classpath(directory): jar_re = re.compile(r'.*(?<!-sources)\.jar') - found_jars = [os.path.join(address, name) for address, dirs, files in os.walk(directory) for name in files if jar_re.match(name)] + found_jars = [ + os.path.join(address, name) + for address, dirs, files in os.walk(directory) + for name in files + if jar_re.match(name) + ] if len(found_jars) != 1: raise Exception("found %d JAR files in directory %s" % (len(found_jars), directory)) arg = 'plugin:org.jetbrains.kotlin.kapt3:apclasspath=' + found_jars[0] @@ -26,6 +31,7 @@ def create_extra_args(args): cp_opts = [arg for d in args.classpath for arg in get_ap_classpath(d)] return cp_opts + if __name__ == '__main__': args, cmd = parse_args(sys.argv[1:]) res = cmd + create_extra_args(args) diff --git a/build/scripts/with_pathsep_resolve.py b/build/scripts/with_pathsep_resolve.py index 37c8c598ae..5970fba0c7 100644 --- a/build/scripts/with_pathsep_resolve.py +++ b/build/scripts/with_pathsep_resolve.py @@ -15,6 +15,7 @@ def fix_args(args): just_replace_it = False yield arg + if __name__ == '__main__': res = list(fix_args(sys.argv[1:])) if platform.system() == 'Windows': diff --git a/build/scripts/writer.py b/build/scripts/writer.py index 21bb3006e5..ce88e35693 100644 --- a/build/scripts/writer.py +++ b/build/scripts/writer.py @@ -24,6 +24,7 @@ def smart_shell_quote(v): return "\"{0}\"".format(v.replace('"', '\\"')) return v + if __name__ == '__main__': args = parse_args() open_type = 'a' if args.append else 'w' diff --git 
diff --git a/build/scripts/xargs.py b/build/scripts/xargs.py
index 5d68929ecc..9ac4666c46 100644
--- a/build/scripts/xargs.py
+++ b/build/scripts/xargs.py
@@ -5,7 +5,7 @@ import subprocess
 if __name__ == '__main__':
     pos = sys.argv.index('--')
     fname = sys.argv[pos + 1]
-    cmd = sys.argv[pos + 2:]
+    cmd = sys.argv[pos + 2 :]

     with open(fname, 'r') as f:
         args = [x.strip() for x in f]
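The recurring sys.argv[pos + 2:] to sys.argv[pos + 2 :] rewrites in this commit follow the PEP 8 rule, as applied by Black-style formatters, that when a slice bound is a compound expression the colon is treated like a binary operator and given equal spacing on both sides. The semantics are untouched, as a quick check shows:

    argv = ['prog', '--', 'args.txt', 'a', 'b']
    pos = argv.index('--')
    # Identical slices; only the source-level spacing differs.
    assert argv[pos + 2:] == argv[pos + 2 :] == ['a', 'b']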
diff --git a/build/scripts/ya.make b/build/scripts/ya.make
index 8cdf677024..1433d4e738 100644
--- a/build/scripts/ya.make
+++ b/build/scripts/ya.make
@@ -4,102 +4,140 @@ PY23_TEST()

 IF (PY2)
     TEST_SRCS(
+        build_catboost.py
+        collect_java_srcs.py
+        compile_cuda.py
+        coverage-info.py
+        copy_clang_profile_rt.py
+        create_jcoverage_report.py
+        custom_link_green_mysql.py
+        f2c.py
+        fetch_from.py
+        fetch_from_archive.py
+        fetch_from_mds.py
+        fetch_from_npm.py
+        fetch_from_sandbox.py
+        fetch_resource.py
+        gen_java_codenav_entry.py
+        gen_py3_reg.py
+        go_tool.py
+        ios_wrapper.py
+        link_dyn_lib.py
+        mangle_typeinfo_names.py
+        pack_ios.py
+        pack_jcoverage_resources.py
+        python_yndexer.py
+        run_ios_simulator.py
+        run_msvc_wine.py
+        run_sonar.py
+        symlink.py
+        touch.py
+        unpacking_jtest_runner.py
+        vcs_info.py
+        with_crash_on_timeout.py
+        yndexer.py
+    )
+ELSEIF (PY3)
+    STYLE_PYTHON()
+    TEST_SRCS(
+        append_file.py
+        autotar_gendirs.py
         build_dll_and_java.py
+        build_info_gen.py
         build_java_codenav_index.py
         build_java_with_error_prone.py
         build_java_with_error_prone2.py
         cat.py
         cgo1_wrapper.py
         check_config_h.py
-        collect_java_srcs.py
-        compile_cuda.py
+        clang_tidy.py
+        clang_tidy_arch.py
+        clang_wrapper.py
         compile_java.py
         compile_jsrc.py
         compile_pysrc.py
         configure_file.py
+        container.py
         copy_docs_files.py
         copy_docs_files_to_dir.py
         copy_files_to_dir.py
         copy_to_dir.py
-        coverage-info.py
         cpp_flatc_wrapper.py
-        create_jcoverage_report.py
+        decimal_md5.py
+        error.py
         extract_asrc.py
         extract_docs.py
         extract_jacoco_report.py
-        f2c.py
         fail_module_cmd.py
-        fetch_from.py
-        fetch_from_external.py
-        fetch_from_mds.py
-        fetch_from_npm.py
-        fetch_from_sandbox.py
-        fetch_resource.py
         filter_zip.py
         find_and_tar.py
+        find_time_trace.py
+        fix_java_command_file_cp.py
         fix_msvc_output.py
         fs_tools.py
         gen_aar_gradle_script.py
-        gen_java_codenav_entry.py
         gen_java_codenav_protobuf.py
-        gen_py3_reg.py
+        gen_join_srcs.py
+        gen_py_protos.py
         gen_py_reg.py
+        gen_swiftc_output_map.py
+        gen_tasklet_reg.py
         gen_test_apk_gradle_script.py
-        gen_ub.py
+        gen_yql_python_udf.py
+        generate_mf.py
         generate_pom.py
         go_proto_wrapper.py
-        go_tool.py
-        ios_wrapper.py
         java_pack_to_file.py
+        jni_swig.py
+        kt_copy.py
         link_asrc.py
-        link_dyn_lib.py
         link_exe.py
         link_fat_obj.py
+        link_jsrc.py
         link_lib.py
+        list.py
         llvm_opt_wrapper.py
+        make_container.py
+        make_container_layer.py
+        make_java_classpath_file.py
+        make_java_srclists.py
+        make_manifest_from_bf.py
         merge_coverage_data.py
         merge_files.py
         mkdir.py
         mkdocs_builder_wrapper.py
         mkver.py
-        pack_ios.py
-        pack_jcoverage_resources.py
+        move.py
         postprocess_go_fbs.py
         preprocess.py
+        process_command_files.py
+        process_whole_archive_option.py
         py_compile.py
-        run_ios_simulator.py
+        resolve_java_srcs.py
+        retry.py
+        rodata2asm.py
+        rodata2cpp.py
         run_javac.py
         run_junit.py
         run_llvm_dsymutil.py
-        run_msvc_wine.py
         run_tool.py
+        setup_java_tmpdir.py
         sky.py
+        stderr2stdout.py
         stdout2stderr.py
-        symlink.py
         tar_directory.py
         tar_sources.py
         tared_protoc.py
-        touch.py
-        unpacking_jtest_runner.py
-        vcs_info.py
         with_coverage.py
-        with_crash_on_timeout.py
+        with_kapt_args.py
         with_pathsep_resolve.py
         wrap_groovyc.py
+        wrapcc.py
         wrapper.py
-        writer.py
         write_file_size.py
+        writer.py
         xargs.py
         yield_line.py
-        yndexer.py
-    )
-ELSEIF(PY3)
-    STYLE_PYTHON()
-
-    TEST_SRCS(
-        build_info_gen.py
-        copy_clang_profile_rt.py
-        gen_yql_python_udf.py
     )
 ENDIF()
diff --git a/build/scripts/yield_line.py b/build/scripts/yield_line.py
index c7087e521e..f314a6c5fa 100644
--- a/build/scripts/yield_line.py
+++ b/build/scripts/yield_line.py
@@ -5,4 +5,4 @@
 if __name__ == '__main__':
     pos = sys.argv.index('--')
     with open(sys.argv[pos + 1], 'a') as f:
-        f.write(' '.join(sys.argv[pos + 2:]) + '\n')
+        f.write(' '.join(sys.argv[pos + 2 :]) + '\n')
diff --git a/build/scripts/yndexer.py b/build/scripts/yndexer.py
index a38e28ba99..c8127de711 100644
--- a/build/scripts/yndexer.py
+++ b/build/scripts/yndexer.py
@@ -58,18 +58,29 @@ if __name__ == '__main__':
     out = subprocess.check_output([clang, '-print-search-dirs'])
     resource_dir = rx_resource_dir.search(out).group(1)

-    yndexer_args = [
-        yndexer, input_file,
-        '-pb2',
-        '-i', 'arc::{}'.format(arc_root),
-        '-i', 'build::{}'.format(build_root),
-        '-i', '.IGNORE::/',
-        '-o', os.path.dirname(output_file),
-        '-n', os.path.basename(output_file).rsplit('.ydx.pb2', 1)[0],
-        '--'
-    ] + tail_args + [
-        '-resource-dir', resource_dir,
-    ]
+    yndexer_args = (
+        [
+            yndexer,
+            input_file,
+            '-pb2',
+            '-i',
+            'arc::{}'.format(arc_root),
+            '-i',
+            'build::{}'.format(build_root),
+            '-i',
+            '.IGNORE::/',
+            '-o',
+            os.path.dirname(output_file),
+            '-n',
+            os.path.basename(output_file).rsplit('.ydx.pb2', 1)[0],
+            '--',
+        ]
+        + tail_args
+        + [
+            '-resource-dir',
+            resource_dir,
+        ]
+    )

     process = Process(yndexer_args)
     result = process.wait(timeout=timeout)
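Likewise the yndexer_args hunk is a pure reflow: the flag list is exploded to one element per line and the concatenation is wrapped in parentheses, but the argv it builds is unchanged. A reduced self-contained check of that equivalence, using placeholder values rather than the script's real inputs:

    # Placeholder values; the real ones come from the script's arguments.
    yndexer, input_file, tail_args = 'yndexer', 'a.cpp', ['-x', 'c++']
    arc_root, resource_dir = '/arc', '/res'

    old = [
        yndexer, input_file,
        '-pb2',
        '-i', 'arc::{}'.format(arc_root),
        '--'
    ] + tail_args + [
        '-resource-dir', resource_dir,
    ]

    new = (
        [
            yndexer,
            input_file,
            '-pb2',
            '-i',
            'arc::{}'.format(arc_root),
            '--',
        ]
        + tail_args
        + [
            '-resource-dir',
            resource_dir,
        ]
    )

    assert old == new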