author     orivej <orivej@yandex-team.ru>  2022-02-10 16:44:49 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:44:49 +0300
commit     718c552901d703c502ccbefdfc3c9028d608b947 (patch)
tree       46534a98bbefcd7b1f3faa5b52c138ab27db75b7 /build/scripts
parent     e9656aae26e0358d5378e5b63dcac5c8dbe0e4d0 (diff)
download   ydb-718c552901d703c502ccbefdfc3c9028d608b947.tar.gz
Restoring authorship annotation for <orivej@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'build/scripts')
-rw-r--r--  build/scripts/compile_cuda.py        46
-rw-r--r--  build/scripts/f2c.py                 64
-rwxr-xr-x  build/scripts/fetch_from.py          90
-rwxr-xr-x  build/scripts/fetch_from_sandbox.py   8
-rw-r--r--  build/scripts/gen_py3_reg.py          6
-rw-r--r--  build/scripts/gen_py_reg.py          22
-rw-r--r--  build/scripts/link_dyn_lib.py        36
-rwxr-xr-x  build/scripts/py_compile.py          14
-rw-r--r--  build/scripts/resolve_java_srcs.py    2
-rw-r--r--  build/scripts/yndexer.py             36
10 files changed, 162 insertions, 162 deletions
diff --git a/build/scripts/compile_cuda.py b/build/scripts/compile_cuda.py
index c0bec50b2a..1f87f28478 100644
--- a/build/scripts/compile_cuda.py
+++ b/build/scripts/compile_cuda.py
@@ -3,7 +3,7 @@ import subprocess
import os
import collections
import re
-import tempfile
+import tempfile
def is_clang(command):
@@ -22,8 +22,8 @@ def main():
skip_nocxxinc = False
spl = sys.argv.index('--cflags')
- mtime0 = sys.argv[1]
- command = sys.argv[2: spl]
+ mtime0 = sys.argv[1]
+ command = sys.argv[2: spl]
cflags = sys.argv[spl + 1:]
dump_args = False
@@ -37,9 +37,9 @@ def main():
sys.exit(1)
if is_clang(command):
- # nvcc concatenates the sources for clang, and clang reports unused
- # things from .h files as if they were defined in a .cpp file.
- cflags += ['-Wno-unused-function', '-Wno-unused-parameter']
+ # nvcc concatenates the sources for clang, and clang reports unused
+ # things from .h files as if they were defined in a .cpp file.
+ cflags += ['-Wno-unused-function', '-Wno-unused-parameter']
if not is_clang(command) and '-fopenmp=libomp' in cflags:
cflags.append('-fopenmp')
@@ -126,26 +126,26 @@ def main():
if compiler_args:
command += ['--compiler-options', ','.join(compiler_args)]
- # --keep is necessary to prevent nvcc from embedding nvcc pid in generated
- # symbols. It makes nvcc use the original file name as the prefix in the
- # generated files (otherwise it also prepends tmpxft_{pid}_00000000-5), and
- # cicc derives the module name from its {input}.cpp1.ii file name.
- command += ['--keep', '--keep-dir', tempfile.mkdtemp(prefix='compile_cuda.py.')]
- # nvcc generates symbols like __fatbinwrap_{len}_{basename}_{hash} where
- # {basename} is {input}.cpp1.ii with non-C chars translated to _, {len} is
- # {basename} length, and {hash} is the hash of first exported symbol in
- # {input}.cpp1.ii if there is one, otherwise it is based on its modification
- # time (converted to string in the local timezone) and the current working
- # directory. To stabilize the names of these symbols we need to fix mtime,
- # timezone, and cwd.
- os.environ['LD_PRELOAD'] = mtime0
- os.environ['TZ'] = 'UTC0' # POSIX fixed offset format.
- os.environ['TZDIR'] = '/var/empty' # Against counterfeit /usr/share/zoneinfo/$TZ.
-
+ # --keep is necessary to prevent nvcc from embedding nvcc pid in generated
+ # symbols. It makes nvcc use the original file name as the prefix in the
+ # generated files (otherwise it also prepends tmpxft_{pid}_00000000-5), and
+ # cicc derives the module name from its {input}.cpp1.ii file name.
+ command += ['--keep', '--keep-dir', tempfile.mkdtemp(prefix='compile_cuda.py.')]
+ # nvcc generates symbols like __fatbinwrap_{len}_{basename}_{hash} where
+ # {basename} is {input}.cpp1.ii with non-C chars translated to _, {len} is
+ # {basename} length, and {hash} is the hash of first exported symbol in
+ # {input}.cpp1.ii if there is one, otherwise it is based on its modification
+ # time (converted to string in the local timezone) and the current working
+ # directory. To stabilize the names of these symbols we need to fix mtime,
+ # timezone, and cwd.
+ os.environ['LD_PRELOAD'] = mtime0
+ os.environ['TZ'] = 'UTC0' # POSIX fixed offset format.
+ os.environ['TZDIR'] = '/var/empty' # Against counterfeit /usr/share/zoneinfo/$TZ.
+
if dump_args:
sys.stdout.write('\n'.join(command))
else:
- sys.exit(subprocess.Popen(command, stdout=sys.stderr, stderr=sys.stderr, cwd='/').wait())
+ sys.exit(subprocess.Popen(command, stdout=sys.stderr, stderr=sys.stderr, cwd='/').wait())
if __name__ == '__main__':
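
The comment block above is the heart of compile_cuda.py: nvcc bakes the pid, the input mtime, the local timezone, and the current working directory into the generated __fatbinwrap_* symbol names, so reproducible builds require pinning all four. A minimal standalone sketch of that stabilization step, assuming an LD_PRELOAD shim path is passed in (run_nvcc_reproducibly and its arguments are illustrative, not part of the script):

    import os
    import subprocess
    import sys
    import tempfile

    def run_nvcc_reproducibly(nvcc_command, mtime0_shim):
        # --keep/--keep-dir make nvcc name intermediates after the input file
        # instead of tmpxft_{pid}_..., removing the pid from derived symbols.
        cmd = list(nvcc_command) + ['--keep', '--keep-dir',
                                    tempfile.mkdtemp(prefix='compile_cuda.py.')]
        env = dict(os.environ)
        env['LD_PRELOAD'] = mtime0_shim   # shim that pins file mtimes (assumed path)
        env['TZ'] = 'UTC0'                # fixed POSIX offset, no DST
        env['TZDIR'] = '/var/empty'       # ignore any local zoneinfo for $TZ
        # Running from '/' keeps the cwd component of the symbol hash constant.
        return subprocess.call(cmd, env=env, cwd='/',
                               stdout=sys.stderr, stderr=sys.stderr)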
diff --git a/build/scripts/f2c.py b/build/scripts/f2c.py
index 7021e1391f..9775895d3b 100644
--- a/build/scripts/f2c.py
+++ b/build/scripts/f2c.py
@@ -3,31 +3,31 @@ import subprocess
import argparse
import os
-
-header = '''\
-#ifdef __GNUC__
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Wunused-parameter"
-#pragma GCC diagnostic ignored "-Wmissing-braces"
-#pragma GCC diagnostic ignored "-Wuninitialized"
-#pragma GCC diagnostic ignored "-Wreturn-type"
-#pragma GCC diagnostic ignored "-Wmissing-field-initializers"
-#endif
-
-'''
-
-footer = '''
-#ifdef __GNUC__
-#pragma GCC diagnostic pop
-#endif
-'''
-
-
-def mkdir_p(directory):
- if not os.path.exists(directory):
- os.makedirs(directory)
-
-
+
+header = '''\
+#ifdef __GNUC__
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wunused-parameter"
+#pragma GCC diagnostic ignored "-Wmissing-braces"
+#pragma GCC diagnostic ignored "-Wuninitialized"
+#pragma GCC diagnostic ignored "-Wreturn-type"
+#pragma GCC diagnostic ignored "-Wmissing-field-initializers"
+#endif
+
+'''
+
+footer = '''
+#ifdef __GNUC__
+#pragma GCC diagnostic pop
+#endif
+'''
+
+
+def mkdir_p(directory):
+ if not os.path.exists(directory):
+ os.makedirs(directory)
+
+
if __name__ == '__main__':
parser = argparse.ArgumentParser()
@@ -36,12 +36,12 @@ if __name__ == '__main__':
parser.add_argument('-o', '--output')
args = parser.parse_args()
- tmpdir = args.output + '.f2c'
- mkdir_p(tmpdir)
+ tmpdir = args.output + '.f2c'
+ mkdir_p(tmpdir)
# should parse includes, really
- p = subprocess.Popen(
- [args.tool, '-w', '-R', '-a', '-I' + os.path.dirname(args.input), '-T' + tmpdir],
- stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
+ p = subprocess.Popen(
+ [args.tool, '-w', '-R', '-a', '-I' + os.path.dirname(args.input), '-T' + tmpdir],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
stdout, stderr = p.communicate(input=open(args.input).read())
ret = p.wait()
@@ -53,6 +53,6 @@ if __name__ == '__main__':
print >>sys.stderr, stderr
with open(args.output, 'w') as f:
- f.write(header)
+ f.write(header)
f.write(stdout)
- f.write(footer)
+ f.write(footer)
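
f2c.py pipes the Fortran source through the f2c tool and brackets the generated C with GCC diagnostic pragmas so that warnings in machine-generated code do not pollute the build log. A condensed sketch of that flow with the diff's +/- noise removed (translate() and the trimmed pragma list are illustrative):

    import subprocess

    HEADER = ('#ifdef __GNUC__\n'
              '#pragma GCC diagnostic push\n'
              '#pragma GCC diagnostic ignored "-Wunused-parameter"\n'
              '#endif\n\n')
    FOOTER = '\n#ifdef __GNUC__\n#pragma GCC diagnostic pop\n#endif\n'

    def translate(f2c_tool, fortran_path, output_path, tmpdir):
        # Feed the Fortran source to f2c on stdin and capture the generated C.
        proc = subprocess.Popen(
            [f2c_tool, '-w', '-R', '-a', '-T' + tmpdir],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
            universal_newlines=True)
        with open(fortran_path) as src:
            c_code, errors = proc.communicate(src.read())
        if proc.wait() != 0:
            raise RuntimeError('f2c failed: ' + errors)
        # Wrap the generated C so its expected warnings are suppressed locally.
        with open(output_path, 'w') as out:
            out.write(HEADER + c_code + FOOTER)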
diff --git a/build/scripts/fetch_from.py b/build/scripts/fetch_from.py
index db4fea50bf..b1fd2eec1f 100755
--- a/build/scripts/fetch_from.py
+++ b/build/scripts/fetch_from.py
@@ -21,26 +21,26 @@ def make_user_agent():
def add_common_arguments(parser):
- parser.add_argument('--copy-to') # used by jbuild in fetch_resource
- parser.add_argument('--rename-to') # used by test_node in inject_mds_resource_to_graph
+ parser.add_argument('--copy-to') # used by jbuild in fetch_resource
+ parser.add_argument('--rename-to') # used by test_node in inject_mds_resource_to_graph
parser.add_argument('--copy-to-dir')
parser.add_argument('--untar-to')
- parser.add_argument('--rename', action='append', default=[], metavar='FILE', help='rename FILE to the corresponding output')
- parser.add_argument('--executable', action='store_true', help='make outputs executable')
+ parser.add_argument('--rename', action='append', default=[], metavar='FILE', help='rename FILE to the corresponding output')
+ parser.add_argument('--executable', action='store_true', help='make outputs executable')
parser.add_argument('--log-path')
parser.add_argument('-v', '--verbose', action='store_true', default=os.environ.get('YA_VERBOSE_FETCHER'), help='increase stderr verbosity')
parser.add_argument('outputs', nargs='*', default=[])
-def ensure_dir(path):
- if not (path == '' or os.path.isdir(path)):
- os.makedirs(path)
-
-
+def ensure_dir(path):
+ if not (path == '' or os.path.isdir(path)):
+ os.makedirs(path)
+
+
# Reference code: library/python/fs/__init__.py
def hardlink_or_copy(src, dst):
- ensure_dir(os.path.dirname(dst))
-
+ ensure_dir(os.path.dirname(dst))
+
if os.name == 'nt':
shutil.copy(src, dst)
else:
@@ -57,7 +57,7 @@ def hardlink_or_copy(src, dst):
def rename_or_copy_and_remove(src, dst):
- ensure_dir(os.path.dirname(dst))
+ ensure_dir(os.path.dirname(dst))
try:
os.rename(src, dst)
@@ -146,7 +146,7 @@ def report_to_snowden(value):
try:
inner()
except Exception as e:
- logging.warning('report_to_snowden failed: %s', e)
+ logging.warning('report_to_snowden failed: %s', e)
def copy_stream(read, *writers, **kwargs):
@@ -307,9 +307,9 @@ def chmod(filename, mode):
raise
-def process(fetched_file, file_name, args, remove=True):
- assert len(args.rename) <= len(args.outputs), (
- 'too few outputs to rename', args.rename, 'into', args.outputs)
+def process(fetched_file, file_name, args, remove=True):
+ assert len(args.rename) <= len(args.outputs), (
+ 'too few outputs to rename', args.rename, 'into', args.outputs)
# Forbid changes to the loaded resource
chmod(fetched_file, 0o444)
@@ -317,21 +317,21 @@ def process(fetched_file, file_name, args, remove=True):
if not os.path.isfile(fetched_file):
raise ResourceIsDirectoryError('Resource must be a file, not a directory: %s' % fetched_file)
- if args.copy_to:
- hardlink_or_copy(fetched_file, args.copy_to)
- if not args.outputs:
- args.outputs = [args.copy_to]
+ if args.copy_to:
+ hardlink_or_copy(fetched_file, args.copy_to)
+ if not args.outputs:
+ args.outputs = [args.copy_to]
- if args.rename_to:
- args.rename.append(fetched_file)
- if not args.outputs:
- args.outputs = [args.rename_to]
+ if args.rename_to:
+ args.rename.append(fetched_file)
+ if not args.outputs:
+ args.outputs = [args.rename_to]
- if args.copy_to_dir:
- hardlink_or_copy(fetched_file, os.path.join(args.copy_to_dir, file_name))
+ if args.copy_to_dir:
+ hardlink_or_copy(fetched_file, os.path.join(args.copy_to_dir, file_name))
if args.untar_to:
- ensure_dir(args.untar_to)
+ ensure_dir(args.untar_to)
# Extract only requested files
try:
with tarfile.open(fetched_file, mode='r:*') as tar:
@@ -346,14 +346,14 @@ def process(fetched_file, file_name, args, remove=True):
logging.exception(e)
raise ResourceUnpackingError('File {} cannot be untared'.format(fetched_file))
- for src, dst in zip(args.rename, args.outputs):
- if src == 'RESOURCE':
- src = fetched_file
- if os.path.abspath(src) == os.path.abspath(fetched_file):
- logging.info('Copying %s to %s', src, dst)
- hardlink_or_copy(src, dst)
- else:
- logging.info('Renaming %s to %s', src, dst)
+ for src, dst in zip(args.rename, args.outputs):
+ if src == 'RESOURCE':
+ src = fetched_file
+ if os.path.abspath(src) == os.path.abspath(fetched_file):
+ logging.info('Copying %s to %s', src, dst)
+ hardlink_or_copy(src, dst)
+ else:
+ logging.info('Renaming %s to %s', src, dst)
if os.path.exists(dst):
raise ResourceUnpackingError("Target file already exists ({} -> {})".format(src, dst))
if remove:
@@ -361,15 +361,15 @@ def process(fetched_file, file_name, args, remove=True):
else:
hardlink_or_copy(src, dst)
- for path in args.outputs:
- if not os.path.exists(path):
- raise OutputNotExistError('Output does not exist: %s' % os.path.abspath(path))
- if not os.path.isfile(path):
- raise OutputIsDirectoryError('Output must be a file, not a directory: %s' % os.path.abspath(path))
- if args.executable:
+ for path in args.outputs:
+ if not os.path.exists(path):
+ raise OutputNotExistError('Output does not exist: %s' % os.path.abspath(path))
+ if not os.path.isfile(path):
+ raise OutputIsDirectoryError('Output must be a file, not a directory: %s' % os.path.abspath(path))
+ if args.executable:
chmod(path, os.stat(path).st_mode | 0o111)
- if os.path.abspath(path) == os.path.abspath(fetched_file):
- remove = False
+ if os.path.abspath(path) == os.path.abspath(fetched_file):
+ remove = False
- if remove:
- os.remove(fetched_file)
+ if remove:
+ os.remove(fetched_file)
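
A recurring pattern in fetch_from.py is ensure_dir() followed by hardlink_or_copy(): create the destination's parent, then prefer a cheap hard link and fall back to a real copy. Only the Windows branch is visible in the hunk above; the POSIX fallback condition is not shown, so the errno set below is an assumption:

    import errno
    import os
    import shutil

    def ensure_dir(path):
        if not (path == '' or os.path.isdir(path)):
            os.makedirs(path)

    def hardlink_or_copy(src, dst):
        ensure_dir(os.path.dirname(dst))
        if os.name == 'nt':
            shutil.copy(src, dst)
            return
        try:
            os.link(src, dst)
        except OSError as e:
            # Cross-device links, permission limits, etc. degrade to a plain copy
            # (assumed error set; the script's exact handling lies outside this hunk).
            if e.errno in (errno.EEXIST, errno.EXDEV, errno.EMLINK, errno.EPERM, errno.EACCES):
                shutil.copy(src, dst)
            else:
                raise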
diff --git a/build/scripts/fetch_from_sandbox.py b/build/scripts/fetch_from_sandbox.py
index a99542e174..e962637eab 100755
--- a/build/scripts/fetch_from_sandbox.py
+++ b/build/scripts/fetch_from_sandbox.py
@@ -91,14 +91,14 @@ def _urlopen(url, data=None, headers=None):
return urllib2.urlopen(request, timeout=tout).read()
except urllib2.HTTPError as e:
- logging.warning('failed to fetch URL %s with HTTP code %d: %s', url, e.code, e)
+ logging.warning('failed to fetch URL %s with HTTP code %d: %s', url, e.code, e)
retry_after = int(e.headers.get('Retry-After', str(retry_after)))
if e.code not in TEMPORARY_ERROR_CODES:
raise
except Exception as e:
- logging.warning('failed to fetch URL %s: %s', url, e)
+ logging.warning('failed to fetch URL %s: %s', url, e)
if i + 1 == n:
raise e
@@ -194,10 +194,10 @@ def fetch(resource_id, custom_fetcher):
except UnsupportedProtocolException:
pass
except subprocess.CalledProcessError as e:
- logging.warning('failed to fetch resource %s with subprocess: %s', resource_id, e)
+ logging.warning('failed to fetch resource %s with subprocess: %s', resource_id, e)
time.sleep(i)
except urllib2.HTTPError as e:
- logging.warning('failed to fetch resource %s with HTTP code %d: %s', resource_id, e.code, e)
+ logging.warning('failed to fetch resource %s with HTTP code %d: %s', resource_id, e.code, e)
if e.code not in TEMPORARY_ERROR_CODES:
exc_info = exc_info or sys.exc_info()
time.sleep(i)
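
Both hunks above follow the same retry discipline: log the failure, honour Retry-After when present, keep retrying only for codes in TEMPORARY_ERROR_CODES, and re-raise otherwise. A generic sketch of that loop (fetch_with_retries and the code list are illustrative; the script's actual values are not shown here):

    import logging
    import time

    TEMPORARY_ERROR_CODES = (429, 500, 502, 503, 504)   # assumed set

    def fetch_with_retries(do_request, attempts=5, retry_after=1):
        # do_request() returns the payload or raises an error that may carry
        # .code and .headers, as urllib2.HTTPError does.
        last_error = None
        for attempt in range(attempts):
            if attempt:
                time.sleep(retry_after)
            try:
                return do_request()
            except Exception as e:
                last_error = e
                logging.warning('fetch attempt %d failed: %s', attempt + 1, e)
                code = getattr(e, 'code', None)
                if code is not None:
                    if code not in TEMPORARY_ERROR_CODES:
                        raise          # permanent HTTP error: stop retrying
                    headers = getattr(e, 'headers', None) or {}
                    retry_after = int(headers.get('Retry-After', retry_after))
        raise last_error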
diff --git a/build/scripts/gen_py3_reg.py b/build/scripts/gen_py3_reg.py
index 149c094898..5f3d026aee 100644
--- a/build/scripts/gen_py3_reg.py
+++ b/build/scripts/gen_py3_reg.py
@@ -1,9 +1,9 @@
import sys
template = '''
-struct PyObject;
-extern "C" int PyImport_AppendInittab(const char* name, PyObject* (*initfunc)());
-extern "C" PyObject* {1}();
+struct PyObject;
+extern "C" int PyImport_AppendInittab(const char* name, PyObject* (*initfunc)());
+extern "C" PyObject* {1}();
namespace {
struct TRegistrar {
diff --git a/build/scripts/gen_py_reg.py b/build/scripts/gen_py_reg.py
index 1560135ae8..02f45fe10d 100644
--- a/build/scripts/gen_py_reg.py
+++ b/build/scripts/gen_py_reg.py
@@ -1,30 +1,30 @@
-import sys
-
+import sys
+
template = '''
-extern "C" void PyImport_AppendInittab(const char* name, void (*fn)(void));
-extern "C" void {1}();
+extern "C" void PyImport_AppendInittab(const char* name, void (*fn)(void));
+extern "C" void {1}();
namespace {
struct TRegistrar {
inline TRegistrar() {
- PyImport_AppendInittab("{0}", {1});
+ PyImport_AppendInittab("{0}", {1});
}
} REG;
}
'''
-def mangle(name):
- if '.' not in name:
- return name
- return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))
-
+def mangle(name):
+ if '.' not in name:
+ return name
+ return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))
+
if __name__ == '__main__':
if len(sys.argv) != 3:
print >>sys.stderr, 'Usage: <path/to/gen_py_reg.py> <python_module_name> <output_file>'
print >>sys.stderr, 'Passed: ' + ' '.join(sys.argv)
sys.exit(1)
-
+
with open(sys.argv[2], 'w') as f:
modname = sys.argv[1]
initname = 'init' + mangle(modname)
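
gen_py_reg.py (and its Python 3 sibling gen_py3_reg.py) emits a tiny C++ stub that registers a built-in extension module via PyImport_AppendInittab. Dots are illegal in C identifiers, so mangle() encodes a dotted module path as length-prefixed segments; a quick check of what it produces:

    def mangle(name):
        # 'pkg.sub.mod' -> '3pkg3sub3mod'; names without dots pass through unchanged.
        if '.' not in name:
            return name
        return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))

    assert mangle('mod') == 'mod'
    assert mangle('pkg.sub.mod') == '3pkg3sub3mod'
    assert 'init' + mangle('pkg.sub.mod') == 'init3pkg3sub3mod'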
diff --git a/build/scripts/link_dyn_lib.py b/build/scripts/link_dyn_lib.py
index 23487f5c1e..1585cd60f9 100644
--- a/build/scripts/link_dyn_lib.py
+++ b/build/scripts/link_dyn_lib.py
@@ -19,19 +19,19 @@ def shlex_join(cmd):
def parse_export_file(p):
with open(p, 'r') as f:
- for l in f:
+ for l in f:
l = l.strip()
if l and '#' not in l:
- words = l.split()
- if len(words) == 2 and words[0] == 'linux_version':
- yield {'linux_version': words[1]}
- elif len(words) == 2:
- yield {'lang': words[0], 'sym': words[1]}
- elif len(words) == 1:
- yield {'lang': 'C', 'sym': words[0]}
+ words = l.split()
+ if len(words) == 2 and words[0] == 'linux_version':
+ yield {'linux_version': words[1]}
+ elif len(words) == 2:
+ yield {'lang': words[0], 'sym': words[1]}
+ elif len(words) == 1:
+ yield {'lang': 'C', 'sym': words[0]}
else:
- raise Exception('unsupported exports line: ' + l)
+ raise Exception('unsupported exports line: ' + l)
def to_c(sym):
@@ -124,10 +124,10 @@ def fix_windows_param(ex):
return ['/DEF:{}'.format(def_file.name)]
-musl_libs = '-lc', '-lcrypt', '-ldl', '-lm', '-lpthread', '-lrt', '-lutil'
-
-
-def fix_cmd(arch, musl, c):
+musl_libs = '-lc', '-lcrypt', '-ldl', '-lm', '-lpthread', '-lrt', '-lutil'
+
+
+def fix_cmd(arch, musl, c):
if arch == 'WINDOWS':
prefix = '/DEF:'
f = fix_windows_param
@@ -139,9 +139,9 @@ def fix_cmd(arch, musl, c):
f = lambda x: fix_gnu_param(arch, x)
def do_fix(p):
- if musl and p in musl_libs:
- return []
-
+ if musl and p in musl_libs:
+ return []
+
if p.startswith(prefix) and p.endswith('.exports'):
fname = p[len(prefix):]
@@ -166,7 +166,7 @@ def parse_args():
parser.add_option('--soname')
parser.add_option('--fix-elf')
parser.add_option('--linker-output')
- parser.add_option('--musl', action='store_true')
+ parser.add_option('--musl', action='store_true')
parser.add_option('--whole-archive-peers', action='append')
parser.add_option('--whole-archive-libs', action='append')
return parser.parse_args()
@@ -178,7 +178,7 @@ if __name__ == '__main__':
assert opts.arch
assert opts.target
- cmd = fix_cmd(opts.arch, opts.musl, args)
+ cmd = fix_cmd(opts.arch, opts.musl, args)
cmd = ProcessWholeArchiveOption(opts.arch, opts.whole_archive_peers, opts.whole_archive_libs).construct_cmd(cmd)
if opts.linker_output:
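
parse_export_file() in link_dyn_lib.py accepts three line shapes in an .exports file; the parser logic is fully visible in the hunk above, so the sketch below only restates it line by line with example inputs (the sample symbols are illustrative):

    def parse_export_line(line):
        # 'linux_version <ver>'  -> version-script directive
        # '<lang> <symbol>'      -> symbol with an explicit language tag
        # '<symbol>'             -> bare symbol, assumed to be C
        words = line.split()
        if len(words) == 2 and words[0] == 'linux_version':
            return {'linux_version': words[1]}
        if len(words) == 2:
            return {'lang': words[0], 'sym': words[1]}
        if len(words) == 1:
            return {'lang': 'C', 'sym': words[0]}
        raise Exception('unsupported exports line: ' + line)

    assert parse_export_line('linux_version 2.17') == {'linux_version': '2.17'}
    assert parse_export_line('C++ NMyLib::*') == {'lang': 'C++', 'sym': 'NMyLib::*'}
    assert parse_export_line('my_c_symbol') == {'lang': 'C', 'sym': 'my_c_symbol'}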
diff --git a/build/scripts/py_compile.py b/build/scripts/py_compile.py
index 936dbe8816..b1f6e0b7db 100755
--- a/build/scripts/py_compile.py
+++ b/build/scripts/py_compile.py
@@ -8,15 +8,15 @@ import sys
def main():
- srcpathx, in_fname, out_fname = sys.argv[1:]
- srcpath = srcpathx[:-1]
-
+ srcpathx, in_fname, out_fname = sys.argv[1:]
+ srcpath = srcpathx[:-1]
+
with open(in_fname, 'r') as in_file:
source = in_file.read()
-
- code = compile(source, srcpath, 'exec', dont_inherit=True)
-
- with open(out_fname, 'wb') as out_file:
+
+ code = compile(source, srcpath, 'exec', dont_inherit=True)
+
+ with open(out_fname, 'wb') as out_file:
marshal.dump(code, out_file)
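
py_compile.py byte-compiles a source file under a caller-supplied display path (argv[1] with its last character stripped) and writes the raw marshalled code object; no .pyc magic/header is emitted in this script. A minimal sketch of the same step (byte_compile is an illustrative name):

    import marshal

    def byte_compile(source_path, display_path, output_path):
        with open(source_path, 'r') as f:
            source = f.read()
        # dont_inherit=True keeps this script's own __future__ flags out of the
        # compiled module; display_path becomes co_filename in tracebacks.
        code = compile(source, display_path, 'exec', dont_inherit=True)
        with open(output_path, 'wb') as out:
            marshal.dump(code, out)     # raw code object, no .pyc header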
diff --git a/build/scripts/resolve_java_srcs.py b/build/scripts/resolve_java_srcs.py
index a2e6c20012..411403c7fd 100644
--- a/build/scripts/resolve_java_srcs.py
+++ b/build/scripts/resolve_java_srcs.py
@@ -16,7 +16,7 @@ def list_all_files(directory, prefix='/', hidden_files=False):
def pattern_to_regexp(p):
return '^' + \
- ('/' if not p.startswith('**') else '') + \
+ ('/' if not p.startswith('**') else '') + \
re.escape(p).replace(
r'\*\*\/', '[_DIR_]'
).replace(
diff --git a/build/scripts/yndexer.py b/build/scripts/yndexer.py
index a38e28ba99..3737b0cce1 100644
--- a/build/scripts/yndexer.py
+++ b/build/scripts/yndexer.py
@@ -2,11 +2,11 @@ import sys
import subprocess
import threading
import os
-import re
+import re
-rx_resource_dir = re.compile(r'libraries: =([^:]*)')
-
+rx_resource_dir = re.compile(r'libraries: =([^:]*)')
+
def _try_to_kill(process):
try:
@@ -54,22 +54,22 @@ if __name__ == '__main__':
subprocess.check_call(tail_args)
- clang = tail_args[0]
- out = subprocess.check_output([clang, '-print-search-dirs'])
- resource_dir = rx_resource_dir.search(out).group(1)
-
- yndexer_args = [
- yndexer, input_file,
- '-pb2',
- '-i', 'arc::{}'.format(arc_root),
+ clang = tail_args[0]
+ out = subprocess.check_output([clang, '-print-search-dirs'])
+ resource_dir = rx_resource_dir.search(out).group(1)
+
+ yndexer_args = [
+ yndexer, input_file,
+ '-pb2',
+ '-i', 'arc::{}'.format(arc_root),
'-i', 'build::{}'.format(build_root),
- '-i', '.IGNORE::/',
- '-o', os.path.dirname(output_file),
- '-n', os.path.basename(output_file).rsplit('.ydx.pb2', 1)[0],
- '--'
- ] + tail_args + [
- '-resource-dir', resource_dir,
- ]
+ '-i', '.IGNORE::/',
+ '-o', os.path.dirname(output_file),
+ '-n', os.path.basename(output_file).rsplit('.ydx.pb2', 1)[0],
+ '--'
+ ] + tail_args + [
+ '-resource-dir', resource_dir,
+ ]
process = Process(yndexer_args)
result = process.wait(timeout=timeout)
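
The block moved in the last hunk derives clang's resource directory from `clang -print-search-dirs` output and passes it to the yndexer alongside the include-root mappings. A standalone sketch of that extraction (clang_resource_dir is an illustrative helper name):

    import re
    import subprocess

    rx_resource_dir = re.compile(r'libraries: =([^:]*)')

    def clang_resource_dir(clang):
        # -print-search-dirs emits a 'libraries: =<path1>:<path2>:...' line;
        # the script takes the first path as the resource dir for the indexer.
        out = subprocess.check_output([clang, '-print-search-dirs'],
                                      universal_newlines=True)
        match = rx_resource_dir.search(out)
        if match is None:
            raise RuntimeError('no "libraries:" line in clang -print-search-dirs output')
        return match.group(1)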