author    | orivej <orivej@yandex-team.ru>               | 2022-02-10 16:45:01 +0300
committer | Daniil Cherednik <dcherednik@yandex-team.ru> | 2022-02-10 16:45:01 +0300
commit    | 2d37894b1b037cf24231090eda8589bbb44fb6fc (patch)
tree      | be835aa92c6248212e705f25388ebafcf84bc7a1 /build/plugins
parent    | 718c552901d703c502ccbefdfc3c9028d608b947 (diff)
download  | ydb-2d37894b1b037cf24231090eda8589bbb44fb6fc.tar.gz
Restoring authorship annotation for <orivej@yandex-team.ru>. Commit 2 of 2.
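Every hunk below follows the same mechanical pattern: lines are removed and immediately re-added with identical content, so no file actually changes, but `git blame` attributes those lines to orivej again. The diffstat confirms this — insertions and deletions match exactly (325 each). As a minimal sketch (the helper name and the whitespace-insensitive comparison are illustrative assumptions, not part of the repository), such a patch could be sanity-checked like this:

```python
def is_pure_reattribution(patch_text):
    """Return True if a unified diff only removes lines and re-adds them
    unchanged (ignoring trailing whitespace), i.e. it only shifts blame
    attribution without changing file contents."""
    removed, added = [], []
    for line in patch_text.splitlines():
        if line.startswith('---') or line.startswith('+++'):
            continue  # per-file diff headers, not content lines
        if line.startswith('-'):
            removed.append(line[1:].rstrip())
        elif line.startswith('+'):
            added.append(line[1:].rstrip())
    return sorted(removed) == sorted(added)
```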
Diffstat (limited to 'build/plugins')
-rw-r--r-- | build/plugins/_common.py          | 22
-rw-r--r-- | build/plugins/_test_const.py      | 2
-rw-r--r-- | build/plugins/pybuild.py          | 368
-rw-r--r-- | build/plugins/res.py              | 62
-rw-r--r-- | build/plugins/ssqls.py            | 80
-rw-r--r-- | build/plugins/swig.py             | 52
-rw-r--r-- | build/plugins/tests/test_ssqls.py | 46
-rw-r--r-- | build/plugins/tests/ya.make       | 2
-rw-r--r-- | build/plugins/ya.make             | 2
-rw-r--r-- | build/plugins/yql_python_udf.py   | 2
-rw-r--r-- | build/plugins/ytest.py            | 12
11 files changed, 325 insertions, 325 deletions
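Most of the restored lines live in build/plugins/pybuild.py, including the `mangle()` helper that PY_REGISTER uses to rename a module's init function; the restored docstring gives the example CFLAGS(-Dinitmodule_name=init7package11module_name). A standalone sketch of that behavior — the function body is copied from the diff below, the asserts are added for illustration:

```python
def mangle(name):
    # Length-prefix every dotted component; plain names pass through unchanged
    # (same logic as mangle() in build/plugins/pybuild.py).
    if '.' not in name:
        return name
    return ''.join('{}{}'.format(len(s), s) for s in name.split('.'))


# 'package.module_name' -> '7package11module_name', matching the
# -Dinitmodule_name=init7package11module_name example in the PY_REGISTER docstring.
assert mangle('package.module_name') == '7package11module_name'
assert mangle('simple') == 'simple'
```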
diff --git a/build/plugins/_common.py b/build/plugins/_common.py index 5d276b54d7..2f831a94db 100644 --- a/build/plugins/_common.py +++ b/build/plugins/_common.py @@ -115,10 +115,10 @@ def resolve_to_abs_path(path, source_root, build_root): return path -def resolve_to_ymake_path(path): - return resolve_to_abs_path(path, '${ARCADIA_ROOT}', '${ARCADIA_BUILD_ROOT}') - - +def resolve_to_ymake_path(path): + return resolve_to_abs_path(path, '${ARCADIA_ROOT}', '${ARCADIA_BUILD_ROOT}') + + def join_intl_paths(*args): return '/'.join(args) @@ -135,17 +135,17 @@ def make_tuples(arg_list): return list(tpl()) -def resolve_includes(unit, src, paths): - return unit.resolve_include([src] + paths) if paths else [] - - +def resolve_includes(unit, src, paths): + return unit.resolve_include([src] + paths) if paths else [] + + def rootrel_arc_src(src, unit): if src.startswith('${ARCADIA_ROOT}/'): return src[16:] - if src.startswith('${ARCADIA_BUILD_ROOT}/'): - return src[22:] - + if src.startswith('${ARCADIA_BUILD_ROOT}/'): + return src[22:] + elif src.startswith('${CURDIR}/'): return unit.path()[3:] + '/' + src[10:] diff --git a/build/plugins/_test_const.py b/build/plugins/_test_const.py index 9cb249ffcd..0d03cc3d17 100644 --- a/build/plugins/_test_const.py +++ b/build/plugins/_test_const.py @@ -314,7 +314,7 @@ class _StatusColorMap(object): 'timeout': Highlight.BAD, 'flaky': Highlight.ALTERNATIVE3, 'xfail': Highlight.WARNING, - 'xpass': Highlight.WARNING, + 'xpass': Highlight.WARNING, 'diff': Highlight.BAD, 'internal': Highlight.BAD, 'deselected': Highlight.UNIMPORTANT, diff --git a/build/plugins/pybuild.py b/build/plugins/pybuild.py index 7348d0630f..f32a2d39a0 100644 --- a/build/plugins/pybuild.py +++ b/build/plugins/pybuild.py @@ -2,20 +2,20 @@ import os import collections from hashlib import md5 -import ymake +import ymake from _common import stripext, rootrel_arc_src, tobuilddir, listid, resolve_to_ymake_path, generate_chunks, pathid - + YA_IDE_VENV_VAR = 'YA_IDE_VENV' PY_NAMESPACE_PREFIX = 'py/namespace' BUILTIN_PROTO = 'builtin_proto' -def is_arc_src(src, unit): - return ( - src.startswith('${ARCADIA_ROOT}/') or - src.startswith('${CURDIR}/') or - unit.resolve_arc_path(src).startswith('$S/') - ) +def is_arc_src(src, unit): + return ( + src.startswith('${ARCADIA_ROOT}/') or + src.startswith('${CURDIR}/') or + unit.resolve_arc_path(src).startswith('$S/') + ) def is_extended_source_search_enabled(path, unit): if not is_arc_src(path, unit): @@ -24,10 +24,10 @@ def is_extended_source_search_enabled(path, unit): return False return True -def to_build_root(path, unit): - if is_arc_src(path, unit): - return '${ARCADIA_BUILD_ROOT}/' + rootrel_arc_src(path, unit) - return path +def to_build_root(path, unit): + if is_arc_src(path, unit): + return '${ARCADIA_BUILD_ROOT}/' + rootrel_arc_src(path, unit) + return path def uniq_suffix(path, unit): upath = unit.path() @@ -45,22 +45,22 @@ def pb2_arg(suf, path, mod, unit): def proto_arg(path, mod, unit): return '{}.proto={}'.format(stripext(to_build_root(path, unit)), mod) - + def pb_cc_arg(suf, path, unit): return '{}{suf}'.format(stripext(to_build_root(path, unit)), suf=suf) def ev_cc_arg(path, unit): return '{}.ev.pb.cc'.format(stripext(to_build_root(path, unit))) -def ev_arg(path, mod, unit): +def ev_arg(path, mod, unit): return '{}__int___ev_pb2.py={}_ev_pb2'.format(stripext(to_build_root(path, unit)), mod) - -def mangle(name): - if '.' not in name: - return name - return ''.join('{}{}'.format(len(s), s) for s in name.split('.')) - - + +def mangle(name): + if '.' 
not in name: + return name + return ''.join('{}{}'.format(len(s), s) for s in name.split('.')) + + def parse_pyx_includes(filename, path, source_root, seen=None): normpath = lambda *x: os.path.normpath(os.path.join(*x)) @@ -115,7 +115,7 @@ def add_python_lint_checks(unit, py_ver, files): resolved_files = [] for path in files: resolved = unit.resolve_arc_path([path]) - if resolved.startswith('$S'): # path was resolved as source file. + if resolved.startswith('$S'): # path was resolved as source file. resolved_files.append(resolved) return resolved_files @@ -156,18 +156,18 @@ def py_program(unit, py3): Documentation: https://wiki.yandex-team.ru/devtools/commandsandvars/py_srcs/#modulpyprogramimakrospymain """ if py3: - peers = ['library/python/runtime_py3/main'] - if unit.get('PYTHON_SQLITE3') != 'no': - peers.append('contrib/tools/python3/src/Modules/_sqlite') + peers = ['library/python/runtime_py3/main'] + if unit.get('PYTHON_SQLITE3') != 'no': + peers.append('contrib/tools/python3/src/Modules/_sqlite') else: - peers = ['library/python/runtime/main'] - if unit.get('PYTHON_SQLITE3') != 'no': - peers.append('contrib/tools/python/src/Modules/_sqlite') - unit.onpeerdir(peers) - if unit.get('MODULE_TYPE') == 'PROGRAM': # can not check DLL - unit.onadd_check_py_imports() - - + peers = ['library/python/runtime/main'] + if unit.get('PYTHON_SQLITE3') != 'no': + peers.append('contrib/tools/python/src/Modules/_sqlite') + unit.onpeerdir(peers) + if unit.get('MODULE_TYPE') == 'PROGRAM': # can not check DLL + unit.onadd_check_py_imports() + + def onpy_srcs(unit, *args): """ @usage PY_SRCS({| CYTHON_C} { | TOP_LEVEL | NAMESPACE ns} Files...) @@ -188,52 +188,52 @@ def onpy_srcs(unit, *args): PY_REGISTER honors Python2 and Python3 differences and adjusts itself to Python version of a current module Documentation: https://wiki.yandex-team.ru/arcadia/python/pysrcs/#modulipylibrarypy3libraryimakrospysrcs """ - # Each file arg must either be a path, or "${...}/buildpath=modname", where - # "${...}/buildpath" part will be used as a file source in a future macro, - # and "modname" will be used as a module name. + # Each file arg must either be a path, or "${...}/buildpath=modname", where + # "${...}/buildpath" part will be used as a file source in a future macro, + # and "modname" will be used as a module name. - upath = unit.path()[3:] + upath = unit.path()[3:] py3 = is_py3(unit) py_main_only = unit.get('PROCESS_PY_MAIN_ONLY') - with_py = not unit.get('PYBUILD_NO_PY') - with_pyc = not unit.get('PYBUILD_NO_PYC') + with_py = not unit.get('PYBUILD_NO_PY') + with_pyc = not unit.get('PYBUILD_NO_PYC') in_proto_library = unit.get('PY_PROTO') or unit.get('PY3_PROTO') venv = unit.get(YA_IDE_VENV_VAR) need_gazetteer_peerdir = False trim = 0 - if not upath.startswith('contrib/tools/python') and not upath.startswith('library/python/runtime') and unit.get('NO_PYTHON_INCLS') != 'yes': - unit.onpeerdir(['contrib/libs/python']) + if not upath.startswith('contrib/tools/python') and not upath.startswith('library/python/runtime') and unit.get('NO_PYTHON_INCLS') != 'yes': + unit.onpeerdir(['contrib/libs/python']) - unit_needs_main = unit.get('MODULE_TYPE') in ('PROGRAM', 'DLL') - if unit_needs_main: + unit_needs_main = unit.get('MODULE_TYPE') in ('PROGRAM', 'DLL') + if unit_needs_main: py_program(unit, py3) py_namespace_value = unit.get('PY_NAMESPACE_VALUE') if py_namespace_value == ".": ns = "" else: - ns = (unit.get('PY_NAMESPACE_VALUE') or upath.replace('/', '.')) + '.' 
+ ns = (unit.get('PY_NAMESPACE_VALUE') or upath.replace('/', '.')) + '.' cython_coverage = unit.get('CYTHON_COVERAGE') == 'yes' - cythonize_py = False + cythonize_py = False optimize_proto = unit.get('OPTIMIZE_PY_PROTOS_FLAG') == 'yes' - cython_directives = [] + cython_directives = [] if cython_coverage: cython_directives += ['-X', 'linetrace=True'] - pyxs_c = [] - pyxs_c_h = [] - pyxs_c_api_h = [] - pyxs_cpp = [] - pyxs = pyxs_cpp - swigs_c = [] - swigs_cpp = [] - swigs = swigs_cpp - pys = [] - protos = [] - evs = [] + pyxs_c = [] + pyxs_c_h = [] + pyxs_c_api_h = [] + pyxs_cpp = [] + pyxs = pyxs_cpp + swigs_c = [] + swigs_cpp = [] + swigs = swigs_cpp + pys = [] + protos = [] + evs = [] fbss = [] py_namespaces = {} @@ -246,48 +246,48 @@ def onpy_srcs(unit, *args): dump_name = '{}-{}.dump'.format(pid, tid) dump_output = open(os.path.join(dump_dir, dump_name), 'a') - args = iter(args) - for arg in args: - # Namespace directives. - if arg == 'TOP_LEVEL': - ns = '' - elif arg == 'NAMESPACE': - ns = next(args) + '.' - # Cython directives. - elif arg == 'CYTHON_C': - pyxs = pyxs_c - elif arg == 'CYTHON_C_H': - pyxs = pyxs_c_h - elif arg == 'CYTHON_C_API_H': - pyxs = pyxs_c_api_h - elif arg == 'CYTHON_CPP': - pyxs = pyxs_cpp - elif arg == 'CYTHON_DIRECTIVE': - cython_directives += ['-X', next(args)] - elif arg == 'CYTHONIZE_PY': - cythonize_py = True - # SWIG. - elif arg == 'SWIG_C': - swigs = swigs_c - elif arg == 'SWIG_CPP': - swigs = swigs_cpp - # Unsupported but legal PROTO_LIBRARY arguments. + args = iter(args) + for arg in args: + # Namespace directives. + if arg == 'TOP_LEVEL': + ns = '' + elif arg == 'NAMESPACE': + ns = next(args) + '.' + # Cython directives. + elif arg == 'CYTHON_C': + pyxs = pyxs_c + elif arg == 'CYTHON_C_H': + pyxs = pyxs_c_h + elif arg == 'CYTHON_C_API_H': + pyxs = pyxs_c_api_h + elif arg == 'CYTHON_CPP': + pyxs = pyxs_cpp + elif arg == 'CYTHON_DIRECTIVE': + cython_directives += ['-X', next(args)] + elif arg == 'CYTHONIZE_PY': + cythonize_py = True + # SWIG. + elif arg == 'SWIG_C': + swigs = swigs_c + elif arg == 'SWIG_CPP': + swigs = swigs_cpp + # Unsupported but legal PROTO_LIBRARY arguments. elif arg == 'GLOBAL' or not in_proto_library and arg.endswith('.gztproto'): - pass + pass elif arg == '_MR': # GLOB support: convert arcadia-root-relative paths to module-relative # srcs are assumed to start with ${ARCADIA_ROOT} trim = len(unit.path()) + 14 - # Sources. - else: - main_mod = arg == 'MAIN' - if main_mod: - arg = next(args) - - if '=' in arg: + # Sources. 
+ else: + main_mod = arg == 'MAIN' + if main_mod: + arg = next(args) + + if '=' in arg: main_py = False - path, mod = arg.split('=', 1) - else: + path, mod = arg.split('=', 1) + else: if trim: arg = arg[trim:] if arg.endswith('.gztproto'): @@ -296,11 +296,11 @@ def onpy_srcs(unit, *args): else: path = arg main_py = (path == '__main__.py' or path.endswith('/__main__.py')) - if not py3 and unit_needs_main and main_py: - mod = '__main__' - else: - if arg.startswith('../'): - ymake.report_configure_error('PY_SRCS item starts with "../": {!r}'.format(arg)) + if not py3 and unit_needs_main and main_py: + mod = '__main__' + else: + if arg.startswith('../'): + ymake.report_configure_error('PY_SRCS item starts with "../": {!r}'.format(arg)) if arg.startswith('/'): ymake.report_configure_error('PY_SRCS item starts with "/": {!r}'.format(arg)) continue @@ -312,47 +312,47 @@ def onpy_srcs(unit, *args): py_namespaces.setdefault(mod_root_path, set()).add(ns if ns else '.') mod = ns + mod_name - if main_mod: + if main_mod: py_main(unit, mod + ":main") - elif py3 and unit_needs_main and main_py: + elif py3 and unit_needs_main and main_py: py_main(unit, mod) - + if py_main_only: continue if py3 and mod == '__main__': ymake.report_configure_error('TOP_LEVEL __main__.py is not allowed in PY3_PROGRAM') - pathmod = (path, mod) + pathmod = (path, mod) if dump_output is not None: dump_output.write('{path}\t{module}\n'.format(path=rootrel_arc_src(path, unit), module=mod)) - if path.endswith('.py'): - if cythonize_py: - pyxs.append(pathmod) - else: - pys.append(pathmod) - elif path.endswith('.pyx'): - pyxs.append(pathmod) - elif path.endswith('.proto'): - protos.append(pathmod) - elif path.endswith('.ev'): - evs.append(pathmod) - elif path.endswith('.swg'): - swigs.append(pathmod) + if path.endswith('.py'): + if cythonize_py: + pyxs.append(pathmod) + else: + pys.append(pathmod) + elif path.endswith('.pyx'): + pyxs.append(pathmod) + elif path.endswith('.proto'): + protos.append(pathmod) + elif path.endswith('.ev'): + evs.append(pathmod) + elif path.endswith('.swg'): + swigs.append(pathmod) # Allow pyi files in PY_SRCS for autocomplete in IDE, but skip it during building elif path.endswith('.pyi'): pass elif path.endswith('.fbs'): fbss.append(pathmod) - else: - ymake.report_configure_error('in PY_SRCS: unrecognized arg {!r}'.format(path)) - + else: + ymake.report_configure_error('in PY_SRCS: unrecognized arg {!r}'.format(path)) + if dump_output is not None: dump_output.close() - if pyxs: + if pyxs: files2res = set() # Include map stores files which were included in the processing pyx file, # to be able to find source code of the included file inside generated file @@ -385,30 +385,30 @@ def onpy_srcs(unit, *args): (pyxs_c_h, unit.on_buildwith_cython_c_h, ".c", True), (pyxs_c_api_h, unit.on_buildwith_cython_c_api_h, ".c", True), (pyxs_cpp, unit.on_buildwith_cython_cpp_dep, ".cpp", False), - ]: - for path, mod in pyxs: + ]: + for path, mod in pyxs: filename = rootrel_arc_src(path, unit) - cython_args = [path] - - dep = path - if path.endswith('.py'): - pxd = '/'.join(mod.split('.')) + '.pxd' - if unit.resolve_arc_path(pxd): - dep = pxd - cython_args.append(dep) - - cython_args += [ - '--module-name', mod, - '--init-suffix', mangle(mod), + cython_args = [path] + + dep = path + if path.endswith('.py'): + pxd = '/'.join(mod.split('.')) + '.pxd' + if unit.resolve_arc_path(pxd): + dep = pxd + cython_args.append(dep) + + cython_args += [ + '--module-name', mod, + '--init-suffix', mangle(mod), '--source-root', 
'${ARCADIA_ROOT}', # set arcadia root relative __file__ for generated modules '-X', 'set_initial_path={}'.format(filename), ] + cython_directives - - cython(cython_args) + + cython(cython_args) py_register(unit, mod, py3) process_pyx(filename, path, out_suffix, noext) - + if files2res: # Compile original and generated sources into target for proper cython coverage calculation unit.onresource_files([x for name, path in files2res for x in ('DEST', name, path)]) @@ -420,26 +420,26 @@ def onpy_srcs(unit, *args): data += ['-', line] unit.onresource(data) - for swigs, on_swig_python in [ - (swigs_c, unit.on_swig_python_c), - (swigs_cpp, unit.on_swig_python_cpp), - ]: - for path, mod in swigs: - # Make output prefix basename match swig module name. - prefix = path[:path.rfind('/') + 1] + mod.rsplit('.', 1)[-1] - swg_py = '{}/{}/{}.py'.format('${ARCADIA_BUILD_ROOT}', upath, prefix) - on_swig_python([path, prefix]) - onpy_register(unit, mod + '_swg') - onpy_srcs(unit, swg_py + '=' + mod) - - if pys: + for swigs, on_swig_python in [ + (swigs_c, unit.on_swig_python_c), + (swigs_cpp, unit.on_swig_python_cpp), + ]: + for path, mod in swigs: + # Make output prefix basename match swig module name. + prefix = path[:path.rfind('/') + 1] + mod.rsplit('.', 1)[-1] + swg_py = '{}/{}/{}.py'.format('${ARCADIA_BUILD_ROOT}', upath, prefix) + on_swig_python([path, prefix]) + onpy_register(unit, mod + '_swg') + onpy_srcs(unit, swg_py + '=' + mod) + + if pys: pys_seen = set() pys_dups = {m for _, m in pys if (m in pys_seen or pys_seen.add(m))} if pys_dups: ymake.report_configure_error('Duplicate(s) is found in the PY_SRCS macro: {}'.format(pys_dups)) - res = [] - + res = [] + if py3: mod_list_md5 = md5() for path, mod in pys: @@ -453,7 +453,7 @@ def onpy_srcs(unit, *args): dst = path + uniq_suffix(path, unit) unit.on_py3_compile_bytecode([root_rel_path + '-', path, dst]) res += ['DEST', dest + '.yapyc3', dst + '.yapyc3'] - + if py_namespaces: # Note: Add md5 to key to prevent key collision if two or more PY_SRCS() used in the same ya.make ns_res = [] @@ -468,18 +468,18 @@ def onpy_srcs(unit, *args): else: for path, mod in pys: root_rel_path = rootrel_arc_src(path, unit) - if with_py: - key = '/py_modules/' + mod - res += [ - path, key, - '-', 'resfs/src/{}={}'.format(key, root_rel_path), - ] - if with_pyc: - src = unit.resolve_arc_path(path) or path + if with_py: + key = '/py_modules/' + mod + res += [ + path, key, + '-', 'resfs/src/{}={}'.format(key, root_rel_path), + ] + if with_pyc: + src = unit.resolve_arc_path(path) or path dst = path + uniq_suffix(path, unit) unit.on_py_compile_bytecode([root_rel_path + '-', src, dst]) res += [dst + '.yapyc', '/py_code/' + mod] - + unit.onresource(res) add_python_lint_checks(unit, 2, [path for path, mod in pys] + unit.get(['_PY_EXTRA_LINT_FILES_VALUE']).split()) @@ -510,10 +510,10 @@ def onpy_srcs(unit, *args): if optimize_proto and need_gazetteer_peerdir: unit.onpeerdir(['kernel/gazetteer/proto']) - if evs: + if evs: unit.onpeerdir([cpp_runtime_path]) unit.on_generate_py_evs_internal([path for path, mod in evs]) - unit.onpy_srcs([ev_arg(path, mod, unit) for path, mod in evs]) + unit.onpy_srcs([ev_arg(path, mod, unit) for path, mod in evs]) if fbss: unit.onpeerdir(unit.get('_PY_FBS_DEPS').split()) @@ -533,8 +533,8 @@ def ontest_srcs(unit, *args): _check_test_srcs(*args) if unit.get('PY3TEST_BIN' if is_py3(unit) else 'PYTEST_BIN') != 'no': unit.onpy_srcs(["NAMESPACE", "__tests__"] + list(args)) - - + + def onpy_doctests(unit, *args): """ @usage PY_DOCTEST(Packages...) 
@@ -553,7 +553,7 @@ def py_register(unit, func, py3): unit.on_py_register([func]) -def onpy_register(unit, *args): +def onpy_register(unit, *args): """ @usage: PY_REGISTER([package.]module_name) @@ -562,36 +562,36 @@ def onpy_register(unit, *args): To register the modules from the sources in the SRCS(), you need to use PY_REGISTER(). PY_REGISTER(module_name) initializes module globally via call to initmodule_name() - PY_REGISTER(package.module_name) initializes module in the specified package - It renames its init function with CFLAGS(-Dinitmodule_name=init7package11module_name) - or CFLAGS(-DPyInit_module_name=PyInit_7package11module_name) + PY_REGISTER(package.module_name) initializes module in the specified package + It renames its init function with CFLAGS(-Dinitmodule_name=init7package11module_name) + or CFLAGS(-DPyInit_module_name=PyInit_7package11module_name) Documentation: https://wiki.yandex-team.ru/arcadia/python/pysrcs/#makrospyregister """ - + py3 = is_py3(unit) - + for name in args: - assert '=' not in name, name - py_register(unit, name, py3) - if '.' in name: - shortname = name.rsplit('.', 1)[1] + assert '=' not in name, name + py_register(unit, name, py3) + if '.' in name: + shortname = name.rsplit('.', 1)[1] if py3: - unit.oncflags(['-DPyInit_{}=PyInit_{}'.format(shortname, mangle(name))]) + unit.oncflags(['-DPyInit_{}=PyInit_{}'.format(shortname, mangle(name))]) else: - unit.oncflags(['-Dinit{}=init{}'.format(shortname, mangle(name))]) + unit.oncflags(['-Dinit{}=init{}'.format(shortname, mangle(name))]) def py_main(unit, arg): if unit.get('IGNORE_PY_MAIN'): return - unit_needs_main = unit.get('MODULE_TYPE') in ('PROGRAM', 'DLL') - if unit_needs_main: - py_program(unit, is_py3(unit)) + unit_needs_main = unit.get('MODULE_TYPE') in ('PROGRAM', 'DLL') + if unit_needs_main: + py_program(unit, is_py3(unit)) unit.onresource(['-', 'PY_MAIN={}'.format(arg)]) -def onpy_main(unit, arg): +def onpy_main(unit, arg): """ @usage: PY_MAIN(package.module[:func]) @@ -602,9 +602,9 @@ def onpy_main(unit, arg): arg = arg.replace('/', '.') - if ':' not in arg: - arg += ':main' - + if ':' not in arg: + arg += ':main' + py_main(unit, arg) diff --git a/build/plugins/res.py b/build/plugins/res.py index c01b999ed6..a937caba81 100644 --- a/build/plugins/res.py +++ b/build/plugins/res.py @@ -42,41 +42,41 @@ def onfat_resource(unit, *args): # https://msdn.microsoft.com/ru-ru/library/windows/desktop/ms682425.aspx for part_args in split(args, 8000): output = listid(part_args) + '.cpp' - inputs = [x for x, y in iterpair(part_args) if x != '-'] - if inputs: - inputs = ['IN'] + inputs + inputs = [x for x, y in iterpair(part_args) if x != '-'] + if inputs: + inputs = ['IN'] + inputs - unit.onrun_program(['tools/rescompiler', output] + part_args + inputs + ['OUT_NOAUTO', output]) + unit.onrun_program(['tools/rescompiler', output] + part_args + inputs + ['OUT_NOAUTO', output]) unit.onsrcs(['GLOBAL', output]) -def onresource_files(unit, *args): - """ +def onresource_files(unit, *args): + """ @usage: RESOURCE_FILES([DONT_PARSE] [PREFIX {prefix}] [STRIP prefix_to_strip] {path}) This macro expands into RESOURCE([DONT_PARSE] {path} resfs/file/{prefix}{path} - resfs/src/resfs/file/{prefix}{remove_prefix(path, prefix_to_strip)}={rootrel_arc_src(path)} - ) - - resfs/src/{key} stores a source root (or build root) relative path of the - source of the value of the {key} resource. - - resfs/file/{key} stores any value whose source was a file on a filesystem. - resfs/src/resfs/file/{key} must store its path. 
- + ) + + resfs/src/{key} stores a source root (or build root) relative path of the + source of the value of the {key} resource. + + resfs/file/{key} stores any value whose source was a file on a filesystem. + resfs/src/resfs/file/{key} must store its path. + DONT_PARSE disables parsing for source code files (determined by extension) Please don't abuse: use separate DONT_PARSE macro call only for files subject to parsing - This form is for use from other plugins: - RESOURCE_FILES([DEST {dest}] {path}) expands into RESOURCE({path} resfs/file/{dest}) + This form is for use from other plugins: + RESOURCE_FILES([DEST {dest}] {path}) expands into RESOURCE({path} resfs/file/{dest}) @see: https://wiki.yandex-team.ru/devtools/commandsandvars/resourcefiles/ - """ - prefix = '' + """ + prefix = '' prefix_to_strip = None - dest = None - res = [] + dest = None + res = [] first = 0 if args and not unit.enabled('_GO_MODULE'): @@ -85,21 +85,21 @@ def onresource_files(unit, *args): if args and args[0] == 'DONT_PARSE': first = 1 - + args = iter(args[first:]) - for arg in args: - if arg == 'PREFIX': - prefix, dest = next(args), None - elif arg == 'DEST': - dest, prefix = next(args), None + for arg in args: + if arg == 'PREFIX': + prefix, dest = next(args), None + elif arg == 'DEST': + dest, prefix = next(args), None elif arg == 'STRIP': prefix_to_strip = next(args) - else: - path = arg + else: + path = arg key = 'resfs/file/' + (dest or (prefix + (path if not prefix_to_strip else remove_prefix(path, prefix_to_strip)))) - src = 'resfs/src/{}={}'.format(key, rootrel_arc_src(path, unit)) - res += ['-', src, path, key] - + src = 'resfs/src/{}={}'.format(key, rootrel_arc_src(path, unit)) + res += ['-', src, path, key] + if unit.enabled('_GO_MODULE'): unit.on_go_resource(res) else: diff --git a/build/plugins/ssqls.py b/build/plugins/ssqls.py index de3fe2cf5f..618cbc11bc 100644 --- a/build/plugins/ssqls.py +++ b/build/plugins/ssqls.py @@ -1,40 +1,40 @@ -from os.path import splitext - -import _import_wrapper as iw -from _common import resolve_includes - - -class SSQLSParser(object): - def __init__(self, path, unit): - s = unit.resolve_arc_path(path) - assert s.startswith('$S/') and s.endswith('.ssqls'), s - h = '$B/' + s[3:-6] + '.h' - - import xml.etree.cElementTree as ET - try: - doc = ET.parse(path) - except ET.ParseError as e: - unit.message(['error', 'malformed XML {}: {}'.format(path, e)]) - doc = ET.Element('DbObject') - xmls, headers = self.parse_doc(doc) - self._includes = resolve_includes(unit, s, xmls) - self._induced = {'cpp': [h], 'h': resolve_includes(unit, h, headers)} - - @staticmethod - def parse_doc(doc): - paths = lambda nodes: filter(None, (e.get('path') for e in nodes)) - includes = doc.findall('include') - ancestors = paths(doc.findall('ancestors/ancestor')) - headers = [e.text.strip('<>""') for e in includes] - headers += [splitext(s)[0] + '.h' for s in ancestors] - return paths(includes) + ancestors, headers - - def includes(self): - return self._includes - - def induced_deps(self): - return self._induced - - -def init(): - iw.addparser('ssqls', SSQLSParser) +from os.path import splitext + +import _import_wrapper as iw +from _common import resolve_includes + + +class SSQLSParser(object): + def __init__(self, path, unit): + s = unit.resolve_arc_path(path) + assert s.startswith('$S/') and s.endswith('.ssqls'), s + h = '$B/' + s[3:-6] + '.h' + + import xml.etree.cElementTree as ET + try: + doc = ET.parse(path) + except ET.ParseError as e: + unit.message(['error', 'malformed XML {}: 
{}'.format(path, e)]) + doc = ET.Element('DbObject') + xmls, headers = self.parse_doc(doc) + self._includes = resolve_includes(unit, s, xmls) + self._induced = {'cpp': [h], 'h': resolve_includes(unit, h, headers)} + + @staticmethod + def parse_doc(doc): + paths = lambda nodes: filter(None, (e.get('path') for e in nodes)) + includes = doc.findall('include') + ancestors = paths(doc.findall('ancestors/ancestor')) + headers = [e.text.strip('<>""') for e in includes] + headers += [splitext(s)[0] + '.h' for s in ancestors] + return paths(includes) + ancestors, headers + + def includes(self): + return self._includes + + def induced_deps(self): + return self._induced + + +def init(): + iw.addparser('ssqls', SSQLSParser) diff --git a/build/plugins/swig.py b/build/plugins/swig.py index 24a0bf4cb3..32a37204a6 100644 --- a/build/plugins/swig.py +++ b/build/plugins/swig.py @@ -1,15 +1,15 @@ import os import posixpath -import re +import re import _import_wrapper as iw import _common as common -def init(): - iw.addrule('swg', Swig) - - +def init(): + iw.addrule('swg', Swig) + + class Swig(iw.CustomCommand): def __init__(self, path, unit): self._tool = unit.get('SWIG_TOOL') @@ -17,7 +17,7 @@ class Swig(iw.CustomCommand): self._local_swig = unit.get('USE_LOCAL_SWIG') == "yes" self._path = path - self._flags = ['-cpperraswarn'] + self._flags = ['-cpperraswarn'] self._bindir = common.tobuilddir(unit.path()) self._input_name = common.stripext(os.path.basename(self._path)) @@ -42,12 +42,12 @@ class Swig(iw.CustomCommand): lang_specific_incl_dir = 'perl5' elif self._swig_lang in ['jni_cpp', 'jni_java']: lang_specific_incl_dir = 'java' - incl_dirs = [ + incl_dirs = [ "FOR", "swig", posixpath.join(self._library_dir, lang_specific_incl_dir), "FOR", "swig", self._library_dir - ] + ] self._incl_dirs = ['$S', '$B'] + [posixpath.join('$S', d) for d in incl_dirs] modname = unit.get('REALPRJNAME') @@ -99,7 +99,7 @@ class Swig(iw.CustomCommand): return [ (self._main_out, []), (common.join_intl_paths(self._bindir, self._out_name), (['noauto', 'add_to_outs'] if self._swig_lang != 'java' else [])), - ] + ([(self._out_header, [])] if self._swig_lang == 'java' else []) + ] + ([(self._out_header, [])] if self._swig_lang == 'java' else []) def output_includes(self): return [(self._out_header, [])] if self._swig_lang in ['java', 'jni_cpp'] else [] @@ -109,30 +109,30 @@ class Swig(iw.CustomCommand): binary = self._tool return self.do_run_java(binary, self._path) if self._swig_lang in ['java', 'jni_cpp', 'jni_java'] else self.do_run(binary, self._path) - def _incl_flags(self): - return ['-I' + self.resolve_path(x) for x in self._incl_dirs] - + def _incl_flags(self): + return ['-I' + self.resolve_path(x) for x in self._incl_dirs] + def do_run(self, binary, path): - self.call([binary] + self._flags + [ - '-o', self.resolve_path(common.get(self.output, 0)), - '-outdir', self.resolve_path(self._bindir) - ] + self._incl_flags() + [self.resolve_path(path)]) + self.call([binary] + self._flags + [ + '-o', self.resolve_path(common.get(self.output, 0)), + '-outdir', self.resolve_path(self._bindir) + ] + self._incl_flags() + [self.resolve_path(path)]) def do_run_java(self, binary, path): - import tarfile + import tarfile - outdir = self.resolve_path(self._bindir) + outdir = self.resolve_path(self._bindir) if self._swig_lang != 'jni_cpp': java_srcs_dir = os.path.join(outdir, self._package.replace('.', '/')) if not os.path.exists(java_srcs_dir): os.makedirs(java_srcs_dir) - flags = self._incl_flags() - src = self.resolve_path(path) - with 
open(src, 'r') as f: - if not re.search(r'(?m)^%module\b', f.read()): - flags += ['-module', os.path.splitext(os.path.basename(src))[0]] - + flags = self._incl_flags() + src = self.resolve_path(path) + with open(src, 'r') as f: + if not re.search(r'(?m)^%module\b', f.read()): + flags += ['-module', os.path.splitext(os.path.basename(src))[0]] + if self._swig_lang == 'jni_cpp': self.call([binary, '-c++', '-o', self._main_out, '-java', '-package', self._package] + flags + [src]) elif self._swig_lang == 'jni_java': @@ -142,11 +142,11 @@ class Swig(iw.CustomCommand): binary, '-c++', '-o', self._main_out, '-outdir', java_srcs_dir, '-java', '-package', self._package, ] + flags + [src]) - + if self._swig_lang in ['jni_java', 'java']: with tarfile.open(os.path.join(outdir, self._out_name), 'a') as tf: tf.add(java_srcs_dir, arcname=self._package.replace('.', '/')) - + if self._swig_lang in ['jni_cpp', 'java']: header = os.path.splitext(self.resolve_path(self._main_out))[0] + '.h' if not os.path.exists(header): diff --git a/build/plugins/tests/test_ssqls.py b/build/plugins/tests/test_ssqls.py index db9e8c93bc..2a1d032109 100644 --- a/build/plugins/tests/test_ssqls.py +++ b/build/plugins/tests/test_ssqls.py @@ -1,23 +1,23 @@ -import xml.etree.cElementTree as ET - -from build.plugins import ssqls - - -example = '''\ -<?xml version="1.0" encoding="utf-8"?> -<DbObject> - <include path="A.ssqls"><a.h></include> - <include>"b.h"</include> - - <ancestors> - <ancestor path="C.ssqls"/> - </ancestors> -</DbObject> -''' - - -def test_include_parser(): - doc = ET.fromstring(example) - xmls, headers = ssqls.SSQLSParser.parse_doc(doc) - assert headers == ['a.h', 'b.h', 'C.h'] - assert xmls == ['A.ssqls', 'C.ssqls'] +import xml.etree.cElementTree as ET + +from build.plugins import ssqls + + +example = '''\ +<?xml version="1.0" encoding="utf-8"?> +<DbObject> + <include path="A.ssqls"><a.h></include> + <include>"b.h"</include> + + <ancestors> + <ancestor path="C.ssqls"/> + </ancestors> +</DbObject> +''' + + +def test_include_parser(): + doc = ET.fromstring(example) + xmls, headers = ssqls.SSQLSParser.parse_doc(doc) + assert headers == ['a.h', 'b.h', 'C.h'] + assert xmls == ['A.ssqls', 'C.ssqls'] diff --git a/build/plugins/tests/ya.make b/build/plugins/tests/ya.make index ad8f2c6a3b..87228b98df 100644 --- a/build/plugins/tests/ya.make +++ b/build/plugins/tests/ya.make @@ -10,7 +10,7 @@ TEST_SRCS( test_code_generator.py test_common.py test_requirements.py - test_ssqls.py + test_ssqls.py ) END() diff --git a/build/plugins/ya.make b/build/plugins/ya.make index 1d2810bf46..4ad5f5988e 100644 --- a/build/plugins/ya.make +++ b/build/plugins/ya.make @@ -4,7 +4,7 @@ PY2_LIBRARY() PY_SRCS( code_generator.py - ssqls.py + ssqls.py swig.py _common.py diff --git a/build/plugins/yql_python_udf.py b/build/plugins/yql_python_udf.py index 9015ae0c89..c4f949d8a9 100644 --- a/build/plugins/yql_python_udf.py +++ b/build/plugins/yql_python_udf.py @@ -46,7 +46,7 @@ def onregister_yql_python_udf(unit, *args): path = name + '.yql_python_udf.cpp' libra_flag = '1' if add_libra_modules else '0' - unit.onpython([ + unit.onpython([ 'build/scripts/gen_yql_python_udf.py', flavor, name, resource_name, path, libra_flag, 'OUT', path, diff --git a/build/plugins/ytest.py b/build/plugins/ytest.py index c7978206ca..8970837f0f 100644 --- a/build/plugins/ytest.py +++ b/build/plugins/ytest.py @@ -645,12 +645,12 @@ def onadd_check(unit, *args): save_in_file(unit.get('TEST_DART_OUT_FILE'), data) -def on_register_no_check_imports(unit): - s = 
unit.get('NO_CHECK_IMPORTS_FOR_VALUE') - if s not in ('', 'None'): - unit.onresource(['-', 'py/no_check_imports/{}="{}"'.format(_common.pathid(s), s)]) - - +def on_register_no_check_imports(unit): + s = unit.get('NO_CHECK_IMPORTS_FOR_VALUE') + if s not in ('', 'None'): + unit.onresource(['-', 'py/no_check_imports/{}="{}"'.format(_common.pathid(s), s)]) + + def onadd_check_py_imports(unit, *args): if unit.get("TIDY") == "yes": # graph changed for clang_tidy tests |