author     kakabba <kakabba@yandex-team.ru>  2022-02-10 16:46:04 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:46:04 +0300
commit     c8e3995898c443e78266f7420aac5fb3da15d413 (patch)
tree       a530e068cc107e227deccc80722204db63a4d75d /build/plugins
parent     110a978b66fe6c0916572df51cfead2a9b647174 (diff)
download   ydb-c8e3995898c443e78266f7420aac5fb3da15d413.tar.gz
Restoring authorship annotation for <kakabba@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'build/plugins')
-rw-r--r--  build/plugins/_common.py            118
-rw-r--r--  build/plugins/_custom_command.py    110
-rw-r--r--  build/plugins/_import_wrapper.py     10
-rw-r--r--  build/plugins/_unpickler.py          58
-rw-r--r--  build/plugins/_xsyn_includes.py       8
-rw-r--r--  build/plugins/build_mn_files.py       4
-rw-r--r--  build/plugins/cp.py                  46
-rw-r--r--  build/plugins/create_init_py.py      12
-rw-r--r--  build/plugins/files.py               10
-rw-r--r--  build/plugins/print_module_type.py    8
-rw-r--r--  build/plugins/rodata.py               8
-rw-r--r--  build/plugins/split_codegen.py       34
-rw-r--r--  build/plugins/swig.py                98
-rw-r--r--  build/plugins/xsyn.py                62
-rw-r--r--  build/plugins/ya.make                 6
-rw-r--r--  build/plugins/ytest.py               64
-rw-r--r--  build/plugins/ytest2.py             104
17 files changed, 380 insertions, 380 deletions
diff --git a/build/plugins/_common.py b/build/plugins/_common.py
index 2f831a94db..c85dca1d4f 100644
--- a/build/plugins/_common.py
+++ b/build/plugins/_common.py
@@ -68,51 +68,51 @@ def before(s, ss):
return s[:p]
-
-def sort_by_keywords(keywords, args):
- flat = []
- res = {}
-
- cur_key = None
- limit = -1
- for arg in args:
- if arg in keywords:
- limit = keywords[arg]
- if limit == 0:
- res[arg] = True
- cur_key = None
- limit = -1
- else:
- cur_key = arg
- continue
- if limit == 0:
- cur_key = None
- limit = -1
- if cur_key:
- if cur_key in res:
- res[cur_key].append(arg)
- else:
- res[cur_key] = [arg]
- limit -= 1
- else:
- flat.append(arg)
- return (flat, res)
-
-
-def resolve_common_const(path):
- if path.startswith('${ARCADIA_ROOT}'):
- return path.replace('${ARCADIA_ROOT}', '$S', 1)
- if path.startswith('${ARCADIA_BUILD_ROOT}'):
- return path.replace('${ARCADIA_BUILD_ROOT}', '$B', 1)
- return path
-
-
-def resolve_to_abs_path(path, source_root, build_root):
- if path.startswith('$S') and source_root is not None:
- return path.replace('$S', source_root, 1)
- if path.startswith('$B') and build_root is not None:
- return path.replace('$B', build_root, 1)
- return path
+
+def sort_by_keywords(keywords, args):
+ flat = []
+ res = {}
+
+ cur_key = None
+ limit = -1
+ for arg in args:
+ if arg in keywords:
+ limit = keywords[arg]
+ if limit == 0:
+ res[arg] = True
+ cur_key = None
+ limit = -1
+ else:
+ cur_key = arg
+ continue
+ if limit == 0:
+ cur_key = None
+ limit = -1
+ if cur_key:
+ if cur_key in res:
+ res[cur_key].append(arg)
+ else:
+ res[cur_key] = [arg]
+ limit -= 1
+ else:
+ flat.append(arg)
+ return (flat, res)
+
+
+def resolve_common_const(path):
+ if path.startswith('${ARCADIA_ROOT}'):
+ return path.replace('${ARCADIA_ROOT}', '$S', 1)
+ if path.startswith('${ARCADIA_BUILD_ROOT}'):
+ return path.replace('${ARCADIA_BUILD_ROOT}', '$B', 1)
+ return path
+
+
+def resolve_to_abs_path(path, source_root, build_root):
+ if path.startswith('$S') and source_root is not None:
+ return path.replace('$S', source_root, 1)
+ if path.startswith('$B') and build_root is not None:
+ return path.replace('$B', build_root, 1)
+ return path
def resolve_to_ymake_path(path):
@@ -121,20 +121,20 @@ def resolve_to_ymake_path(path):
def join_intl_paths(*args):
return '/'.join(args)
-
-
-def get(fun, num):
- return fun()[num][0]
-
-
-def make_tuples(arg_list):
- def tpl():
- for x in arg_list:
- yield (x, [])
-
- return list(tpl())
-
-
+
+
+def get(fun, num):
+ return fun()[num][0]
+
+
+def make_tuples(arg_list):
+ def tpl():
+ for x in arg_list:
+ yield (x, [])
+
+ return list(tpl())
+
+
def resolve_includes(unit, src, paths):
return unit.resolve_include([src] + paths) if paths else []
diff --git a/build/plugins/_custom_command.py b/build/plugins/_custom_command.py
index 9692214b22..5330d6c0bb 100644
--- a/build/plugins/_custom_command.py
+++ b/build/plugins/_custom_command.py
@@ -1,65 +1,65 @@
-import subprocess
-import sys
+import subprocess
+import sys
import os
-import _common as common
-
-
-class CustomCommand(object):
- def __setstate__(self, sdict):
- if isinstance(sdict, tuple):
- for elem in sdict:
- if isinstance(elem, dict):
- for key in elem:
- setattr(self, key, elem[key])
-
- self._source_root = None
- self._build_root = None
-
- def set_source_root(self, path):
- self._source_root = path
-
- def set_build_root(self, path):
- self._build_root = path
-
- def call(self, args, **kwargs):
- cwd = self._get_call_specs('cwd', kwargs)
- stdout_path = self._get_call_specs('stdout', kwargs)
-
- resolved_args = []
-
- for arg in args:
+import _common as common
+
+
+class CustomCommand(object):
+ def __setstate__(self, sdict):
+ if isinstance(sdict, tuple):
+ for elem in sdict:
+ if isinstance(elem, dict):
+ for key in elem:
+ setattr(self, key, elem[key])
+
+ self._source_root = None
+ self._build_root = None
+
+ def set_source_root(self, path):
+ self._source_root = path
+
+ def set_build_root(self, path):
+ self._build_root = path
+
+ def call(self, args, **kwargs):
+ cwd = self._get_call_specs('cwd', kwargs)
+ stdout_path = self._get_call_specs('stdout', kwargs)
+
+ resolved_args = []
+
+ for arg in args:
resolved_args.append(self.resolve_path(arg))
-
- if stdout_path:
- stdout = open(stdout_path, 'wb')
- else:
- stdout = None
-
+
+ if stdout_path:
+ stdout = open(stdout_path, 'wb')
+ else:
+ stdout = None
+
env = os.environ.copy()
env['ASAN_OPTIONS'] = 'detect_leaks=0'
-
+
rc = subprocess.call(resolved_args, cwd=cwd, stdout=stdout, env=env)
- if stdout:
- stdout.close()
- if rc:
- sys.exit(rc)
-
+ if stdout:
+ stdout.close()
+ if rc:
+ sys.exit(rc)
+
def resolve_path(self, path):
return common.resolve_to_abs_path(path, self._source_root, self._build_root)
-
- def _get_call_specs(self, name, kwargs):
- if isinstance(kwargs, dict):
- param = kwargs.get(name, None)
- if param:
- return self.resolve_path(param)
- return None
-
-
-def addrule(*unused):
- pass
-
-
+
+ def _get_call_specs(self, name, kwargs):
+ if isinstance(kwargs, dict):
+ param = kwargs.get(name, None)
+ if param:
+ return self.resolve_path(param)
+ return None
+
+
+def addrule(*unused):
+ pass
+
+
def addparser(*unused, **kwargs):
- pass
+ pass
diff --git a/build/plugins/_import_wrapper.py b/build/plugins/_import_wrapper.py
index 883f662314..f21bb1dc11 100644
--- a/build/plugins/_import_wrapper.py
+++ b/build/plugins/_import_wrapper.py
@@ -1,9 +1,9 @@
-try:
+try:
from ymake import CustomCommand as RealCustomCommand
- from ymake import addrule
- from ymake import addparser
+ from ymake import addrule
+ from ymake import addparser
from ymake import subst
-
+
class CustomCommand(RealCustomCommand):
def __init__(self, *args, **kwargs):
RealCustomCommand.__init__(*args, **kwargs)
@@ -11,7 +11,7 @@ try:
def resolve_path(self, path):
return subst(path)
-except ImportError:
+except ImportError:
from _custom_command import CustomCommand # noqa
from _custom_command import addrule # noqa
from _custom_command import addparser # noqa
diff --git a/build/plugins/_unpickler.py b/build/plugins/_unpickler.py
index e01e7b3118..9e6249c2f7 100644
--- a/build/plugins/_unpickler.py
+++ b/build/plugins/_unpickler.py
@@ -2,41 +2,41 @@ import sys
sys.dont_write_bytecode = True
-import argparse
-import base64
+import argparse
+import base64
try:
import cPickle as pickle
except Exception:
import pickle
-
+
import _common as common
-
-
-def main():
- parser = argparse.ArgumentParser()
- parser.add_argument('--data', help='pickled object of TCustomCommand class', required=True)
- parser.add_argument('--src-root', help='$S real path', required=True)
- parser.add_argument('--build-root', help='$B real path', required=True)
- parser.add_argument('--tools', help='binaries needed by command', required=True, nargs='+')
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--data', help='pickled object of TCustomCommand class', required=True)
+ parser.add_argument('--src-root', help='$S real path', required=True)
+ parser.add_argument('--build-root', help='$B real path', required=True)
+ parser.add_argument('--tools', help='binaries needed by command', required=True, nargs='+')
args, unknown_args = parser.parse_known_args()
-
- encoded_cmd = args.data
- src_root = args.src_root
- build_root = args.build_root
- tools = args.tools
-
- assert (int(tools[0]) == len(tools[1:])), "tools quantity != tools number!"
-
+
+ encoded_cmd = args.data
+ src_root = args.src_root
+ build_root = args.build_root
+ tools = args.tools
+
+ assert (int(tools[0]) == len(tools[1:])), "tools quantity != tools number!"
+
cmd_object = pickle.loads(base64.b64decode(encoded_cmd))
-
- cmd_object.set_source_root(src_root)
- cmd_object.set_build_root(build_root)
-
- if len(tools[1:]) == 0:
+
+ cmd_object.set_source_root(src_root)
+ cmd_object.set_build_root(build_root)
+
+ if len(tools[1:]) == 0:
cmd_object.run(unknown_args, common.get_interpreter_path())
- else:
+ else:
cmd_object.run(unknown_args, *tools[1:])
-
-
-if __name__ == '__main__':
- main()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/build/plugins/_xsyn_includes.py b/build/plugins/_xsyn_includes.py
index 8d33cea2f0..5077419912 100644
--- a/build/plugins/_xsyn_includes.py
+++ b/build/plugins/_xsyn_includes.py
@@ -1,4 +1,4 @@
-def get_include_callback():
+def get_include_callback():
"""
.. function: get_include_callback returns function that processes each DOM element to get xsyn include from it, and it's aware of directory with all the xsyns.
@@ -55,6 +55,6 @@ def process_xsyn(filepath, on_element):
return res
-def get_all_includes(filepath):
- callback = get_include_callback()
- return process_xsyn(filepath, callback)
+def get_all_includes(filepath):
+ callback = get_include_callback()
+ return process_xsyn(filepath, callback)
diff --git a/build/plugins/build_mn_files.py b/build/plugins/build_mn_files.py
index 4da76f1852..a2f5582417 100644
--- a/build/plugins/build_mn_files.py
+++ b/build/plugins/build_mn_files.py
@@ -7,7 +7,7 @@ def on_build_mns_files(unit, *args):
ranking_suffix = ''
check = ''
index = 0
- fml_unused_tool = ''
+ fml_unused_tool = ''
while index < len(args):
if args[index] == 'NAME':
index += 1
@@ -17,7 +17,7 @@ def on_build_mns_files(unit, *args):
ranking_suffix = args[index]
elif args[index] == 'CHECK':
check = 'CHECK'
- fml_unused_tool = unit.get('FML_UNUSED_TOOL') or '$FML_UNUSED_TOOL'
+ fml_unused_tool = unit.get('FML_UNUSED_TOOL') or '$FML_UNUSED_TOOL'
else:
files.append(args[index])
index += 1
diff --git a/build/plugins/cp.py b/build/plugins/cp.py
index 5c663a3bdd..2525177e1b 100644
--- a/build/plugins/cp.py
+++ b/build/plugins/cp.py
@@ -1,30 +1,30 @@
-import os
+import os
-from _common import sort_by_keywords
-
-
-def oncopy(unit, *args):
+from _common import sort_by_keywords
+
+
+def oncopy(unit, *args):
keywords = {'RESULT': 1, 'KEEP_DIR_STRUCT': 0, 'DESTINATION': 1, 'FROM': 1}
-
- flat_args, spec_args = sort_by_keywords(keywords, args)
-
- dest_dir = spec_args['DESTINATION'][0] if 'DESTINATION' in spec_args else ''
+
+ flat_args, spec_args = sort_by_keywords(keywords, args)
+
+ dest_dir = spec_args['DESTINATION'][0] if 'DESTINATION' in spec_args else ''
from_dir = spec_args['FROM'][0] if 'FROM' in spec_args else ''
- keep_struct = 'KEEP_DIR_STRUCT' in spec_args
- save_in_var = 'RESULT' in spec_args
- targets = []
-
- for source in flat_args:
- rel_path = ''
- path_list = source.split(os.sep)
- filename = path_list[-1]
- if keep_struct:
- if path_list[:-1]:
- rel_path = os.path.join(*path_list[:-1])
+ keep_struct = 'KEEP_DIR_STRUCT' in spec_args
+ save_in_var = 'RESULT' in spec_args
+ targets = []
+
+ for source in flat_args:
+ rel_path = ''
+ path_list = source.split(os.sep)
+ filename = path_list[-1]
+ if keep_struct:
+ if path_list[:-1]:
+ rel_path = os.path.join(*path_list[:-1])
source_path = os.path.join(from_dir, rel_path, filename)
target_path = os.path.join(dest_dir, rel_path, filename)
- if save_in_var:
+ if save_in_var:
targets.append(target_path)
unit.oncopy_file([source_path, target_path])
- if save_in_var:
- unit.set([spec_args["RESULT"][0], " ".join(targets)])
+ if save_in_var:
+ unit.set([spec_args["RESULT"][0], " ".join(targets)])
diff --git a/build/plugins/create_init_py.py b/build/plugins/create_init_py.py
index e41a4d22df..ca71928c31 100644
--- a/build/plugins/create_init_py.py
+++ b/build/plugins/create_init_py.py
@@ -1,15 +1,15 @@
-import os
-
-from _common import sort_by_keywords
+import os
+from _common import sort_by_keywords
+
def oncreate_init_py_structure(unit, *args):
- if unit.get('DISTBUILD'):
- return
+ if unit.get('DISTBUILD'):
+ return
target_dir = unit.get('PY_PROTOS_FOR_DIR')
path_list = target_dir.split(os.path.sep)[1:]
inits = [os.path.join("${ARCADIA_BUILD_ROOT}", '__init__.py')]
for i in range(1, len(path_list) + 1):
inits.append(os.path.join("${ARCADIA_BUILD_ROOT}", os.path.join(*path_list[0:i]), '__init__.py'))
unit.ontouch(inits)
-
+
diff --git a/build/plugins/files.py b/build/plugins/files.py
index 78a6fe6169..0381aa792e 100644
--- a/build/plugins/files.py
+++ b/build/plugins/files.py
@@ -1,5 +1,5 @@
-def onfiles(unit, *args):
- args = list(args)
- for arg in args:
- if not arg.startswith('${ARCADIA_BUILD_ROOT}'):
- unit.oncopy_file([arg, arg])
+def onfiles(unit, *args):
+ args = list(args)
+ for arg in args:
+ if not arg.startswith('${ARCADIA_BUILD_ROOT}'):
+ unit.oncopy_file([arg, arg])
diff --git a/build/plugins/print_module_type.py b/build/plugins/print_module_type.py
index cc54c55675..e19b38b165 100644
--- a/build/plugins/print_module_type.py
+++ b/build/plugins/print_module_type.py
@@ -1,5 +1,5 @@
-def onprint_module_type(unit, *args):
- filepath = unit.get('KIWI_OUT_FILE')
+def onprint_module_type(unit, *args):
+ filepath = unit.get('KIWI_OUT_FILE')
if len(args) >= 2 and filepath is not None:
- with open(filepath, "a") as file_handler:
- print >>file_handler, "{0} {1} {2}".format(args[0], args[1], unit.path())
+ with open(filepath, "a") as file_handler:
+ print >>file_handler, "{0} {1} {2}".format(args[0], args[1], unit.path())
diff --git a/build/plugins/rodata.py b/build/plugins/rodata.py
index 3ecb0f9a83..4c5f5749b4 100644
--- a/build/plugins/rodata.py
+++ b/build/plugins/rodata.py
@@ -127,13 +127,13 @@ class RODataCXX(iw.CustomCommand):
return 'RD', self._path, 'light-green'
def input(self):
- return common.make_tuples([self._path])
+ return common.make_tuples([self._path])
def main_out(self):
return common.tobuilddir(common.stripext(self._path)) + '.cpp'
def output(self):
- return common.make_tuples([self.main_out()])
+ return common.make_tuples([self.main_out()])
def run(self, extra_args, binary):
with open(self.resolve_path(self.main_out()), 'w') as f:
@@ -164,5 +164,5 @@ def ro_data(path, unit):
return ROData(path, unit)
-def init():
- iw.addrule('rodata', ro_data)
+def init():
+ iw.addrule('rodata', ro_data)
diff --git a/build/plugins/split_codegen.py b/build/plugins/split_codegen.py
index f1e60bc142..4fa7b833f2 100644
--- a/build/plugins/split_codegen.py
+++ b/build/plugins/split_codegen.py
@@ -1,12 +1,12 @@
-from _common import sort_by_keywords
-
+from _common import sort_by_keywords
+
# This hard-coded many times in CppParts in various codegens
_DEFAULT_CPP_PARTS = 20
# See TCodegenParams::MethodStream usage in factor codegen
_ADDITIONAL_STREAM_COUNT = 5
-def onsplit_codegen(unit, *args):
+def onsplit_codegen(unit, *args):
'''
@usage: SPLIT_CODEGEN(tool prefix opts... [OUT_NUM num] [OUTPUT_INCLUDES output_includes...])
@@ -16,28 +16,28 @@ def onsplit_codegen(unit, *args):
1. OUT_NUM <the number of generated Prefix.N.cpp default 25 (N varies from 0 to 24)>
2. OUTPUT_INCLUDES <path to files that will be included in generalnyj of macro files>
'''
- keywords = {"OUT_NUM": 1}
- flat_args, spec_args = sort_by_keywords(keywords, args)
-
+ keywords = {"OUT_NUM": 1}
+ flat_args, spec_args = sort_by_keywords(keywords, args)
+
num_outputs = _DEFAULT_CPP_PARTS + _ADDITIONAL_STREAM_COUNT
if "OUT_NUM" in spec_args:
num_outputs = int(spec_args["OUT_NUM"][0])
- tool = flat_args[0]
- prefix = flat_args[1]
-
- cmd = [tool, prefix, 'OUT']
- for num in range(num_outputs):
- cmd.append('{}.{}.cpp'.format(prefix, num))
-
+ tool = flat_args[0]
+ prefix = flat_args[1]
+
+ cmd = [tool, prefix, 'OUT']
+ for num in range(num_outputs):
+ cmd.append('{}.{}.cpp'.format(prefix, num))
+
cpp_parts = int(num_outputs) - _ADDITIONAL_STREAM_COUNT
cpp_parts_args = ['--cpp-parts', str(cpp_parts)]
- if len(flat_args) > 2:
- if flat_args[2] != 'OUTPUT_INCLUDES':
- cmd.append('OPTS')
+ if len(flat_args) > 2:
+ if flat_args[2] != 'OUTPUT_INCLUDES':
+ cmd.append('OPTS')
cmd += cpp_parts_args + flat_args[2:]
else:
cmd += ['OPTS'] + cpp_parts_args
-
+
unit.on_split_codegen_base(cmd)
diff --git a/build/plugins/swig.py b/build/plugins/swig.py
index 32a37204a6..7b7e3e549d 100644
--- a/build/plugins/swig.py
+++ b/build/plugins/swig.py
@@ -1,31 +1,31 @@
-import os
+import os
import posixpath
import re
-
-import _import_wrapper as iw
-import _common as common
-
-
+
+import _import_wrapper as iw
+import _common as common
+
+
def init():
iw.addrule('swg', Swig)
-class Swig(iw.CustomCommand):
- def __init__(self, path, unit):
+class Swig(iw.CustomCommand):
+ def __init__(self, path, unit):
self._tool = unit.get('SWIG_TOOL')
self._library_dir = unit.get('SWIG_LIBRARY') or 'contrib/tools/swig/Lib'
self._local_swig = unit.get('USE_LOCAL_SWIG') == "yes"
- self._path = path
+ self._path = path
self._flags = ['-cpperraswarn']
-
- self._bindir = common.tobuilddir(unit.path())
- self._input_name = common.stripext(os.path.basename(self._path))
-
- relpath = os.path.relpath(os.path.dirname(self._path), unit.path())
-
+
+ self._bindir = common.tobuilddir(unit.path())
+ self._input_name = common.stripext(os.path.basename(self._path))
+
+ relpath = os.path.relpath(os.path.dirname(self._path), unit.path())
+
self._swig_lang = unit.get('SWIG_LANG')
-
+
if self._swig_lang != 'jni_java':
self._main_out = os.path.join(
self._bindir,
@@ -50,57 +50,57 @@ class Swig(iw.CustomCommand):
]
self._incl_dirs = ['$S', '$B'] + [posixpath.join('$S', d) for d in incl_dirs]
- modname = unit.get('REALPRJNAME')
- self._flags.extend(['-module', modname])
-
+ modname = unit.get('REALPRJNAME')
+ self._flags.extend(['-module', modname])
+
if not self._local_swig:
unit.onaddincl(incl_dirs)
-
+
if self._swig_lang == 'python':
- self._out_name = modname + '.py'
- self._flags.extend(['-interface', unit.get('MODULE_PREFIX') + modname])
-
+ self._out_name = modname + '.py'
+ self._flags.extend(['-interface', unit.get('MODULE_PREFIX') + modname])
+
if self._swig_lang == 'perl':
- self._out_name = modname + '.pm'
- self._flags.append('-shadow')
+ self._out_name = modname + '.pm'
+ self._flags.append('-shadow')
unit.onpeerdir(['build/platform/perl'])
-
+
if self._swig_lang in ['jni_cpp', 'java']:
self._out_header = os.path.splitext(self._main_out)[0] + '.h'
if (not unit.get('USE_SYSTEM_JDK')) and (unit.get('OS_ANDROID') != "yes"):
unit.onpeerdir(['contrib/libs/jdk'])
-
+
self._package = 'ru.yandex.' + os.path.dirname(self._path).replace('$S/', '').replace('$B/', '').replace('/', '.').replace('-', '_')
if self._swig_lang in ['jni_java', 'java']:
self._out_name = os.path.splitext(os.path.basename(self._path))[0] + '.jsrc'
elif self._swig_lang != 'jni_cpp':
self._flags.append('-' + self._swig_lang)
-
- def descr(self):
- return 'SW', self._path, 'yellow'
-
- def flags(self):
- return self._flags
-
- def tools(self):
+
+ def descr(self):
+ return 'SW', self._path, 'yellow'
+
+ def flags(self):
+ return self._flags
+
+ def tools(self):
return ['contrib/tools/swig'] if not self._tool else []
-
- def input(self):
- return [
- (self._path, [])
- ]
-
- def output(self):
+
+ def input(self):
+ return [
+ (self._path, [])
+ ]
+
+ def output(self):
if self._swig_lang == 'jni_java':
return [(common.join_intl_paths(self._bindir, self._out_name), [])]
elif self._swig_lang == 'jni_cpp':
return [(self._main_out, []), (self._out_header, [])]
- return [
- (self._main_out, []),
+ return [
+ (self._main_out, []),
(common.join_intl_paths(self._bindir, self._out_name), (['noauto', 'add_to_outs'] if self._swig_lang != 'java' else [])),
] + ([(self._out_header, [])] if self._swig_lang == 'java' else [])
-
+
def output_includes(self):
return [(self._out_header, [])] if self._swig_lang in ['java', 'jni_cpp'] else []
@@ -108,19 +108,19 @@ class Swig(iw.CustomCommand):
if self._local_swig:
binary = self._tool
return self.do_run_java(binary, self._path) if self._swig_lang in ['java', 'jni_cpp', 'jni_java'] else self.do_run(binary, self._path)
-
+
def _incl_flags(self):
return ['-I' + self.resolve_path(x) for x in self._incl_dirs]
- def do_run(self, binary, path):
+ def do_run(self, binary, path):
self.call([binary] + self._flags + [
'-o', self.resolve_path(common.get(self.output, 0)),
'-outdir', self.resolve_path(self._bindir)
] + self._incl_flags() + [self.resolve_path(path)])
-
+
def do_run_java(self, binary, path):
import tarfile
-
+
outdir = self.resolve_path(self._bindir)
if self._swig_lang != 'jni_cpp':
java_srcs_dir = os.path.join(outdir, self._package.replace('.', '/'))
diff --git a/build/plugins/xsyn.py b/build/plugins/xsyn.py
index ab7c1639db..1e0d9ba0bc 100644
--- a/build/plugins/xsyn.py
+++ b/build/plugins/xsyn.py
@@ -1,34 +1,34 @@
-import _import_wrapper as iw
-import _common as common
-
-
-class Xsyn(iw.CustomCommand):
-
- def __init__(self, path, unit):
- self._path = path
-
- def descr(self):
- return 'XN', self._path, 'yellow'
-
- def tools(self):
- return []
-
- def input(self):
- return common.make_tuples([
+import _import_wrapper as iw
+import _common as common
+
+
+class Xsyn(iw.CustomCommand):
+
+ def __init__(self, path, unit):
+ self._path = path
+
+ def descr(self):
+ return 'XN', self._path, 'yellow'
+
+ def tools(self):
+ return []
+
+ def input(self):
+ return common.make_tuples([
'$S/library/cpp/xml/parslib/xsyn2ragel.py',
- self._path,
+ self._path,
'$S/library/cpp/xml/parslib/xmlpars.xh'
- ])
-
- def output(self):
- return common.make_tuples([
- common.tobuilddir(self._path + '.h.rl5')
- ])
-
+ ])
+
+ def output(self):
+ return common.make_tuples([
+ common.tobuilddir(self._path + '.h.rl5')
+ ])
+
def run(self, extra_args, interpeter):
- self.call(interpeter + [self.resolve_path(common.get(self.input, 0)), self.resolve_path(common.get(self.input, 1)),
- self.resolve_path(common.get(self.input, 2)), 'dontuse'], stdout=common.get(self.output, 0))
-
-
-def init():
- iw.addrule('xsyn', Xsyn)
+ self.call(interpeter + [self.resolve_path(common.get(self.input, 0)), self.resolve_path(common.get(self.input, 1)),
+ self.resolve_path(common.get(self.input, 2)), 'dontuse'], stdout=common.get(self.output, 0))
+
+
+def init():
+ iw.addrule('xsyn', Xsyn)
diff --git a/build/plugins/ya.make b/build/plugins/ya.make
index 4ad5f5988e..82733a5516 100644
--- a/build/plugins/ya.make
+++ b/build/plugins/ya.make
@@ -1,7 +1,7 @@
OWNER(g:ymake)
-
+
PY2_LIBRARY()
-
+
PY_SRCS(
code_generator.py
ssqls.py
@@ -16,7 +16,7 @@ PY_SRCS(
PEERDIR(build/plugins/lib)
-END()
+END()
RECURSE(
tests
diff --git a/build/plugins/ytest.py b/build/plugins/ytest.py
index 8970837f0f..820a4d6bb9 100644
--- a/build/plugins/ytest.py
+++ b/build/plugins/ytest.py
@@ -9,12 +9,12 @@ import _common
import lib._metric_resolvers as mr
import _test_const as consts
import _requirements as reqs
-import StringIO
+import StringIO
import subprocess
import collections
import ymake
-
+
MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/'
MDS_SHEME = 'mds'
@@ -27,14 +27,14 @@ CANON_SBR_RESOURCE_REGEX = re.compile(r'(sbr:/?/?(\d+))')
VALID_NETWORK_REQUIREMENTS = ("full", "restricted")
VALID_DNS_REQUIREMENTS = ("default", "local", "dns64")
-BLOCK_SEPARATOR = '============================================================='
+BLOCK_SEPARATOR = '============================================================='
SPLIT_FACTOR_MAX_VALUE = 1000
SPLIT_FACTOR_TEST_FILES_MAX_VALUE = 4250
PARTITION_MODS = ('SEQUENTIAL', 'MODULO')
DEFAULT_TIDY_CONFIG = "build/config/tests/clang_tidy/config.yaml"
DEFAULT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_default_map.json"
PROJECT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_project_map.json"
-
+
tidy_config_map = None
@@ -42,14 +42,14 @@ def ontest_data(unit, *args):
ymake.report_configure_error("TEST_DATA is removed in favour of DATA")
-def save_in_file(filepath, data):
- if filepath:
- with open(filepath, 'a') as file_handler:
- if os.stat(filepath).st_size == 0:
- print >>file_handler, BLOCK_SEPARATOR
- print >> file_handler, data
-
-
+def save_in_file(filepath, data):
+ if filepath:
+ with open(filepath, 'a') as file_handler:
+ if os.stat(filepath).st_size == 0:
+ print >>file_handler, BLOCK_SEPARATOR
+ print >> file_handler, data
+
+
def prepare_recipes(data):
data = data.replace('"USE_RECIPE_DELIM"', "\n")
data = data.replace("$TEST_RECIPES_VALUE", "")
@@ -321,15 +321,15 @@ def dump_test(unit, kw):
ymake.report_configure_error(e)
if valid_kw is None:
return None
- string_handler = StringIO.StringIO()
+ string_handler = StringIO.StringIO()
for k, v in valid_kw.iteritems():
- print >>string_handler, k + ': ' + v
+ print >>string_handler, k + ': ' + v
print >>string_handler, BLOCK_SEPARATOR
- data = string_handler.getvalue()
- string_handler.close()
- return data
-
-
+ data = string_handler.getvalue()
+ string_handler.close()
+ return data
+
+
def serialize_list(lst):
lst = filter(None, lst)
return '\"' + ';'.join(lst) + '\"' if lst else ''
@@ -414,8 +414,8 @@ def get_project_tidy_config(unit):
def onadd_ytest(unit, *args):
- keywords = {"DEPENDS": -1, "DATA": -1, "TIMEOUT": 1, "FORK_MODE": 1, "SPLIT_FACTOR": 1,
- "FORK_SUBTESTS": 0, "FORK_TESTS": 0}
+ keywords = {"DEPENDS": -1, "DATA": -1, "TIMEOUT": 1, "FORK_MODE": 1, "SPLIT_FACTOR": 1,
+ "FORK_SUBTESTS": 0, "FORK_TESTS": 0}
flat_args, spec_args = _common.sort_by_keywords(keywords, args)
test_data = sorted(_common.filter_out_by_keyword(spec_args.get('DATA', []) + get_norm_paths(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED'))
@@ -463,14 +463,14 @@ def onadd_ytest(unit, *args):
unit.set(["DEFAULT_TIDY_CONFIG", default_config_path])
unit.set(["PROJECT_TIDY_CONFIG", project_config_path])
- fork_mode = []
- if 'FORK_SUBTESTS' in spec_args:
- fork_mode.append('subtests')
- if 'FORK_TESTS' in spec_args:
- fork_mode.append('tests')
- fork_mode = fork_mode or spec_args.get('FORK_MODE', []) or unit.get('TEST_FORK_MODE').split()
- fork_mode = ' '.join(fork_mode) if fork_mode else ''
-
+ fork_mode = []
+ if 'FORK_SUBTESTS' in spec_args:
+ fork_mode.append('subtests')
+ if 'FORK_TESTS' in spec_args:
+ fork_mode.append('tests')
+ fork_mode = fork_mode or spec_args.get('FORK_MODE', []) or unit.get('TEST_FORK_MODE').split()
+ fork_mode = ' '.join(fork_mode) if fork_mode else ''
+
unit_path = get_norm_unit_path(unit)
test_record = {
@@ -506,7 +506,7 @@ def onadd_ytest(unit, *args):
'TEST_PARTITION': unit.get("TEST_PARTITION") or 'SEQUENTIAL',
'GO_BENCH_TIMEOUT': unit.get('GO_BENCH_TIMEOUT') or '',
}
-
+
if flat_args[1] == "go.bench":
if "ya:run_go_benchmark" not in test_record["TAG"]:
return
@@ -522,8 +522,8 @@ def onadd_ytest(unit, *args):
if data:
unit.set_property(["DART_DATA", data])
save_in_file(unit.get('TEST_DART_OUT_FILE'), data)
-
-
+
+
def java_srcdirs_to_data(unit, var):
extra_data = []
for srcdir in (unit.get(var) or '').replace('$' + var, '').split():
diff --git a/build/plugins/ytest2.py b/build/plugins/ytest2.py
index 0a34263c35..08c9d72f86 100644
--- a/build/plugins/ytest2.py
+++ b/build/plugins/ytest2.py
@@ -1,54 +1,54 @@
-import os
-import _common
-
-
-def dir_stmts(unit, dir):
- unit.onpeerdir(dir)
- unit.onsrcdir(os.sep.join([dir, 'tests']))
-
-
-def pytest_base(unit, args):
- related_prj_dir = args[0]
- related_prj_name = args[1]
- dir_stmts(unit, related_prj_dir)
- ytest_base(unit, related_prj_dir, related_prj_name, args[2:])
- unit.set(['ADDITIONAL_PATH', '--test-related-path ${ARCADIA_ROOT}/test'])
-
-
-def ytest_base(unit, related_prj_dir, related_prj_name, args):
- keywords = {"DEPENDS": -1, "DATA": -1}
- flat_args, spec_args = _common.sort_by_keywords(keywords, args)
- unit.set(['TEST-NAME', os.path.basename(flat_args[0])])
- unit.set(['SCRIPT-REL-PATH', flat_args[1]])
- unit.set(['SOURCE-FOLDER-PATH', related_prj_dir])
- unit.set(['BUILD-FOLDER-PATH', os.path.join('$B', related_prj_dir)])
- unit.set(['TESTED-BINARY-PATH', flat_args[0]])
-
- custom_deps = ' '.join(spec_args["DEPENDS"]) if "DEPENDS" in spec_args else ''
- unit.set(['CUSTOM-DEPENDENCIES', custom_deps])
- data_lst = spec_args.get('DATA', []) + (unit.get(['__test_data']) or '').split(' ')
+import os
+import _common
+
+
+def dir_stmts(unit, dir):
+ unit.onpeerdir(dir)
+ unit.onsrcdir(os.sep.join([dir, 'tests']))
+
+
+def pytest_base(unit, args):
+ related_prj_dir = args[0]
+ related_prj_name = args[1]
+ dir_stmts(unit, related_prj_dir)
+ ytest_base(unit, related_prj_dir, related_prj_name, args[2:])
+ unit.set(['ADDITIONAL_PATH', '--test-related-path ${ARCADIA_ROOT}/test'])
+
+
+def ytest_base(unit, related_prj_dir, related_prj_name, args):
+ keywords = {"DEPENDS": -1, "DATA": -1}
+ flat_args, spec_args = _common.sort_by_keywords(keywords, args)
+ unit.set(['TEST-NAME', os.path.basename(flat_args[0])])
+ unit.set(['SCRIPT-REL-PATH', flat_args[1]])
+ unit.set(['SOURCE-FOLDER-PATH', related_prj_dir])
+ unit.set(['BUILD-FOLDER-PATH', os.path.join('$B', related_prj_dir)])
+ unit.set(['TESTED-BINARY-PATH', flat_args[0]])
+
+ custom_deps = ' '.join(spec_args["DEPENDS"]) if "DEPENDS" in spec_args else ''
+ unit.set(['CUSTOM-DEPENDENCIES', custom_deps])
+ data_lst = spec_args.get('DATA', []) + (unit.get(['__test_data']) or '').split(' ')
data_lst.sort()
- data = '\"' + ';'.join(data_lst) + '\"' if data_lst else ''
- unit.set(['TEST-DATA', data])
-
+ data = '\"' + ';'.join(data_lst) + '\"' if data_lst else ''
+ unit.set(['TEST-DATA', data])
+
related_dirs_list = ['{ARCADIA_ROOT}/devtools/${YA_ROOT}', '${ARCADIA_ROOT}/devtools/${YA_ROOT}', '$RELATED_TARGET_SRCDIR']
- related_dirs_value = []
- for rel in related_dirs_list:
- related_dirs_value.extend(['--test-related-path', rel])
- unit.set(['RELATED_DIRS', ' '.join(related_dirs_value)])
- unit.set(['TEST_KV', '${{kv;hide:"test_related_dirs {}"}}'.format(' '.join(related_dirs_list))])
-
-
-def on_unittest(unit, *args):
- related_prj_name = args[0]
- related_prj_dir = args[1][3:]
- unit.set(['TEST_TYPE', '${kv;hide:"test-type unittest"}'])
- ytest_base(unit, related_prj_dir, related_prj_name, args)
-
-
-def on_ytest(unit, *args):
- pytest_base(unit, args)
-
-
-def on_py_test(unit, *args):
- pytest_base(unit, args)
+ related_dirs_value = []
+ for rel in related_dirs_list:
+ related_dirs_value.extend(['--test-related-path', rel])
+ unit.set(['RELATED_DIRS', ' '.join(related_dirs_value)])
+ unit.set(['TEST_KV', '${{kv;hide:"test_related_dirs {}"}}'.format(' '.join(related_dirs_list))])
+
+
+def on_unittest(unit, *args):
+ related_prj_name = args[0]
+ related_prj_dir = args[1][3:]
+ unit.set(['TEST_TYPE', '${kv;hide:"test-type unittest"}'])
+ ytest_base(unit, related_prj_dir, related_prj_name, args)
+
+
+def on_ytest(unit, *args):
+ pytest_base(unit, args)
+
+
+def on_py_test(unit, *args):
+ pytest_base(unit, args)
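
For reference, a minimal illustrative sketch (not part of this commit) of how the sort_by_keywords helper restored in build/plugins/_common.py splits macro arguments into positional values and keyword-bound values; the argument list and the OUT_VAR name are made up for the example, while the keyword spec mirrors the one used by oncopy in cp.py.

    from _common import sort_by_keywords

    # 'RESULT' consumes one following argument; 'KEEP_DIR_STRUCT' consumes none
    # and is recorded as a boolean flag.
    keywords = {'RESULT': 1, 'KEEP_DIR_STRUCT': 0}
    args = ['a.txt', 'b.txt', 'RESULT', 'OUT_VAR', 'KEEP_DIR_STRUCT', 'c.txt']

    flat, spec = sort_by_keywords(keywords, args)
    # flat == ['a.txt', 'b.txt', 'c.txt']
    # spec == {'RESULT': ['OUT_VAR'], 'KEEP_DIR_STRUCT': True}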