author     kakabba <kakabba@yandex-team.ru>              2022-02-10 16:46:04 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:46:04 +0300
commit     9c914f41ba5e9f9365f404e892197553ac23809e (patch)
tree       1a2c5ffcf89eb53ecd79dbc9bc0a195c27404d0c
parent     c8e3995898c443e78266f7420aac5fb3da15d413 (diff)
download   ydb-9c914f41ba5e9f9365f404e892197553ac23809e.tar.gz
Restoring authorship annotation for <kakabba@yandex-team.ru>. Commit 2 of 2.
85 files changed, 1054 insertions, 1054 deletions
diff --git a/build/platform/linux_sdk/ya.make b/build/platform/linux_sdk/ya.make index 2fd5abedf6..add74eee9e 100644 --- a/build/platform/linux_sdk/ya.make +++ b/build/platform/linux_sdk/ya.make @@ -40,14 +40,14 @@ ELSEIF (ARCH_PPC64LE) ELSE() MESSAGE(FATAL_ERROR "There is no ${OS_SDK} SDK for PPC64LE") ENDIF() -ELSEIF (ARCH_ARM7) +ELSEIF (ARCH_ARM7) IF (ARM7_FLOAT_ABI == "hard" AND OS_SDK == "ubuntu-16") - DECLARE_EXTERNAL_RESOURCE(OS_SDK_ROOT sbr:1323200692) + DECLARE_EXTERNAL_RESOURCE(OS_SDK_ROOT sbr:1323200692) ELSEIF (ARM7_FLOAT_ABI == "softfp" AND OS_SDK == "ubuntu-18") DECLARE_EXTERNAL_RESOURCE(OS_SDK_ROOT sbr:2402287545) - ELSE() + ELSE() MESSAGE(FATAL_ERROR "There is no ${OS_SDK} SDK for ARMv7 32 bit (float ABI: ${ARM7_FLOAT_ABI})") - ENDIF() + ENDIF() ELSE() MESSAGE(FATAL_ERROR "Unexpected OS_SDK value: ${OS_SDK}") ENDIF() diff --git a/build/platform/yocto_sdk/yocto_sdk/ya.make b/build/platform/yocto_sdk/yocto_sdk/ya.make index d93906b621..37d1727168 100644 --- a/build/platform/yocto_sdk/yocto_sdk/ya.make +++ b/build/platform/yocto_sdk/yocto_sdk/ya.make @@ -1,11 +1,11 @@ -RESOURCES_LIBRARY() - -OWNER(heretic) - -DECLARE_EXTERNAL_RESOURCE(YOCTO_SDK_ROOT sbr:882588946) -CFLAGS( - GLOBAL -cxx-isystem GLOBAL $YOCTO_SDK_ROOT_RESOURCE_GLOBAL/usr/include/c++/5.3.0/arm-poky-linux-gnueabi - GLOBAL -cxx-isystem GLOBAL $YOCTO_SDK_ROOT_RESOURCE_GLOBAL/usr/include/c++/5.3.0 -) - -END() +RESOURCES_LIBRARY() + +OWNER(heretic) + +DECLARE_EXTERNAL_RESOURCE(YOCTO_SDK_ROOT sbr:882588946) +CFLAGS( + GLOBAL -cxx-isystem GLOBAL $YOCTO_SDK_ROOT_RESOURCE_GLOBAL/usr/include/c++/5.3.0/arm-poky-linux-gnueabi + GLOBAL -cxx-isystem GLOBAL $YOCTO_SDK_ROOT_RESOURCE_GLOBAL/usr/include/c++/5.3.0 +) + +END() diff --git a/build/plugins/_common.py b/build/plugins/_common.py index c85dca1d4f..2f831a94db 100644 --- a/build/plugins/_common.py +++ b/build/plugins/_common.py @@ -68,51 +68,51 @@ def before(s, ss): return s[:p] - -def sort_by_keywords(keywords, args): - flat = [] - res = {} - - cur_key = None - limit = -1 - for arg in args: - if arg in keywords: - limit = keywords[arg] - if limit == 0: - res[arg] = True - cur_key = None - limit = -1 - else: - cur_key = arg - continue - if limit == 0: - cur_key = None - limit = -1 - if cur_key: - if cur_key in res: - res[cur_key].append(arg) - else: - res[cur_key] = [arg] - limit -= 1 - else: - flat.append(arg) - return (flat, res) - - -def resolve_common_const(path): - if path.startswith('${ARCADIA_ROOT}'): - return path.replace('${ARCADIA_ROOT}', '$S', 1) - if path.startswith('${ARCADIA_BUILD_ROOT}'): - return path.replace('${ARCADIA_BUILD_ROOT}', '$B', 1) - return path - - -def resolve_to_abs_path(path, source_root, build_root): - if path.startswith('$S') and source_root is not None: - return path.replace('$S', source_root, 1) - if path.startswith('$B') and build_root is not None: - return path.replace('$B', build_root, 1) - return path + +def sort_by_keywords(keywords, args): + flat = [] + res = {} + + cur_key = None + limit = -1 + for arg in args: + if arg in keywords: + limit = keywords[arg] + if limit == 0: + res[arg] = True + cur_key = None + limit = -1 + else: + cur_key = arg + continue + if limit == 0: + cur_key = None + limit = -1 + if cur_key: + if cur_key in res: + res[cur_key].append(arg) + else: + res[cur_key] = [arg] + limit -= 1 + else: + flat.append(arg) + return (flat, res) + + +def resolve_common_const(path): + if path.startswith('${ARCADIA_ROOT}'): + return path.replace('${ARCADIA_ROOT}', '$S', 1) + if path.startswith('${ARCADIA_BUILD_ROOT}'): + return 
path.replace('${ARCADIA_BUILD_ROOT}', '$B', 1) + return path + + +def resolve_to_abs_path(path, source_root, build_root): + if path.startswith('$S') and source_root is not None: + return path.replace('$S', source_root, 1) + if path.startswith('$B') and build_root is not None: + return path.replace('$B', build_root, 1) + return path def resolve_to_ymake_path(path): @@ -121,20 +121,20 @@ def resolve_to_ymake_path(path): def join_intl_paths(*args): return '/'.join(args) - - -def get(fun, num): - return fun()[num][0] - - -def make_tuples(arg_list): - def tpl(): - for x in arg_list: - yield (x, []) - - return list(tpl()) - - + + +def get(fun, num): + return fun()[num][0] + + +def make_tuples(arg_list): + def tpl(): + for x in arg_list: + yield (x, []) + + return list(tpl()) + + def resolve_includes(unit, src, paths): return unit.resolve_include([src] + paths) if paths else [] diff --git a/build/plugins/_custom_command.py b/build/plugins/_custom_command.py index 5330d6c0bb..9692214b22 100644 --- a/build/plugins/_custom_command.py +++ b/build/plugins/_custom_command.py @@ -1,65 +1,65 @@ -import subprocess -import sys +import subprocess +import sys import os -import _common as common - - -class CustomCommand(object): - def __setstate__(self, sdict): - if isinstance(sdict, tuple): - for elem in sdict: - if isinstance(elem, dict): - for key in elem: - setattr(self, key, elem[key]) - - self._source_root = None - self._build_root = None - - def set_source_root(self, path): - self._source_root = path - - def set_build_root(self, path): - self._build_root = path - - def call(self, args, **kwargs): - cwd = self._get_call_specs('cwd', kwargs) - stdout_path = self._get_call_specs('stdout', kwargs) - - resolved_args = [] - - for arg in args: +import _common as common + + +class CustomCommand(object): + def __setstate__(self, sdict): + if isinstance(sdict, tuple): + for elem in sdict: + if isinstance(elem, dict): + for key in elem: + setattr(self, key, elem[key]) + + self._source_root = None + self._build_root = None + + def set_source_root(self, path): + self._source_root = path + + def set_build_root(self, path): + self._build_root = path + + def call(self, args, **kwargs): + cwd = self._get_call_specs('cwd', kwargs) + stdout_path = self._get_call_specs('stdout', kwargs) + + resolved_args = [] + + for arg in args: resolved_args.append(self.resolve_path(arg)) - - if stdout_path: - stdout = open(stdout_path, 'wb') - else: - stdout = None - + + if stdout_path: + stdout = open(stdout_path, 'wb') + else: + stdout = None + env = os.environ.copy() env['ASAN_OPTIONS'] = 'detect_leaks=0' - + rc = subprocess.call(resolved_args, cwd=cwd, stdout=stdout, env=env) - if stdout: - stdout.close() - if rc: - sys.exit(rc) - + if stdout: + stdout.close() + if rc: + sys.exit(rc) + def resolve_path(self, path): return common.resolve_to_abs_path(path, self._source_root, self._build_root) - - def _get_call_specs(self, name, kwargs): - if isinstance(kwargs, dict): - param = kwargs.get(name, None) - if param: - return self.resolve_path(param) - return None - - -def addrule(*unused): - pass - - + + def _get_call_specs(self, name, kwargs): + if isinstance(kwargs, dict): + param = kwargs.get(name, None) + if param: + return self.resolve_path(param) + return None + + +def addrule(*unused): + pass + + def addparser(*unused, **kwargs): - pass + pass diff --git a/build/plugins/_import_wrapper.py b/build/plugins/_import_wrapper.py index f21bb1dc11..883f662314 100644 --- a/build/plugins/_import_wrapper.py +++ 
b/build/plugins/_import_wrapper.py @@ -1,9 +1,9 @@ -try: +try: from ymake import CustomCommand as RealCustomCommand - from ymake import addrule - from ymake import addparser + from ymake import addrule + from ymake import addparser from ymake import subst - + class CustomCommand(RealCustomCommand): def __init__(self, *args, **kwargs): RealCustomCommand.__init__(*args, **kwargs) @@ -11,7 +11,7 @@ try: def resolve_path(self, path): return subst(path) -except ImportError: +except ImportError: from _custom_command import CustomCommand # noqa from _custom_command import addrule # noqa from _custom_command import addparser # noqa diff --git a/build/plugins/_unpickler.py b/build/plugins/_unpickler.py index 9e6249c2f7..e01e7b3118 100644 --- a/build/plugins/_unpickler.py +++ b/build/plugins/_unpickler.py @@ -2,41 +2,41 @@ import sys sys.dont_write_bytecode = True -import argparse -import base64 +import argparse +import base64 try: import cPickle as pickle except Exception: import pickle - + import _common as common - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('--data', help='pickled object of TCustomCommand class', required=True) - parser.add_argument('--src-root', help='$S real path', required=True) - parser.add_argument('--build-root', help='$B real path', required=True) - parser.add_argument('--tools', help='binaries needed by command', required=True, nargs='+') + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--data', help='pickled object of TCustomCommand class', required=True) + parser.add_argument('--src-root', help='$S real path', required=True) + parser.add_argument('--build-root', help='$B real path', required=True) + parser.add_argument('--tools', help='binaries needed by command', required=True, nargs='+') args, unknown_args = parser.parse_known_args() - - encoded_cmd = args.data - src_root = args.src_root - build_root = args.build_root - tools = args.tools - - assert (int(tools[0]) == len(tools[1:])), "tools quantity != tools number!" - + + encoded_cmd = args.data + src_root = args.src_root + build_root = args.build_root + tools = args.tools + + assert (int(tools[0]) == len(tools[1:])), "tools quantity != tools number!" + cmd_object = pickle.loads(base64.b64decode(encoded_cmd)) - - cmd_object.set_source_root(src_root) - cmd_object.set_build_root(build_root) - - if len(tools[1:]) == 0: + + cmd_object.set_source_root(src_root) + cmd_object.set_build_root(build_root) + + if len(tools[1:]) == 0: cmd_object.run(unknown_args, common.get_interpreter_path()) - else: + else: cmd_object.run(unknown_args, *tools[1:]) - - -if __name__ == '__main__': - main() + + +if __name__ == '__main__': + main() diff --git a/build/plugins/_xsyn_includes.py b/build/plugins/_xsyn_includes.py index 5077419912..8d33cea2f0 100644 --- a/build/plugins/_xsyn_includes.py +++ b/build/plugins/_xsyn_includes.py @@ -1,4 +1,4 @@ -def get_include_callback(): +def get_include_callback(): """ .. function: get_include_callback returns function that processes each DOM element to get xsyn include from it, and it's aware of directory with all the xsyns. 
@@ -55,6 +55,6 @@ def process_xsyn(filepath, on_element): return res -def get_all_includes(filepath): - callback = get_include_callback() - return process_xsyn(filepath, callback) +def get_all_includes(filepath): + callback = get_include_callback() + return process_xsyn(filepath, callback) diff --git a/build/plugins/build_mn_files.py b/build/plugins/build_mn_files.py index a2f5582417..4da76f1852 100644 --- a/build/plugins/build_mn_files.py +++ b/build/plugins/build_mn_files.py @@ -7,7 +7,7 @@ def on_build_mns_files(unit, *args): ranking_suffix = '' check = '' index = 0 - fml_unused_tool = '' + fml_unused_tool = '' while index < len(args): if args[index] == 'NAME': index += 1 @@ -17,7 +17,7 @@ def on_build_mns_files(unit, *args): ranking_suffix = args[index] elif args[index] == 'CHECK': check = 'CHECK' - fml_unused_tool = unit.get('FML_UNUSED_TOOL') or '$FML_UNUSED_TOOL' + fml_unused_tool = unit.get('FML_UNUSED_TOOL') or '$FML_UNUSED_TOOL' else: files.append(args[index]) index += 1 diff --git a/build/plugins/cp.py b/build/plugins/cp.py index 2525177e1b..5c663a3bdd 100644 --- a/build/plugins/cp.py +++ b/build/plugins/cp.py @@ -1,30 +1,30 @@ -import os +import os -from _common import sort_by_keywords - - -def oncopy(unit, *args): +from _common import sort_by_keywords + + +def oncopy(unit, *args): keywords = {'RESULT': 1, 'KEEP_DIR_STRUCT': 0, 'DESTINATION': 1, 'FROM': 1} - - flat_args, spec_args = sort_by_keywords(keywords, args) - - dest_dir = spec_args['DESTINATION'][0] if 'DESTINATION' in spec_args else '' + + flat_args, spec_args = sort_by_keywords(keywords, args) + + dest_dir = spec_args['DESTINATION'][0] if 'DESTINATION' in spec_args else '' from_dir = spec_args['FROM'][0] if 'FROM' in spec_args else '' - keep_struct = 'KEEP_DIR_STRUCT' in spec_args - save_in_var = 'RESULT' in spec_args - targets = [] - - for source in flat_args: - rel_path = '' - path_list = source.split(os.sep) - filename = path_list[-1] - if keep_struct: - if path_list[:-1]: - rel_path = os.path.join(*path_list[:-1]) + keep_struct = 'KEEP_DIR_STRUCT' in spec_args + save_in_var = 'RESULT' in spec_args + targets = [] + + for source in flat_args: + rel_path = '' + path_list = source.split(os.sep) + filename = path_list[-1] + if keep_struct: + if path_list[:-1]: + rel_path = os.path.join(*path_list[:-1]) source_path = os.path.join(from_dir, rel_path, filename) target_path = os.path.join(dest_dir, rel_path, filename) - if save_in_var: + if save_in_var: targets.append(target_path) unit.oncopy_file([source_path, target_path]) - if save_in_var: - unit.set([spec_args["RESULT"][0], " ".join(targets)]) + if save_in_var: + unit.set([spec_args["RESULT"][0], " ".join(targets)]) diff --git a/build/plugins/create_init_py.py b/build/plugins/create_init_py.py index ca71928c31..e41a4d22df 100644 --- a/build/plugins/create_init_py.py +++ b/build/plugins/create_init_py.py @@ -1,15 +1,15 @@ -import os +import os + +from _common import sort_by_keywords -from _common import sort_by_keywords - def oncreate_init_py_structure(unit, *args): - if unit.get('DISTBUILD'): - return + if unit.get('DISTBUILD'): + return target_dir = unit.get('PY_PROTOS_FOR_DIR') path_list = target_dir.split(os.path.sep)[1:] inits = [os.path.join("${ARCADIA_BUILD_ROOT}", '__init__.py')] for i in range(1, len(path_list) + 1): inits.append(os.path.join("${ARCADIA_BUILD_ROOT}", os.path.join(*path_list[0:i]), '__init__.py')) unit.ontouch(inits) - + diff --git a/build/plugins/files.py b/build/plugins/files.py index 0381aa792e..78a6fe6169 100644 --- a/build/plugins/files.py 
+++ b/build/plugins/files.py @@ -1,5 +1,5 @@ -def onfiles(unit, *args): - args = list(args) - for arg in args: - if not arg.startswith('${ARCADIA_BUILD_ROOT}'): - unit.oncopy_file([arg, arg]) +def onfiles(unit, *args): + args = list(args) + for arg in args: + if not arg.startswith('${ARCADIA_BUILD_ROOT}'): + unit.oncopy_file([arg, arg]) diff --git a/build/plugins/print_module_type.py b/build/plugins/print_module_type.py index e19b38b165..cc54c55675 100644 --- a/build/plugins/print_module_type.py +++ b/build/plugins/print_module_type.py @@ -1,5 +1,5 @@ -def onprint_module_type(unit, *args): - filepath = unit.get('KIWI_OUT_FILE') +def onprint_module_type(unit, *args): + filepath = unit.get('KIWI_OUT_FILE') if len(args) >= 2 and filepath is not None: - with open(filepath, "a") as file_handler: - print >>file_handler, "{0} {1} {2}".format(args[0], args[1], unit.path()) + with open(filepath, "a") as file_handler: + print >>file_handler, "{0} {1} {2}".format(args[0], args[1], unit.path()) diff --git a/build/plugins/rodata.py b/build/plugins/rodata.py index 4c5f5749b4..3ecb0f9a83 100644 --- a/build/plugins/rodata.py +++ b/build/plugins/rodata.py @@ -127,13 +127,13 @@ class RODataCXX(iw.CustomCommand): return 'RD', self._path, 'light-green' def input(self): - return common.make_tuples([self._path]) + return common.make_tuples([self._path]) def main_out(self): return common.tobuilddir(common.stripext(self._path)) + '.cpp' def output(self): - return common.make_tuples([self.main_out()]) + return common.make_tuples([self.main_out()]) def run(self, extra_args, binary): with open(self.resolve_path(self.main_out()), 'w') as f: @@ -164,5 +164,5 @@ def ro_data(path, unit): return ROData(path, unit) -def init(): - iw.addrule('rodata', ro_data) +def init(): + iw.addrule('rodata', ro_data) diff --git a/build/plugins/split_codegen.py b/build/plugins/split_codegen.py index 4fa7b833f2..f1e60bc142 100644 --- a/build/plugins/split_codegen.py +++ b/build/plugins/split_codegen.py @@ -1,12 +1,12 @@ -from _common import sort_by_keywords - +from _common import sort_by_keywords + # This hard-coded many times in CppParts in various codegens _DEFAULT_CPP_PARTS = 20 # See TCodegenParams::MethodStream usage in factor codegen _ADDITIONAL_STREAM_COUNT = 5 -def onsplit_codegen(unit, *args): +def onsplit_codegen(unit, *args): ''' @usage: SPLIT_CODEGEN(tool prefix opts... [OUT_NUM num] [OUTPUT_INCLUDES output_includes...]) @@ -16,28 +16,28 @@ def onsplit_codegen(unit, *args): 1. OUT_NUM <the number of generated Prefix.N.cpp default 25 (N varies from 0 to 24)> 2. 
OUTPUT_INCLUDES <path to files that will be included in generalnyj of macro files> ''' - keywords = {"OUT_NUM": 1} - flat_args, spec_args = sort_by_keywords(keywords, args) - + keywords = {"OUT_NUM": 1} + flat_args, spec_args = sort_by_keywords(keywords, args) + num_outputs = _DEFAULT_CPP_PARTS + _ADDITIONAL_STREAM_COUNT if "OUT_NUM" in spec_args: num_outputs = int(spec_args["OUT_NUM"][0]) - tool = flat_args[0] - prefix = flat_args[1] - - cmd = [tool, prefix, 'OUT'] - for num in range(num_outputs): - cmd.append('{}.{}.cpp'.format(prefix, num)) - + tool = flat_args[0] + prefix = flat_args[1] + + cmd = [tool, prefix, 'OUT'] + for num in range(num_outputs): + cmd.append('{}.{}.cpp'.format(prefix, num)) + cpp_parts = int(num_outputs) - _ADDITIONAL_STREAM_COUNT cpp_parts_args = ['--cpp-parts', str(cpp_parts)] - if len(flat_args) > 2: - if flat_args[2] != 'OUTPUT_INCLUDES': - cmd.append('OPTS') + if len(flat_args) > 2: + if flat_args[2] != 'OUTPUT_INCLUDES': + cmd.append('OPTS') cmd += cpp_parts_args + flat_args[2:] else: cmd += ['OPTS'] + cpp_parts_args - + unit.on_split_codegen_base(cmd) diff --git a/build/plugins/swig.py b/build/plugins/swig.py index 7b7e3e549d..32a37204a6 100644 --- a/build/plugins/swig.py +++ b/build/plugins/swig.py @@ -1,31 +1,31 @@ -import os +import os import posixpath import re - -import _import_wrapper as iw -import _common as common - - + +import _import_wrapper as iw +import _common as common + + def init(): iw.addrule('swg', Swig) -class Swig(iw.CustomCommand): - def __init__(self, path, unit): +class Swig(iw.CustomCommand): + def __init__(self, path, unit): self._tool = unit.get('SWIG_TOOL') self._library_dir = unit.get('SWIG_LIBRARY') or 'contrib/tools/swig/Lib' self._local_swig = unit.get('USE_LOCAL_SWIG') == "yes" - self._path = path + self._path = path self._flags = ['-cpperraswarn'] - - self._bindir = common.tobuilddir(unit.path()) - self._input_name = common.stripext(os.path.basename(self._path)) - - relpath = os.path.relpath(os.path.dirname(self._path), unit.path()) - + + self._bindir = common.tobuilddir(unit.path()) + self._input_name = common.stripext(os.path.basename(self._path)) + + relpath = os.path.relpath(os.path.dirname(self._path), unit.path()) + self._swig_lang = unit.get('SWIG_LANG') - + if self._swig_lang != 'jni_java': self._main_out = os.path.join( self._bindir, @@ -50,57 +50,57 @@ class Swig(iw.CustomCommand): ] self._incl_dirs = ['$S', '$B'] + [posixpath.join('$S', d) for d in incl_dirs] - modname = unit.get('REALPRJNAME') - self._flags.extend(['-module', modname]) - + modname = unit.get('REALPRJNAME') + self._flags.extend(['-module', modname]) + if not self._local_swig: unit.onaddincl(incl_dirs) - + if self._swig_lang == 'python': - self._out_name = modname + '.py' - self._flags.extend(['-interface', unit.get('MODULE_PREFIX') + modname]) - + self._out_name = modname + '.py' + self._flags.extend(['-interface', unit.get('MODULE_PREFIX') + modname]) + if self._swig_lang == 'perl': - self._out_name = modname + '.pm' - self._flags.append('-shadow') + self._out_name = modname + '.pm' + self._flags.append('-shadow') unit.onpeerdir(['build/platform/perl']) - + if self._swig_lang in ['jni_cpp', 'java']: self._out_header = os.path.splitext(self._main_out)[0] + '.h' if (not unit.get('USE_SYSTEM_JDK')) and (unit.get('OS_ANDROID') != "yes"): unit.onpeerdir(['contrib/libs/jdk']) - + self._package = 'ru.yandex.' 
+ os.path.dirname(self._path).replace('$S/', '').replace('$B/', '').replace('/', '.').replace('-', '_') if self._swig_lang in ['jni_java', 'java']: self._out_name = os.path.splitext(os.path.basename(self._path))[0] + '.jsrc' elif self._swig_lang != 'jni_cpp': self._flags.append('-' + self._swig_lang) - - def descr(self): - return 'SW', self._path, 'yellow' - - def flags(self): - return self._flags - - def tools(self): + + def descr(self): + return 'SW', self._path, 'yellow' + + def flags(self): + return self._flags + + def tools(self): return ['contrib/tools/swig'] if not self._tool else [] - - def input(self): - return [ - (self._path, []) - ] - - def output(self): + + def input(self): + return [ + (self._path, []) + ] + + def output(self): if self._swig_lang == 'jni_java': return [(common.join_intl_paths(self._bindir, self._out_name), [])] elif self._swig_lang == 'jni_cpp': return [(self._main_out, []), (self._out_header, [])] - return [ - (self._main_out, []), + return [ + (self._main_out, []), (common.join_intl_paths(self._bindir, self._out_name), (['noauto', 'add_to_outs'] if self._swig_lang != 'java' else [])), ] + ([(self._out_header, [])] if self._swig_lang == 'java' else []) - + def output_includes(self): return [(self._out_header, [])] if self._swig_lang in ['java', 'jni_cpp'] else [] @@ -108,19 +108,19 @@ class Swig(iw.CustomCommand): if self._local_swig: binary = self._tool return self.do_run_java(binary, self._path) if self._swig_lang in ['java', 'jni_cpp', 'jni_java'] else self.do_run(binary, self._path) - + def _incl_flags(self): return ['-I' + self.resolve_path(x) for x in self._incl_dirs] - def do_run(self, binary, path): + def do_run(self, binary, path): self.call([binary] + self._flags + [ '-o', self.resolve_path(common.get(self.output, 0)), '-outdir', self.resolve_path(self._bindir) ] + self._incl_flags() + [self.resolve_path(path)]) - + def do_run_java(self, binary, path): import tarfile - + outdir = self.resolve_path(self._bindir) if self._swig_lang != 'jni_cpp': java_srcs_dir = os.path.join(outdir, self._package.replace('.', '/')) diff --git a/build/plugins/xsyn.py b/build/plugins/xsyn.py index 1e0d9ba0bc..ab7c1639db 100644 --- a/build/plugins/xsyn.py +++ b/build/plugins/xsyn.py @@ -1,34 +1,34 @@ -import _import_wrapper as iw -import _common as common - - -class Xsyn(iw.CustomCommand): - - def __init__(self, path, unit): - self._path = path - - def descr(self): - return 'XN', self._path, 'yellow' - - def tools(self): - return [] - - def input(self): - return common.make_tuples([ +import _import_wrapper as iw +import _common as common + + +class Xsyn(iw.CustomCommand): + + def __init__(self, path, unit): + self._path = path + + def descr(self): + return 'XN', self._path, 'yellow' + + def tools(self): + return [] + + def input(self): + return common.make_tuples([ '$S/library/cpp/xml/parslib/xsyn2ragel.py', - self._path, + self._path, '$S/library/cpp/xml/parslib/xmlpars.xh' - ]) - - def output(self): - return common.make_tuples([ - common.tobuilddir(self._path + '.h.rl5') - ]) - + ]) + + def output(self): + return common.make_tuples([ + common.tobuilddir(self._path + '.h.rl5') + ]) + def run(self, extra_args, interpeter): - self.call(interpeter + [self.resolve_path(common.get(self.input, 0)), self.resolve_path(common.get(self.input, 1)), - self.resolve_path(common.get(self.input, 2)), 'dontuse'], stdout=common.get(self.output, 0)) - - -def init(): - iw.addrule('xsyn', Xsyn) + self.call(interpeter + [self.resolve_path(common.get(self.input, 0)), 
self.resolve_path(common.get(self.input, 1)), + self.resolve_path(common.get(self.input, 2)), 'dontuse'], stdout=common.get(self.output, 0)) + + +def init(): + iw.addrule('xsyn', Xsyn) diff --git a/build/plugins/ya.make b/build/plugins/ya.make index 82733a5516..4ad5f5988e 100644 --- a/build/plugins/ya.make +++ b/build/plugins/ya.make @@ -1,7 +1,7 @@ OWNER(g:ymake) - + PY2_LIBRARY() - + PY_SRCS( code_generator.py ssqls.py @@ -16,7 +16,7 @@ PY_SRCS( PEERDIR(build/plugins/lib) -END() +END() RECURSE( tests diff --git a/build/plugins/ytest.py b/build/plugins/ytest.py index 820a4d6bb9..8970837f0f 100644 --- a/build/plugins/ytest.py +++ b/build/plugins/ytest.py @@ -9,12 +9,12 @@ import _common import lib._metric_resolvers as mr import _test_const as consts import _requirements as reqs -import StringIO +import StringIO import subprocess import collections import ymake - + MDS_URI_PREFIX = 'https://storage.yandex-team.ru/get-devtools/' MDS_SHEME = 'mds' @@ -27,14 +27,14 @@ CANON_SBR_RESOURCE_REGEX = re.compile(r'(sbr:/?/?(\d+))') VALID_NETWORK_REQUIREMENTS = ("full", "restricted") VALID_DNS_REQUIREMENTS = ("default", "local", "dns64") -BLOCK_SEPARATOR = '=============================================================' +BLOCK_SEPARATOR = '=============================================================' SPLIT_FACTOR_MAX_VALUE = 1000 SPLIT_FACTOR_TEST_FILES_MAX_VALUE = 4250 PARTITION_MODS = ('SEQUENTIAL', 'MODULO') DEFAULT_TIDY_CONFIG = "build/config/tests/clang_tidy/config.yaml" DEFAULT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_default_map.json" PROJECT_TIDY_CONFIG_MAP_PATH = "build/yandex_specific/config/clang_tidy/tidy_project_map.json" - + tidy_config_map = None @@ -42,14 +42,14 @@ def ontest_data(unit, *args): ymake.report_configure_error("TEST_DATA is removed in favour of DATA") -def save_in_file(filepath, data): - if filepath: - with open(filepath, 'a') as file_handler: - if os.stat(filepath).st_size == 0: - print >>file_handler, BLOCK_SEPARATOR - print >> file_handler, data - - +def save_in_file(filepath, data): + if filepath: + with open(filepath, 'a') as file_handler: + if os.stat(filepath).st_size == 0: + print >>file_handler, BLOCK_SEPARATOR + print >> file_handler, data + + def prepare_recipes(data): data = data.replace('"USE_RECIPE_DELIM"', "\n") data = data.replace("$TEST_RECIPES_VALUE", "") @@ -321,15 +321,15 @@ def dump_test(unit, kw): ymake.report_configure_error(e) if valid_kw is None: return None - string_handler = StringIO.StringIO() + string_handler = StringIO.StringIO() for k, v in valid_kw.iteritems(): - print >>string_handler, k + ': ' + v + print >>string_handler, k + ': ' + v print >>string_handler, BLOCK_SEPARATOR - data = string_handler.getvalue() - string_handler.close() - return data - - + data = string_handler.getvalue() + string_handler.close() + return data + + def serialize_list(lst): lst = filter(None, lst) return '\"' + ';'.join(lst) + '\"' if lst else '' @@ -414,8 +414,8 @@ def get_project_tidy_config(unit): def onadd_ytest(unit, *args): - keywords = {"DEPENDS": -1, "DATA": -1, "TIMEOUT": 1, "FORK_MODE": 1, "SPLIT_FACTOR": 1, - "FORK_SUBTESTS": 0, "FORK_TESTS": 0} + keywords = {"DEPENDS": -1, "DATA": -1, "TIMEOUT": 1, "FORK_MODE": 1, "SPLIT_FACTOR": 1, + "FORK_SUBTESTS": 0, "FORK_TESTS": 0} flat_args, spec_args = _common.sort_by_keywords(keywords, args) test_data = sorted(_common.filter_out_by_keyword(spec_args.get('DATA', []) + get_norm_paths(unit, 'TEST_DATA_VALUE'), 'AUTOUPDATED')) @@ -463,14 +463,14 @@ def onadd_ytest(unit, *args): 
unit.set(["DEFAULT_TIDY_CONFIG", default_config_path]) unit.set(["PROJECT_TIDY_CONFIG", project_config_path]) - fork_mode = [] - if 'FORK_SUBTESTS' in spec_args: - fork_mode.append('subtests') - if 'FORK_TESTS' in spec_args: - fork_mode.append('tests') - fork_mode = fork_mode or spec_args.get('FORK_MODE', []) or unit.get('TEST_FORK_MODE').split() - fork_mode = ' '.join(fork_mode) if fork_mode else '' - + fork_mode = [] + if 'FORK_SUBTESTS' in spec_args: + fork_mode.append('subtests') + if 'FORK_TESTS' in spec_args: + fork_mode.append('tests') + fork_mode = fork_mode or spec_args.get('FORK_MODE', []) or unit.get('TEST_FORK_MODE').split() + fork_mode = ' '.join(fork_mode) if fork_mode else '' + unit_path = get_norm_unit_path(unit) test_record = { @@ -506,7 +506,7 @@ def onadd_ytest(unit, *args): 'TEST_PARTITION': unit.get("TEST_PARTITION") or 'SEQUENTIAL', 'GO_BENCH_TIMEOUT': unit.get('GO_BENCH_TIMEOUT') or '', } - + if flat_args[1] == "go.bench": if "ya:run_go_benchmark" not in test_record["TAG"]: return @@ -522,8 +522,8 @@ def onadd_ytest(unit, *args): if data: unit.set_property(["DART_DATA", data]) save_in_file(unit.get('TEST_DART_OUT_FILE'), data) - - + + def java_srcdirs_to_data(unit, var): extra_data = [] for srcdir in (unit.get(var) or '').replace('$' + var, '').split(): diff --git a/build/plugins/ytest2.py b/build/plugins/ytest2.py index 08c9d72f86..0a34263c35 100644 --- a/build/plugins/ytest2.py +++ b/build/plugins/ytest2.py @@ -1,54 +1,54 @@ -import os -import _common - - -def dir_stmts(unit, dir): - unit.onpeerdir(dir) - unit.onsrcdir(os.sep.join([dir, 'tests'])) - - -def pytest_base(unit, args): - related_prj_dir = args[0] - related_prj_name = args[1] - dir_stmts(unit, related_prj_dir) - ytest_base(unit, related_prj_dir, related_prj_name, args[2:]) - unit.set(['ADDITIONAL_PATH', '--test-related-path ${ARCADIA_ROOT}/test']) - - -def ytest_base(unit, related_prj_dir, related_prj_name, args): - keywords = {"DEPENDS": -1, "DATA": -1} - flat_args, spec_args = _common.sort_by_keywords(keywords, args) - unit.set(['TEST-NAME', os.path.basename(flat_args[0])]) - unit.set(['SCRIPT-REL-PATH', flat_args[1]]) - unit.set(['SOURCE-FOLDER-PATH', related_prj_dir]) - unit.set(['BUILD-FOLDER-PATH', os.path.join('$B', related_prj_dir)]) - unit.set(['TESTED-BINARY-PATH', flat_args[0]]) - - custom_deps = ' '.join(spec_args["DEPENDS"]) if "DEPENDS" in spec_args else '' - unit.set(['CUSTOM-DEPENDENCIES', custom_deps]) - data_lst = spec_args.get('DATA', []) + (unit.get(['__test_data']) or '').split(' ') +import os +import _common + + +def dir_stmts(unit, dir): + unit.onpeerdir(dir) + unit.onsrcdir(os.sep.join([dir, 'tests'])) + + +def pytest_base(unit, args): + related_prj_dir = args[0] + related_prj_name = args[1] + dir_stmts(unit, related_prj_dir) + ytest_base(unit, related_prj_dir, related_prj_name, args[2:]) + unit.set(['ADDITIONAL_PATH', '--test-related-path ${ARCADIA_ROOT}/test']) + + +def ytest_base(unit, related_prj_dir, related_prj_name, args): + keywords = {"DEPENDS": -1, "DATA": -1} + flat_args, spec_args = _common.sort_by_keywords(keywords, args) + unit.set(['TEST-NAME', os.path.basename(flat_args[0])]) + unit.set(['SCRIPT-REL-PATH', flat_args[1]]) + unit.set(['SOURCE-FOLDER-PATH', related_prj_dir]) + unit.set(['BUILD-FOLDER-PATH', os.path.join('$B', related_prj_dir)]) + unit.set(['TESTED-BINARY-PATH', flat_args[0]]) + + custom_deps = ' '.join(spec_args["DEPENDS"]) if "DEPENDS" in spec_args else '' + unit.set(['CUSTOM-DEPENDENCIES', custom_deps]) + data_lst = spec_args.get('DATA', []) + 
(unit.get(['__test_data']) or '').split(' ') data_lst.sort() - data = '\"' + ';'.join(data_lst) + '\"' if data_lst else '' - unit.set(['TEST-DATA', data]) - + data = '\"' + ';'.join(data_lst) + '\"' if data_lst else '' + unit.set(['TEST-DATA', data]) + related_dirs_list = ['{ARCADIA_ROOT}/devtools/${YA_ROOT}', '${ARCADIA_ROOT}/devtools/${YA_ROOT}', '$RELATED_TARGET_SRCDIR'] - related_dirs_value = [] - for rel in related_dirs_list: - related_dirs_value.extend(['--test-related-path', rel]) - unit.set(['RELATED_DIRS', ' '.join(related_dirs_value)]) - unit.set(['TEST_KV', '${{kv;hide:"test_related_dirs {}"}}'.format(' '.join(related_dirs_list))]) - - -def on_unittest(unit, *args): - related_prj_name = args[0] - related_prj_dir = args[1][3:] - unit.set(['TEST_TYPE', '${kv;hide:"test-type unittest"}']) - ytest_base(unit, related_prj_dir, related_prj_name, args) - - -def on_ytest(unit, *args): - pytest_base(unit, args) - - -def on_py_test(unit, *args): - pytest_base(unit, args) + related_dirs_value = [] + for rel in related_dirs_list: + related_dirs_value.extend(['--test-related-path', rel]) + unit.set(['RELATED_DIRS', ' '.join(related_dirs_value)]) + unit.set(['TEST_KV', '${{kv;hide:"test_related_dirs {}"}}'.format(' '.join(related_dirs_list))]) + + +def on_unittest(unit, *args): + related_prj_name = args[0] + related_prj_dir = args[1][3:] + unit.set(['TEST_TYPE', '${kv;hide:"test-type unittest"}']) + ytest_base(unit, related_prj_dir, related_prj_name, args) + + +def on_ytest(unit, *args): + pytest_base(unit, args) + + +def on_py_test(unit, *args): + pytest_base(unit, args) diff --git a/build/scripts/_check_compiler.cpp b/build/scripts/_check_compiler.cpp index dd9dd73067..53c5fdf179 100644 --- a/build/scripts/_check_compiler.cpp +++ b/build/scripts/_check_compiler.cpp @@ -1 +1 @@ -#include <stdio.h> +#include <stdio.h> diff --git a/build/scripts/build_mn.py b/build/scripts/build_mn.py index b0cc12ed3a..5bb03c247c 100755 --- a/build/scripts/build_mn.py +++ b/build/scripts/build_mn.py @@ -5,7 +5,7 @@ import sys import os import shutil import re -import subprocess +import subprocess def get_value(val): @@ -78,30 +78,30 @@ class BuildMnBase(object): class BuildMn(BuildMnBase): def Run(self, argv): - if len(argv) < 6: - print >>sys.stderr, "BuildMn.Run(<ARCADIA_ROOT> <archiver> <mninfo> <mnname> <mnrankingSuffix> <cppOutput> [params...])" + if len(argv) < 6: + print >>sys.stderr, "BuildMn.Run(<ARCADIA_ROOT> <archiver> <mninfo> <mnname> <mnrankingSuffix> <cppOutput> [params...])" sys.exit(1) self.SrcRoot = argv[0] self.archiver = argv[1] - mninfo = argv[2] - mnname = argv[3] - mnrankingSuffix = argv[4] - mncppPath = argv[5] + mninfo = argv[2] + mnname = argv[3] + mnrankingSuffix = argv[4] + mncppPath = argv[5] check = False ptr = False multi = False - self.fml_unused_tool = '' - for param in argv[6:]: + self.fml_unused_tool = '' + for param in argv[6:]: if param == "CHECK": check = True elif param == "PTR": ptr = True elif param == "MULTI": multi = True - elif param.startswith('fml_tool='): - self.fml_unused_tool = get_value(param) + elif param.startswith('fml_tool='): + self.fml_unused_tool = get_value(param) else: print >>sys.stdout, "Unknown param: {0}".format(param) super(BuildMn, self).Run(mninfo, mnname, mnrankingSuffix, mncppPath, check=check, ptr=ptr, multi=multi) @@ -122,29 +122,29 @@ class BuildMns(BuildMnBase): self.mnmultilist = "const {0} {1}".format(mnmultilisttype, self.mnmultilistname) def InitForAll(self, argv): - if len(argv) < 8: - print >>sys.stderr, 
"BuildMns.InitForAll(<ARCADIA_ROOT> <BINDIR> <archiver> <listname> <mnranking_suffix> <hdrfile> <srcfile> <mninfos> [fml_tool=<fml_unused_tool> CHECK])" + if len(argv) < 8: + print >>sys.stderr, "BuildMns.InitForAll(<ARCADIA_ROOT> <BINDIR> <archiver> <listname> <mnranking_suffix> <hdrfile> <srcfile> <mninfos> [fml_tool=<fml_unused_tool> CHECK])" sys.exit(1) bmns_args = [] self.check = False - self.fml_unused_tool = '' + self.fml_unused_tool = '' for arg in argv: if arg == "CHECK": self.check = True - elif arg.startswith('fml_tool='): - self.fml_unused_tool = get_value(arg) + elif arg.startswith('fml_tool='): + self.fml_unused_tool = get_value(arg) else: bmns_args.append(arg) self.SrcRoot = bmns_args[0] self.BINDIR = bmns_args[1] self.archiver = bmns_args[2] - self.listname = bmns_args[3] - self.mnrankingSuffix = get_value(bmns_args[4]) - self.hdrfile = bmns_args[5] - self.srcfile = bmns_args[6] - self.mninfos = bmns_args[7:] + self.listname = bmns_args[3] + self.mnrankingSuffix = get_value(bmns_args[4]) + self.hdrfile = bmns_args[5] + self.srcfile = bmns_args[6] + self.mninfos = bmns_args[7:] self.InitBase(self.listname, self.mnrankingSuffix) @@ -180,21 +180,21 @@ class BuildMns(BuildMnBase): bmns_args = [] self.check = False - self.fml_unused_tool = '' + self.fml_unused_tool = '' for arg in argv: if arg == "CHECK": self.check = True - elif arg.startswith('fml_tool='): - self.fml_unused_tool = get_value(arg) + elif arg.startswith('fml_tool='): + self.fml_unused_tool = get_value(arg) else: bmns_args.append(arg) self.SrcRoot = bmns_args[0] self.BINDIR = bmns_args[1] self.archiver = bmns_args[2] - self.listname = bmns_args[3] - self.mnrankingSuffix = get_value(bmns_args[4]) - self.mninfos = bmns_args[5:] + self.listname = bmns_args[3] + self.mnrankingSuffix = get_value(bmns_args[4]) + self.mninfos = bmns_args[5:] def BuildMnsHeader(self): if self.mninfos: diff --git a/build/scripts/c_templates/svn_interface.c b/build/scripts/c_templates/svn_interface.c index af08627865..d8bdd1a70d 100644 --- a/build/scripts/c_templates/svn_interface.c +++ b/build/scripts/c_templates/svn_interface.c @@ -142,15 +142,15 @@ const char* GetProgramBuildDate() { #endif } -int GetProgramBuildTimestamp() { -#if defined(BUILD_TIMESTAMP) - return BUILD_TIMESTAMP; -#else - return 0; -#endif -} - - +int GetProgramBuildTimestamp() { +#if defined(BUILD_TIMESTAMP) + return BUILD_TIMESTAMP; +#else + return 0; +#endif +} + + const char* GetVCS() { #if defined(VCS) return VCS; diff --git a/build/scripts/c_templates/svnversion.h b/build/scripts/c_templates/svnversion.h index 71a950125f..7aece931aa 100644 --- a/build/scripts/c_templates/svnversion.h +++ b/build/scripts/c_templates/svnversion.h @@ -32,7 +32,7 @@ const char* GetProgramShortVersionData() Y_HIDDEN; const char* GetProgramBuildUser() Y_HIDDEN; const char* GetProgramBuildHost() Y_HIDDEN; const char* GetProgramBuildDate() Y_HIDDEN; -int GetProgramBuildTimestamp() Y_HIDDEN; +int GetProgramBuildTimestamp() Y_HIDDEN; const char* GetVCS() Y_HIDDEN; const char* GetBranch() Y_HIDDEN; const char* GetTag() Y_HIDDEN; diff --git a/build/scripts/cat.py b/build/scripts/cat.py index 8d83194263..0c3f73d96f 100755 --- a/build/scripts/cat.py +++ b/build/scripts/cat.py @@ -1,13 +1,13 @@ #!/usr/bin/env python import sys from shutil import copyfileobj as copy -import os.path +import os.path if __name__ == '__main__': for filename in sys.argv[1:] or ["-"]: if filename == "-": copy(sys.stdin, sys.stdout) - else: + else: if os.path.exists(filename): with open(filename, 'rb') as file: 
copy(file, sys.stdout) diff --git a/build/scripts/copy_to_dir.py b/build/scripts/copy_to_dir.py index de2e420c6a..9baeb5ffac 100644 --- a/build/scripts/copy_to_dir.py +++ b/build/scripts/copy_to_dir.py @@ -1,11 +1,11 @@ -import errno -import sys -import os -import shutil +import errno +import sys +import os +import shutil import optparse import tarfile - - + + def parse_args(): parser = optparse.OptionParser() parser.add_option('--build-root') @@ -15,15 +15,15 @@ def parse_args(): def ensure_dir_exists(path): - try: - os.makedirs(path) - except OSError as e: - if e.errno == errno.EEXIST and os.path.isdir(path): - pass - else: - raise - - + try: + os.makedirs(path) + except OSError as e: + if e.errno == errno.EEXIST and os.path.isdir(path): + pass + else: + raise + + def hardlink_or_copy(src, dst): if os.name == 'nt': shutil.copy(src, dst) @@ -40,7 +40,7 @@ def hardlink_or_copy(src, dst): raise -def main(): +def main(): opts, args = parse_args() assert opts.build_root assert opts.dest_dir @@ -56,13 +56,13 @@ def main(): raise Exception('Unsopported archive type for {}. Use one of: tar, tar.gz, tgz.'.format(os.path.basename(opts.dest_arch))) for arg in args: - dst = arg + dst = arg if dst.startswith(opts.build_root): dst = dst[len(opts.build_root) + 1:] - + if dest_arch and not arg.endswith('.pkg.fake'): dest_arch.add(arg, arcname=dst) - + dst = os.path.join(opts.dest_dir, dst) ensure_dir_exists(os.path.dirname(dst)) hardlink_or_copy(arg, dst) @@ -71,5 +71,5 @@ def main(): dest_arch.close() -if __name__ == '__main__': - sys.exit(main()) +if __name__ == '__main__': + sys.exit(main()) diff --git a/build/scripts/link_dyn_lib.py b/build/scripts/link_dyn_lib.py index 58f054b37e..23487f5c1e 100644 --- a/build/scripts/link_dyn_lib.py +++ b/build/scripts/link_dyn_lib.py @@ -1,6 +1,6 @@ import sys import os -import subprocess +import subprocess import tempfile import collections import optparse @@ -170,8 +170,8 @@ def parse_args(): parser.add_option('--whole-archive-peers', action='append') parser.add_option('--whole-archive-libs', action='append') return parser.parse_args() - - + + if __name__ == '__main__': opts, args = parse_args() diff --git a/build/scripts/link_fat_obj.py b/build/scripts/link_fat_obj.py index 2700aa4395..c189668b9e 100644 --- a/build/scripts/link_fat_obj.py +++ b/build/scripts/link_fat_obj.py @@ -1,7 +1,7 @@ -import argparse -import subprocess -import sys - +import argparse +import subprocess +import sys + from process_whole_archive_option import ProcessWholeArchiveOption YA_ARG_PREFIX = '-Ya,' @@ -16,7 +16,7 @@ def get_args(): parser.add_argument('--build-root', default=None) parser.add_argument('--with-own-obj', action='store_true', default=False) parser.add_argument('--with-global-srcs', action='store_true', default=False) - + groups = {} args_list = groups.setdefault('default', []) for arg in sys.argv[1:]: @@ -68,17 +68,17 @@ def main(): do_archive += auto_input if args.with_global_srcs: do_archive += global_srcs - + def call(c): proc = subprocess.Popen(c, shell=False, stderr=sys.stderr, stdout=sys.stdout, cwd=args.build_root) proc.communicate() return proc.returncode - + if obj_output: link_res = call(do_link) if link_res: sys.exit(link_res) - + if do_globals: glob_res = call(do_globals) if glob_res: diff --git a/build/scripts/link_lib.py b/build/scripts/link_lib.py index 7d4cf84c5e..344d50d4eb 100644 --- a/build/scripts/link_lib.py +++ b/build/scripts/link_lib.py @@ -1,85 +1,85 @@ -import sys -import subprocess -import tempfile -import os - - -class Opts(object): - def 
__init__(self, args): - self.archiver = args[0] - self.arch_type = args[1] +import sys +import subprocess +import tempfile +import os + + +class Opts(object): + def __init__(self, args): + self.archiver = args[0] + self.arch_type = args[1] self.llvm_ar_format = args[2] self.build_root = args[3] self.plugin = args[4] self.output = args[5] auto_input = args[6:] - + if self.arch_type == 'GNU_AR': - self.create_flags = ['rcs'] - self.modify_flags = ['-M'] + self.create_flags = ['rcs'] + self.modify_flags = ['-M'] elif self.arch_type == 'LLVM_AR': self.create_flags = ['rcs', '-format=%s' % self.llvm_ar_format] self.modify_flags = ['-M'] - elif self.arch_type == 'LIBTOOL': - self.create_flags = ['-static', '-o'] - self.modify_flags = [] - + elif self.arch_type == 'LIBTOOL': + self.create_flags = ['-static', '-o'] + self.modify_flags = [] + need_modify = self.arch_type != 'LIBTOOL' and any(item.endswith('.a') for item in auto_input) - if need_modify: - self.objs = filter(lambda x: x.endswith('.o'), auto_input) - self.libs = filter(lambda x: x.endswith('.a'), auto_input) - else: - self.objs = auto_input - self.libs = [] - + if need_modify: + self.objs = filter(lambda x: x.endswith('.o'), auto_input) + self.libs = filter(lambda x: x.endswith('.a'), auto_input) + else: + self.objs = auto_input + self.libs = [] + self.plugin_flags = ['--plugin', self.plugin] if self.plugin != 'None' else [] - -def get_opts(args): - return Opts(args) - - -if __name__ == "__main__": - opts = get_opts(sys.argv[1:]) - + +def get_opts(args): + return Opts(args) + + +if __name__ == "__main__": + opts = get_opts(sys.argv[1:]) + # There is a bug in llvm-ar. Some files with size slightly greater 2^32 # still have GNU format instead of GNU64 and cause link problems. # Workaround just lowers llvm-ar's GNU64 threshold to 2^31. if opts.arch_type == 'LLVM_AR': os.environ['SYM64_THRESHOLD'] = '31' - def call(): + def call(): try: p = subprocess.Popen(cmd, stdin=stdin, cwd=opts.build_root) rc = p.wait() return rc except OSError as e: raise Exception('while running %s: %s' % (' '.join(cmd), e)) - + try: os.unlink(opts.output) except OSError: pass - if not opts.libs: + if not opts.libs: cmd = [opts.archiver] + opts.create_flags + opts.plugin_flags + [opts.output] + opts.objs - stdin = None - exit_code = call() - else: - temp = tempfile.NamedTemporaryFile(dir=os.path.dirname(opts.output), delete=False) - - with open(temp.name, 'w') as tmp: - tmp.write('CREATE {0}\n'.format(opts.output)) - for lib in opts.libs: - tmp.write('ADDLIB {0}\n'.format(lib)) - for obj in opts.objs: - tmp.write('ADDMOD {0}\n'.format(obj)) - tmp.write('SAVE\n') - tmp.write('END\n') + stdin = None + exit_code = call() + else: + temp = tempfile.NamedTemporaryFile(dir=os.path.dirname(opts.output), delete=False) + + with open(temp.name, 'w') as tmp: + tmp.write('CREATE {0}\n'.format(opts.output)) + for lib in opts.libs: + tmp.write('ADDLIB {0}\n'.format(lib)) + for obj in opts.objs: + tmp.write('ADDMOD {0}\n'.format(obj)) + tmp.write('SAVE\n') + tmp.write('END\n') cmd = [opts.archiver] + opts.modify_flags + opts.plugin_flags - stdin = open(temp.name) - exit_code = call() - os.remove(temp.name) - - if exit_code != 0: - raise Exception('{0} returned non-zero exit code {1}. Stop.'.format(' '.join(cmd), exit_code)) + stdin = open(temp.name) + exit_code = call() + os.remove(temp.name) + + if exit_code != 0: + raise Exception('{0} returned non-zero exit code {1}. 
Stop.'.format(' '.join(cmd), exit_code)) diff --git a/build/scripts/run_llvm_dsymutil.py b/build/scripts/run_llvm_dsymutil.py index 88e94306c3..4f43362ad9 100644 --- a/build/scripts/run_llvm_dsymutil.py +++ b/build/scripts/run_llvm_dsymutil.py @@ -1,11 +1,11 @@ -import os -import sys -import subprocess - - +import os +import sys +import subprocess + + if __name__ == '__main__': with open(os.devnull, 'w') as fnull: p = subprocess.Popen(sys.argv[1:], shell=False, stderr=fnull, stdout=sys.stdout) - + p.communicate() sys.exit(p.returncode) diff --git a/build/scripts/run_msvc_wine.py b/build/scripts/run_msvc_wine.py index a54b906e3f..439d1f8831 100644 --- a/build/scripts/run_msvc_wine.py +++ b/build/scripts/run_msvc_wine.py @@ -5,7 +5,7 @@ import subprocess import signal import time import json -import argparse +import argparse import errno import process_command_files as pcf @@ -52,7 +52,7 @@ def subst_path(l): return l -def call_wine_cmd_once(wine, cmd, env, mode): +def call_wine_cmd_once(wine, cmd, env, mode): p = run_subprocess(wine + cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, close_fds=True, shell=False) output = find_cmd_out(cmd) @@ -76,14 +76,14 @@ def call_wine_cmd_once(wine, cmd, env, mode): return_code = p.returncode if not stdout_and_stderr: if return_code != 0: - raise Exception('wine did something strange') + raise Exception('wine did something strange') return return_code elif ' : fatal error ' in stdout_and_stderr: return_code = 1 elif ' : error ' in stdout_and_stderr: return_code = 2 - + lines = [x.strip() for x in stdout_and_stderr.split('\n')] prefixes = [ @@ -155,8 +155,8 @@ def prepare_vc(fr, to): def run_slave(): - args = json.loads(sys.argv[3]) - wine = sys.argv[1] + args = json.loads(sys.argv[3]) + wine = sys.argv[1] signal.signal(signal.SIGTERM, sig_term) @@ -168,7 +168,7 @@ def run_slave(): while True: try: - return call_wine_cmd_once([wine], args['cmd'], args['env'], args['mode']) + return call_wine_cmd_once([wine], args['cmd'], args['env'], args['mode']) except Exception as e: print >>sys.stderr, '%s, will retry in %s' % (str(e), tout) @@ -409,51 +409,51 @@ def process_free_args(args, wine, bld_root, mode): return pwa.ProcessWholeArchiveOption('WINDOWS', wa_peers, wa_libs).construct_cmd(result) def run_main(): - parser = argparse.ArgumentParser() - parser.add_argument('wine', action='store') - parser.add_argument('-v', action='store', dest='version', default='120') - parser.add_argument('-I', action='append', dest='incl_paths') - parser.add_argument('mode', action='store') + parser = argparse.ArgumentParser() + parser.add_argument('wine', action='store') + parser.add_argument('-v', action='store', dest='version', default='120') + parser.add_argument('-I', action='append', dest='incl_paths') + parser.add_argument('mode', action='store') parser.add_argument('arcadia_root', action='store') parser.add_argument('arcadia_build_root', action='store') - parser.add_argument('binary', action='store') - parser.add_argument('free_args', nargs=argparse.REMAINDER) + parser.add_argument('binary', action='store') + parser.add_argument('free_args', nargs=argparse.REMAINDER) # By now just unpack. 
Ideally we should fix path and pack arguments back into command file args = parser.parse_args() - - wine = args.wine - mode = args.mode - binary = args.binary - version = args.version - incl_paths = args.incl_paths + + wine = args.wine + mode = args.mode + binary = args.binary + version = args.version + incl_paths = args.incl_paths bld_root = args.arcadia_build_root free_args = args.free_args - + wine_dir = os.path.dirname(os.path.dirname(wine)) bin_dir = os.path.dirname(binary) - tc_dir = os.path.dirname(os.path.dirname(os.path.dirname(bin_dir))) - if not incl_paths: - incl_paths = [tc_dir + '/VC/include', tc_dir + '/include'] + tc_dir = os.path.dirname(os.path.dirname(os.path.dirname(bin_dir))) + if not incl_paths: + incl_paths = [tc_dir + '/VC/include', tc_dir + '/include'] cmd_out = find_cmd_out(free_args) - + env = os.environ.copy() env.pop('DISPLAY', None) - env['WINEDLLOVERRIDES'] = 'msvcr{}=n'.format(version) + env['WINEDLLOVERRIDES'] = 'msvcr{}=n'.format(version) env['WINEDEBUG'] = 'fixme-all' env['INCLUDE'] = ';'.join(fix_path(p) for p in incl_paths) env['VSINSTALLDIR'] = fix_path(tc_dir) env['VCINSTALLDIR'] = fix_path(tc_dir + '/VC') env['WindowsSdkDir'] = fix_path(tc_dir) - env['LIBPATH'] = fix_path(tc_dir + '/VC/lib/amd64') - env['LIB'] = fix_path(tc_dir + '/VC/lib/amd64') + env['LIBPATH'] = fix_path(tc_dir + '/VC/lib/amd64') + env['LIB'] = fix_path(tc_dir + '/VC/lib/amd64') env['LD_LIBRARY_PATH'] = ':'.join(wine_dir + d for d in ['/lib', '/lib64', '/lib64/wine']) cmd = [binary] + process_free_args(free_args, wine, bld_root, mode) - for x in ('/NOLOGO', '/nologo', '/FD'): + for x in ('/NOLOGO', '/nologo', '/FD'): try: cmd.remove(x) except ValueError: @@ -490,7 +490,7 @@ def run_main(): if rc in (-signal.SIGALRM, signal.SIGALRM): print_err_log(out) print >>sys.stderr, '##append_tag##time out' - elif out and ' stack overflow ' in out: + elif out and ' stack overflow ' in out: print >>sys.stderr, '##append_tag##stack overflow' elif out and 'recvmsg: Connection reset by peer' in out: print >>sys.stderr, '##append_tag##wine gone' diff --git a/build/scripts/run_tool.py b/build/scripts/run_tool.py index 681e213978..00e3ff6f1e 100755 --- a/build/scripts/run_tool.py +++ b/build/scripts/run_tool.py @@ -1,6 +1,6 @@ import sys import subprocess -import os +import os if __name__ == '__main__': diff --git a/build/scripts/vcs_info.py b/build/scripts/vcs_info.py index fff4f7e2d0..319d1b4631 100644 --- a/build/scripts/vcs_info.py +++ b/build/scripts/vcs_info.py @@ -76,7 +76,7 @@ def get_default_json(): "ARCADIA_SOURCE_URL": "", "BRANCH": "unknown-vcs-branch", "BUILD_DATE": "", - "BUILD_TIMESTAMP": 0, + "BUILD_TIMESTAMP": 0, "BUILD_HOST": "localhost", "BUILD_USER": "nobody", "PROGRAM_VERSION": "Arc info:\\n Branch: unknown-vcs-branch\\n Commit: 0000000000000000000000000000000000000000\\n Author: <UNKNOWN>\\n Summary: No VCS\\n\\n", @@ -233,8 +233,8 @@ def print_java_mf(info): lines += wrap('SVN-Arcroot: ', info['SVN_ARCROOT']) lines += wrap('SVN-Time: ', info['SVN_TIME']) lines += wrap('Build-Date: ', info['BUILD_DATE']) - if 'BUILD_TIMESTAMP' in info: - lines += wrap('Build-Timestamp: ', str(info['BUILD_TIMESTAMP'])) + if 'BUILD_TIMESTAMP' in info: + lines += wrap('Build-Timestamp: ', str(info['BUILD_TIMESTAMP'])) return lines, names diff --git a/build/sysincl/stl-to-nothing.yml b/build/sysincl/stl-to-nothing.yml index c6ca8a510e..c7e27d1411 100644 --- a/build/sysincl/stl-to-nothing.yml +++ b/build/sysincl/stl-to-nothing.yml @@ -42,7 +42,7 @@ - cerrno - cfenv - cfloat - - charconv + - charconv 
- chrono - cinttypes - ciso646 @@ -120,8 +120,8 @@ - valarray - variant - vector - - unwind.h: - - contrib/libs/libunwind/include/unwind.h + - unwind.h: + - contrib/libs/libunwind/include/unwind.h # Though these are headers provided by libcxx, we do not want to allow them to be included. diff --git a/build/ya.conf.json b/build/ya.conf.json index 341ae7bf74..5f7cc875d6 100644 --- a/build/ya.conf.json +++ b/build/ya.conf.json @@ -45,9 +45,9 @@ "nm": { "description": "Run nm" }, - "objcopy": { - "description": "Run objcopy" - }, + "objcopy": { + "description": "Run objcopy" + }, "svn": { "description": "Subversion command-line client" }, @@ -1032,7 +1032,7 @@ "executable": "ymake" } }, - "platforms": [ + "platforms": [ { "host": { "os": "LINUX" @@ -1072,7 +1072,7 @@ }, "default": true } - ] + ] }, "maven_import_sandbox_uploader": { "tools": { @@ -2713,7 +2713,7 @@ "executable": "gdb" } }, - "platforms": [ + "platforms": [ { "host": { "os": "LINUX" @@ -2746,12 +2746,12 @@ }, "default": true } - ], + ], "env": { "TERMINFO": [ "$(ROOT)/gdb/lib/terminfo" ] - } + } }, "gdbserver": { "tools": { @@ -2795,7 +2795,7 @@ "executable": "python" } }, - "platforms": [ + "platforms": [ { "host": { "os": "LINUX" @@ -2835,7 +2835,7 @@ }, "default": true } - ] + ] }, "ipython": { "tools": { @@ -2893,13 +2893,13 @@ } ] }, - "wine": { - "tools": { + "wine": { + "tools": { "wine": { "bottle": "wine", "executable": "wine" } - }, + }, "platforms": [ { "host": { @@ -2908,7 +2908,7 @@ "default": true } ] - }, + }, "wine32": { "tools": { "wine32": { @@ -3180,7 +3180,7 @@ "executable": "jar" } }, - "platforms": [ + "platforms": [ { "host": { "os": "LINUX" @@ -3199,7 +3199,7 @@ }, "default": true } - ] + ] }, "jdk10": { "tools": { @@ -3421,7 +3421,7 @@ "executable": "cmake" } }, - "platforms": [ + "platforms": [ { "host": { "os": "LINUX" @@ -3440,7 +3440,7 @@ }, "default": true } - ] + ] }, "cling": { "tools": { @@ -3471,7 +3471,7 @@ "executable": "ninja" } }, - "platforms": [ + "platforms": [ { "host": { "os": "LINUX" @@ -3490,7 +3490,7 @@ }, "default": true } - ] + ] }, "valgrind": { "tools": { @@ -6306,7 +6306,7 @@ "bin", "llvm-nm" ], - "objcopy": [ + "objcopy": [ "llvm-toolchain", "bin", "llvm-objcopy" @@ -6483,18 +6483,18 @@ "match": "SCRIPTGEN" } }, - "wine": { - "formula": { + "wine": { + "formula": { "sandbox_id": 495594294, - "match": "Wine" - }, - "executable": { + "match": "Wine" + }, + "executable": { "wine": [ "bin", "wine64" ] - } - }, + } + }, "wine32": { "formula": { "sandbox_id": 1010959442, diff --git a/build/ymake.core.conf b/build/ymake.core.conf index 4618ff0967..081833998b 100644 --- a/build/ymake.core.conf +++ b/build/ymake.core.conf @@ -210,8 +210,8 @@ when ($HAVE_MKL == "") { } } -SFDL_TMP_OUT= ${output;tmp:SRC.tmp} - +SFDL_TMP_OUT= ${output;tmp:SRC.tmp} + # tag:perl-specific when ($USE_LOCAL_TOOLS == "yes") { PERL=$LOCAL_PERL @@ -411,7 +411,7 @@ when ($USE_ARCADIA_PYTHON == "no") { # tag:allocator DEFAULT_ALLOCATOR=LF - + # tag:allocator when ($OS_ANDROID == "yes" || $MSVC == "yes") { DEFAULT_ALLOCATOR=J @@ -900,7 +900,7 @@ macro _PY_PROTO_CMD_BASE(File, Suf, Args...) 
{ macro _PY_PROTO_CMD(File) { .CMD=${cwd;rootdir;input:File} $_PY_PROTO_CMD_BASE($File _pb2.py $PY_PROTO_OPTS $PY_PROTO_OUTS $PY_PROTO_MYPY_PLUGIN) } - + # tag:proto tag:python-specific macro _PY_PROTO_CMD_INTERNAL(File) { .CMD=${cwd;rootdir;input:File} $GEN_PY_PROTOS --suffixes $PY_PROTO_SUFFIXES $PY_PROTO_MYPY_SUFFIX -- $_PY_PROTO_CMD_BASE($File __int___pb2.py $PY_PROTO_OPTS $PY_PROTO_OUTS_INTERNAL ${hide;kv:"ext_out_name_for_${nopath;noext;suf=__int___pb2.py:File} ${nopath;noext;suf=_pb2.py:File}"} $PY_PROTO_MYPY_PLUGIN_INTERNAL) @@ -956,7 +956,7 @@ macro XS_PROTO(File, Dir, Outputs...) { macro PROTO2FBS(File) { .CMD=${cwd:BINDIR} ${tool:"contrib/tools/flatc"} -I . -I ${ARCADIA_ROOT} --proto ${input:File} ${output;hide;nopath;noext:File.fbs} ${kv;hide:"p FBS"} ${kv;hide:"pc yellow"} } - + # tag:proto tag:python-specific when ($PY_PROTOS_FOR == "yes") { PEERDIR+=contrib/libs/protobuf/python @@ -986,7 +986,7 @@ macro _JAVA_EVLOG_CMD(File) { # tag:sanitize RUN_NO_SANITIZE=$YMAKE_PYTHON ${input:"build/scripts/run_tool.py"} -- - + # tag:sanitize when ($IS_CROSS_SANITIZE) { RUN_NO_SANITIZE= @@ -1126,7 +1126,7 @@ module _BARE_UNIT { .PEERDIR_POLICY=as_include .RESTRICTED=GRPC USE_SKIFF INDUCED_DEPS FUZZ_DICTS FUZZ_OPTS PACK DOCS_DIR DOCS_CONFIG DOCS_VARS YT_SPEC USE_CXX USE_UTIL WHOLE_ARCHIVE PRIMARY_OUTPUT SECONDARY_OUTPUT DEPENDENCY_MANAGEMENT EXCLUDE NO_DOCTESTS EMBED_JAVA_VCS_INFO RESOURCE_FILES PACK_GLOBALS_IN_LIBRARY EXPOSE GLOBAL_DEPS .FINAL_TARGET=no - + PEERDIR_TAGS=__EMPTY__ when ($FATAL_ERROR_MESSAGE && $FATAL_ERROR_MODULE != "yes") { @@ -1172,7 +1172,7 @@ module _BASE_UNIT: _BARE_UNIT { MACRO_ALIAS(PROTO_CMD _CPP_VANILLA_PROTO_CMD) } } - + SANITIZER_DEFINED=no when ($SANITIZER_TYPE && $SANITIZER_TYPE != "no") { @@ -1344,8 +1344,8 @@ module _BASE_UNIT: _BARE_UNIT { when ($NEED_BINUTILS_PEERDIR && $BINUTILS_USED && $NEED_PLATFORM_PEERDIRS == "yes") { PEERDIR+=build/platform/binutils } -} - +} + _LINKER_ID= # GCC does not support -fuse-ld with an executable path, only # -fuse-ld=bfd or -fuse-ld=gold (or -fuse-ld=lld in later versions). 
@@ -1382,7 +1382,7 @@ macro USE_LINKER_LLD() { COMMON_LINK_SETTINGS= LINK_ADDITIONAL_SECTIONS= LINK_ADDITIONAL_SECTIONS_COMMAND= - + when ($COMMON_LINK_SETTINGS == "yes") { when ($_LINKER_ID == "lld" || $_LINKER_ID == "gold") { LDFLAGS += -Wl,--gdb-index @@ -1407,7 +1407,7 @@ when ($COMMON_LINK_SETTINGS == "yes") { when ($USE_MKL == "yes") { NOPLATFORM=yes } - + when (($USE_EAT_MY_DATA == "yes") && ($WIN32 != "yes") && ($DARWIN != "yes") && ($OS_ANDROID != "yes") && ($OS_IOS != "yes")) { PEERDIR+=contrib/libs/libeatmydata/autocheck_wrapper } @@ -1505,8 +1505,8 @@ when ($COMMON_LINK_SETTINGS == "yes") { when ($WITH_VALGRIND == "yes") { PEERDIR+=contrib/libs/valgrind } -} - +} + when ($EMBED_LINKER_MAP == "yes" || $EMBED_LINKER_CREF == "yes") { LINK_ADDITIONAL_SECTIONS_COMMAND+= \ $OBJCOPY_TOOL $LINK_ADDITIONAL_SECTIONS $TARGET @@ -1678,8 +1678,8 @@ module PROGRAM: _BASE_PROGRAM { ADD_YTEST($MODULE_PREFIX$REALPRJNAME coverage.extractor) ADD_CLANG_TIDY() SET(MODULE_LANG CPP) -} - +} + # tag:python-specific tag:deprecated tag:internal module _PY2_PROGRAM: _BASE_PY_PROGRAM { .ALIASES=REQUIREMENTS=PY_REQUIREMENTS @@ -1857,8 +1857,8 @@ module UNITTEST: _BASE_UNITTEST { PEERDIR(library/cpp/testing/unittest_main) ADD_YTEST($MODULE_PREFIX$REALPRJNAME unittest.py) SET(MODULE_LANG CPP) -} - +} + # tag:yt-specific tag:test ### @usage: YT_UNITTEST([name]) ### @@ -2320,18 +2320,18 @@ module G_BENCHMARK: _BASE_PROGRAM { } # tag:test -TEST_ROOT=$(TESTS_DATA_ROOT) -RESULT_MAX_FILE=0 -STRIP_FILES=--dont-strip-files -VERIFY_RESULTS=--verify-results -ADDITIONAL_PATH= - -# set for tests variables to fill it by YA_DEV or YA -YA_ROOT=ya +TEST_ROOT=$(TESTS_DATA_ROOT) +RESULT_MAX_FILE=0 +STRIP_FILES=--dont-strip-files +VERIFY_RESULTS=--verify-results +ADDITIONAL_PATH= + +# set for tests variables to fill it by YA_DEV or YA +YA_ROOT=ya when ($YA_DEV == "yes") { YA_ROOT=ya-dev } - + # tag:test ### @usage: UNITTEST_FOR(path/to/lib) @@ -2345,8 +2345,8 @@ module UNITTEST_FOR: UNITTEST { .SEM=UNITTEST_SEM PEERDIR(ADDINCL $UNITTEST_DIR) SRCDIR($UNITTEST_DIR) -} - +} + ### @usage: _LIBRARY # internal ### ### Base module definition for all libraries. @@ -2567,8 +2567,8 @@ module FAT_OBJECT: LIBRARY { # ymake's handling of NEED_ADD_FAKE_SRC may insert arbitrary command, not necessarily compilation DISABLE(NEED_ADD_FAKE_SRC) SRCS(build/scripts/_fake_src.cpp) -} - +} + ### @usage: RECURSIVE_LIBRARY() ### ### The recursive ("fat") library module. It will contain all its transitive dependencies reachable by PEERDIRs: @@ -2647,8 +2647,8 @@ module DLL_UNIT: _LINK_UNIT { when ($IDE_MSVS == "yes") { PEERDIR+=build/scripts/c_templates } -} - +} + # tag:python-specific ### @usage: PY_ANY_MODULE(name major_ver [minor_ver] [EXPORTS symlist_file] [PREFIX prefix]) ### @@ -2699,7 +2699,7 @@ module PY_ANY_MODULE: DLL_UNIT { MODULE_SUFFIX=.so } .RESTRICTED=USE_PYTHON2 USE_PYTHON3 PY_SRCS PY_MAIN -} +} # tag:python-specific ### @usage: PY2MODULE(name major_ver [minor_ver] [EXPORTS symlist_file] [PREFIX prefix]) @@ -2815,7 +2815,7 @@ module DLL: DLL_UNIT { ### SRCDIR($DLL_FOR_DIR) ### ADDINCL($DLL_FOR_DIR) ###} -} +} ### DLL_TOOL is a DLL that can be used as a LD_PRELOAD tool. module DLL_TOOL: DLL { @@ -2978,8 +2978,8 @@ module _DLL_COMPATIBLE_LIBRARY: LIBRARY { @import "${CONF_ROOT}/conf/project_specific/yql_udf.conf" -# as SRCS in packages use macro BUNDLE_SRCS! - +# as SRCS in packages use macro BUNDLE_SRCS! 
+ PACKED_PACKAGE_ARGS= PACKED_PACKAGE_EXT= ### @usage: PACK(archive_type) @@ -3023,8 +3023,8 @@ module PACKAGE: _BASE_UNIT { } SET(NEED_PLATFORM_PEERDIRS no) PEERDIR_TAGS=CPP_PROTO CPP_FBS PY2 PY2_NATIVE YQL_UDF_SHARED __EMPTY__ DOCBOOK JAR_RUNNABLE PY3_BIN PY3TEST_PROGRAM DLL -} - +} + TOUCH_GROUP=$TOUCH_PACKAGE ${kv;hide:"p CI"} ### @usage: CI_GROUP() @@ -3079,8 +3079,8 @@ module UNION: _BASE_UNIT { .CMD=TOUCH_UNIT_MF SET(NEED_PLATFORM_PEERDIRS no) PEERDIR_TAGS=CPP_PROTO CPP_FBS PY2 PY2_NATIVE YQL_UDF_SHARED __EMPTY__ DOCBOOK JAR_RUNABLE PY3_BIN DLL -} - +} + # tag:python-specific module _PY_PACKAGE: UNION { .EXTS=.py @@ -3097,8 +3097,8 @@ module _PY_PACKAGE: UNION { ### This is module created via PY_PROTOS_FOR() macro module PY_PACKAGE: _PY_PACKAGE { .FINAL_TARGET=yes -} - +} + # tag:internal ### @usage: _SET_FIRST_VALUE(name args...) # interanl ### @@ -4432,7 +4432,7 @@ module METAQUERY: _BASE_UNIT { .FINAL_TARGET=yes PRINT_MODULE_TYPE(METAQUERY $MODULE_PREFIX$REALPRJNAME) } - + ARGS_DELIM="MACRO_CALLS_DELIM" SYSTEM_PROPERTIES_VALUE= @@ -5131,7 +5131,7 @@ macro _PYTHON_ADDINCL() { CFLAGS+=$PYTHON_INCLUDE } } -} +} # tag:python-specific ### @usage: PYTHON3_ADDINCL() @@ -5181,7 +5181,7 @@ macro _PYTHON3_ADDINCL() { CFLAGS+=$PYTHON_INCLUDE } } -} +} # tag:python-specific ### @usage: USE_PYTHON2() @@ -5233,7 +5233,7 @@ when ($PERL_DEFAULT_PEERDIR == "yes") { ### Add dependency on Perl to your LIBRARY macro USE_PERL_LIB() { PEERDIR(build/platform/perl) -} +} # tag:perl-specific ### @usage: USE_PERL_514_LIB() @@ -5285,7 +5285,7 @@ macro ALLOCATOR(Alloc) { otherwise { PEERDIR+=___configure_error___unknown_allocator_type___$ALLOCATOR } -} +} ### @usage: CC_REQUIREMENTS([cpu: <value>] [ram: <value>]) ### @@ -5335,7 +5335,7 @@ when ($MIC_ARCH == "yes") { when ($USE_OPENMP == "yes") { PEERDIR+=contrib/libs/openmp } - + when ($STRIP == "yes" && $NO_STRIP != "yes") { STRIP_FLAG=$LD_STRIP_FLAG DWARF_COMMAND= @@ -5403,7 +5403,7 @@ macro ADD_COMPILABLE_TRANSLATE(Dict, Name, MakeTransDictOptions...) { __translatename_lower=${tolower:Name} __translate_dict=${BINDIR}/transdict.${__translatename_lower}.cpp RUN_PROGRAM(dict/tools/maketransdict -i ${Dict} ${MakeTransDictOptions} ${Name} STDOUT_NOAUTO ${__translate_dict} IN ${Dict}) -} +} ### @usage ADD_COMPILABLE_TRANSLIT(TranslitTable NGrams Name Options...) ### @@ -5411,15 +5411,15 @@ macro ADD_COMPILABLE_TRANSLATE(Dict, Name, MakeTransDictOptions...) { ### This will emit both translit, untranslit and ngrams table codes those will be than further compiled into library macro ADD_COMPILABLE_TRANSLIT(TranslitTable, NGrams, Name, Options...) 
{ __translitname_lower=${tolower:Name} - __translit_table=${BINDIR}/translit_trie_${__translitname_lower}.cpp - __untranslit_table=${BINDIR}/untranslit_trie_${__translitname_lower}.cpp - __ngrams_table=${BINDIR}/ngr_arr_${__translitname_lower}.cpp - __gentrie_dir=dict/tools/make_untranslit_trie - + __translit_table=${BINDIR}/translit_trie_${__translitname_lower}.cpp + __untranslit_table=${BINDIR}/untranslit_trie_${__translitname_lower}.cpp + __ngrams_table=${BINDIR}/ngr_arr_${__translitname_lower}.cpp + __gentrie_dir=dict/tools/make_untranslit_trie + RUN_PROGRAM(${__gentrie_dir} -i ${TranslitTable} ${Options} ${__translitname_lower} IN ${TranslitTable} STDOUT ${__untranslit_table}) RUN_PROGRAM(${__gentrie_dir} -i ${TranslitTable} -n ${Options} ${__translitname_lower} IN ${TranslitTable} STDOUT ${__translit_table}) RUN_PROGRAM(dict/tools/make_ngrams -i ${NGrams} ${Options} ${__translitname_lower} IN ${NGrams} STDOUT ${__ngrams_table}) -} +} # tag:python-specific tag:proto tag:deprecated @@ -5488,8 +5488,8 @@ macro _BUNDLE_TARGET(Target, Destination) { ### Documentation about the system test: https://wiki.yandex-team.ru/yatool/test/ macro TIMEOUT(Time) { SET(TEST_TIMEOUT $Time) -} - +} + SBR_UID_EXT="" # tag:test ### @usage: VALIDATE_DATA_RESTART(ext) @@ -5868,7 +5868,7 @@ macro _SRC("cfgproto", SRC, SRCFLAGS...) { # tag:src-processing macro _SRC("pyx", SRC, SRCFLAGS...) { - # Copy-paste from BUILDWITH_CYTHON + # Copy-paste from BUILDWITH_CYTHON .CMD=$RUN_CYTHON_SCRIPT $CYTHON_OPTIONS --cplus ${CYTHON_CPP_OUTPUT_INCLUDES} ${pre=-I:_CYTHON__INCLUDE} ${input:SRC} -o ${output;tobindir:SRC.cpp} $CYTHON_OUTPUT_INCLUDES ${SRCFLAGS} ${requirements;hide:PY_REQUIREMENTS} ${kv;hide:"p CY"} ${kv;hide:"pc yellow"} .ADDINCL=FOR cython contrib/tools/cython/Cython/Includes } @@ -6161,9 +6161,9 @@ macro SRC(FILE, FLAGS...) { macro SRCS(FILES...) { foreach (FILE : $FILES) { _SRC(${lastext:FILE} $FILE) - } -} - + } +} + # tag:cpu tag:src-processing ### @usage SRC_C_SSE2(File Flags...) ### Compile single .c-file with SSE2 and extra Flags. @@ -6270,14 +6270,14 @@ macro SRC_CPP_AVX2(FILE, FLAGS...) { } # tag:python-processing tag:cython -# TODO: use it in [.pyx] cmd +# TODO: use it in [.pyx] cmd ### @usage: BUILDWITH_CYTHON_CPP(Src Options...) ### ### Generates .cpp file from .pyx. macro BUILDWITH_CYTHON_CPP(Src, Options...) { .CMD=$RUN_CYTHON_SCRIPT $CYTHON_OPTIONS ${Options} --cplus ${CYTHON_CPP_OUTPUT_INCLUDES} ${pre=-I:_CYTHON__INCLUDE} ${input:Src} -o ${output;tobindir:Src.cpp} $CYTHON_OUTPUT_INCLUDES ${requirements;hide:PY_REQUIREMENTS} ${kv;hide:"p CY"} ${kv;hide:"pc yellow"} ADDINCL(FOR cython contrib/tools/cython/Cython/Includes) -} +} # tag:python-processing tag:cython tag:internal ### @usage: _BUILDWITH_CYTHON_CPP_DEP(Src Dep Options...) # internal @@ -6296,7 +6296,7 @@ macro _BUILDWITH_CYTHON_CPP_DEP(Src, Dep, Options...) { macro BUILDWITH_CYTHON_C(Src, Options...) { .CMD=$RUN_CYTHON_SCRIPT $CYTHON_OPTIONS ${Options} ${pre=-I:_CYTHON__INCLUDE} ${input:Src} -o ${output;tobindir:Src.c} $CYTHON_OUTPUT_INCLUDES ${requirements;hide:PY_REQUIREMENTS} ${kv;hide:"p CY"} ${kv;hide:"pc yellow"} ADDINCL(FOR cython contrib/tools/cython/Cython/Includes) -} +} # tag:python-processing tag:cython tag:internal ### @usage: _BUILDWITH_CYTHON_C_DEP(Src Dep Options...) # internal @@ -6407,15 +6407,15 @@ macro ARCHIVE_ASM(NAME="", DONTCOMPRESS?"-p":"", REQUIREMENTS[], Files...) 
{ # tag:yweb-specific macro PIRE_INLINE_CMD(SRC) { .CMD=${tool:"library/cpp/regex/pire/inline"} -o ${output:SRC} ${input:SRC} ${output_include;hide:SRC} ${kv;hide:"p PI"} ${kv;hide:"pc yellow"} -} +} # tag:yweb-specific macro PIRE_INLINE(FILES...) { foreach (FILE : $FILES) { PIRE_INLINE_CMD($FILE) } -} - +} + ### @usage: ARCHIVE(archive_name [DONT_COMPRESS] files...) ### ### Add arbitrary data to a modules. Unlike RESOURCE macro the result should be futher processed by othet macros in the module. @@ -6435,9 +6435,9 @@ macro ARCHIVE_BY_KEYS(NAME="", KEYS="", DONTCOMPRESS?"-p":"", REQUIREMENTS[], Fi .CMD=$ARCH_TOOL -q -x $DONTCOMPRESS ${input:Files} -k $KEYS -o ${output;chksum;addincl;noauto:NAME} ${requirements;hide:REQUIREMENTS} ${kv;hide:"p AR"} ${kv;hide:"pc light-red"} } -#scripts +#scripts -#special commands +#special commands BUILDVERSION_SCRIPT=build/scripts/build_info_gen.py SVNVERSION_JAVA_MARKER=output-java-class SVNVERSION_GO_MARKER=output-go @@ -6499,7 +6499,7 @@ macro CFG_VARS() { macro CONFIGURE_FILE(Src, Dst) { .CMD=$YMAKE_PYTHON ${input:"build/scripts/configure_file.py"} ${input:Src} ${output;addincl:Dst} $CFG_VARS ${kv;hide:"p CF"} ${kv;hide:"pc yellow"} .SEM=set_vasrs ${CFG_VARS} && configure_file $S/${input;rootrel:Src} $B/${output;rootrel:Dst} -} +} ### @usage: BASE_CODEGEN(tool_path prefix) ### @@ -6515,7 +6515,7 @@ macro BASE_CODEGEN(Tool, Prefix, Opts...) { ### This is the call of the generator. Python macro SPLIT_CODEGEN() is defined in order to properly fill command outputs from OUT_NUM argument. macro _SPLIT_CODEGEN_BASE(Tool, Prefix, OUT[], OPTS[], OUTPUT_INCLUDES[]) { .CMD=${tool:Tool} ${input:Prefix.in} ${output;hide:OUT} ${output;nopath;noauto:Prefix.cpp} ${output;nopath:Prefix.h} $OPTS ${output_include;hide:OUTPUT_INCLUDES} ${kv;hide:"p SC"} ${kv;hide:"pc yellow"} -} +} STRUCT_CODEGEN_OUTPUT_INCLUDES=${output_include;hide:"util/generic/singleton.h"} \ ${output_include;hide:"util/generic/strbuf.h"} \ @@ -6536,8 +6536,8 @@ macro STRUCT_CODEGEN(Prefix) { ### A special case BASE_CODEGEN, in which the extsearch/images/robot/tools/dumperf/codegen tool is used macro DUMPERF_CODEGEN(Prefix) { .CMD=$BASE_CODEGEN(extsearch/images/robot/tools/dumperf/codegen, $Prefix, ${output_include;hide:"extsearch/images/kernel/erf/erf_format.h"}) -} - +} + # tag:flags ### @usage: LDFLAGS(LinkerFlags...) ### @@ -6563,22 +6563,22 @@ macro CFLAGS(Flags...) { ### Add the specified flags to the compilation command of .masm files. macro MASMFLAGS(Flags...) { SET_APPEND(MASMFLAGS $Flags) -} - +} + # tag:flags ### @usage: CONLYFLAGS([GLOBAL compiler_flag]* compiler_flags) ### Add the specified flags to the compilation command of .c (but not .cpp) files. ### @params: GLOBAL - Distributes these flags on dependent projects macro CONLYFLAGS(Flags...) { SET_APPEND_WITH_GLOBAL(USER_CONLYFLAGS $Flags) -} - +} + # tag:flags ### @usage: CXXFLAGS(compiler_flags) ### Add the specified flags to the compilation command of .cpp (but not .c) files. macro CXXFLAGS(Flags...) 
{ SET_APPEND_WITH_GLOBAL(USER_CXXFLAGS $Flags) -} +} # tag:flags ### @usage: CUDA_NVCC_FLAGS(compiler flags) @@ -6814,7 +6814,7 @@ macro GENERATE_ENUM_SERIALIZATION(File) { .SEM=generate_enum_serilization ${input:File} ${output;hide;suf=_serialized.o:File} INCLUDE_HEADERS ${input;rootrel:File} ${tool;hide:"tools/enum_parser/enum_parser/bin"} PEERDIR(tools/enum_parser/enum_serialization_runtime) } - + ### @usage: GENERATE_ENUM_SERIALIZATION_WITH_HEADER(File.h) ### ### Create serialization support for enumeration members defined in the header (String <-> Enum conversions) and compile it into the module @@ -6832,7 +6832,7 @@ macro GENERATE_ENUM_SERIALIZATION_WITH_HEADER(File) { ### Creates a header file DebianVersion.h define the DEBIAN_VERSION taken from the File. macro DEB_VERSION(File) { .CMD=$YMAKE_PYTHON ${input:"build/scripts/mkver.py"} ${input:File} ${output;stdout:"DebianVersion.h"} ${kv;hide:"p CL"} ${kv;hide:"pc yellow"} -} +} BUILD_MN_SCRIPT=build/scripts/build_mn.py @@ -6859,7 +6859,7 @@ macro _BUILD_MNS_HEADER(NAME="", CHECK?, RANKING_SUFFIX="", Files...) { .CMD=$YMAKE_PYTHON ${input:BUILD_MN_SCRIPT} BuildMnsHeaderF $NAME ranking_suffix=$RANKING_SUFFIX ${output:MNS_OUTPUT.h} ${input:Files} ${output_include;hide:"kernel/matrixnet/mn_sse.h"} ${output_include;hide:"kernel/matrixnet/mn_multi_categ.h"} ${kv;hide:"p MN"} ${kv;hide:"pc yellow"} } -# TODO: support foreach_in and keywords simultaneous usage (look at BUILD_MNS_FILES) +# TODO: support foreach_in and keywords simultaneous usage (look at BUILD_MNS_FILES) # tag:matrixnet ### @usage: BUILD_MNS([CHECK] NAME listname mninfos...) # matrixnet @@ -6936,7 +6936,7 @@ when ($WIN32 == "yes") { SCHEEME2_CFLAGS= /E /TP } -SCHEEME2_STRUCT_INFO_FLAGS=-f "const static ui32 RecordSig" -u "RecordSig" --gcc44_no_typename --no_complex_overloaded_func_export +SCHEEME2_STRUCT_INFO_FLAGS=-f "const static ui32 RecordSig" -u "RecordSig" --gcc44_no_typename --no_complex_overloaded_func_export ### @usage: GEN_SCHEEME2(scheeme_name from_file dependent_files...) ### ### Generates a C++ description for structure(contains the field RecordSig) in the specified file (and connected). @@ -6992,7 +6992,7 @@ macro SYMLINK(From, To) { macro RUN_PROGRAM(Tool, IN{input}[], OUT{output}[], OUT_NOAUTO{output}[], TOOL{tool}[], OUTPUT_INCLUDES[], IN_DEPS[], STDOUT="", STDOUT_NOAUTO="", CWD="", ENV[], REQUIREMENTS[], Args...) { .CMD=${cwd:CWD} ${env:ENV} ${tool:Tool} $Args ${input;hide:IN} ${input;hide:IN_DEPS} ${output_include;hide:OUTPUT_INCLUDES} ${tool;hide:TOOL} ${output;hide:OUT} ${output;noauto;hide:OUT_NOAUTO} ${output;stdout:STDOUT} ${output;stdout;noauto:STDOUT_NOAUTO} ${requirements;hide:REQUIREMENTS} ${requirements;hide:"network:restricted"} ${kv;hide:"p PR"} ${kv;hide:"pc yellow"} ${kv;hide:"show_out"} } - + # tag:lua-specific ### @usage: LUA(script_path args... [CWD dir] [ENV key=value...] [TOOL tools...] [IN inputs...] [OUT[_NOAUTO] outputs...] [STDOUT[_NOAUTO] output] [OUTPUT_INCLUDES output_includes...][ REQUIREMENTS reqs]) ### @@ -7015,8 +7015,8 @@ macro RUN_PROGRAM(Tool, IN{input}[], OUT{output}[], OUT_NOAUTO{output}[], TOOL{t ### ${CURDIR} and ${BINDIR} which are expanded where the outputs are used. macro LUA(ScriptPath, IN{input}[], OUT{output}[], OUT_NOAUTO{output}[], TOOL{tool}[], OUTPUT_INCLUDES[], IN_DEPS[], STDOUT="", STDOUT_NOAUTO="", CWD="", ENV[], REQUIREMENTS[], Args...) 
{ .CMD=${cwd:CWD} ${env:ENV} $LUA_TOOL ${input:ScriptPath} $Args ${input;hide:IN} ${input;hide:IN_DEPS} ${output_include;hide:OUTPUT_INCLUDES} ${tool;hide:TOOL} ${output;hide:OUT} ${output;noauto;hide:OUT_NOAUTO} ${output;stdout:STDOUT} ${output;stdout;noauto:STDOUT_NOAUTO} ${requirements;hide:REQUIREMENTS} ${requirements;hide:"network:restricted"} ${kv;hide:"p LU"} ${kv;hide:"pc yellow"} ${kv;hide:"show_out"} -} - +} + # tag:python-specific ### @usage: PYTHON(script_path args... [CWD dir] [ENV key=value...] [TOOL tools...] [IN inputs...] [OUT[_NOAUTO] outputs...] [STDOUT[_NOAUTO] output] [OUTPUT_INCLUDES output_includes...] [REQUIREMENTS reqs]) ### @@ -7248,7 +7248,7 @@ when ($CLANG && $DEBUGINFO_LINES_ONLY == "yes" && $NO_DEBUGINFO != "yes") { DEBUG_INFO_FLAGS=-gline-tables-only } -# TODO: configurable tar and touch +# TODO: configurable tar and touch PACK_TGZ=${cwd:ARCADIA_BUILD_ROOT} tar -czf ${rootrel:OUTPUT} ${rootrel:INPUT} ${kv;hide:"p AR"} ${kv;hide:"pc light-red"} # tag:internal diff --git a/build/ymake_conf.py b/build/ymake_conf.py index 2626dab71a..30219eb85e 100755 --- a/build/ymake_conf.py +++ b/build/ymake_conf.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python +#!/usr/bin/env python # coding=utf-8 - + from __future__ import print_function import base64 @@ -9,18 +9,18 @@ import json import logging import ntpath import optparse -import os +import os import posixpath -import re -import subprocess +import re +import subprocess import sys import tempfile - + import six logger = logging.getLogger(__name__ if __name__ != '__main__' else 'ymake_conf.py') - + def init_logger(verbose): logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO) @@ -34,9 +34,9 @@ class DebugString(object): class ConfigureError(Exception): - pass - - + pass + + class Platform(object): def __init__(self, name, os, arch): """ @@ -77,17 +77,17 @@ class Platform(object): self.is_power8le = self.arch == 'ppc64le' self.is_power9le = self.arch == 'power9le' self.is_powerpc = self.is_power8le or self.is_power9le - + self.is_32_bit = self.is_x86 or self.is_armv7 or self.is_armv8m self.is_64_bit = self.is_x86_64 or self.is_armv8 or self.is_powerpc assert self.is_32_bit or self.is_64_bit assert not (self.is_32_bit and self.is_64_bit) - - self.is_linux = self.os == 'linux' or 'yocto' in self.os + + self.is_linux = self.os == 'linux' or 'yocto' in self.os self.is_linux_x86_64 = self.is_linux and self.is_x86_64 self.is_linux_armv8 = self.is_linux and self.is_armv8 - self.is_linux_armv7 = self.is_linux and self.is_armv7 + self.is_linux_armv7 = self.is_linux and self.is_armv7 self.is_linux_power8le = self.is_linux and self.is_power8le self.is_linux_power9le = self.is_linux and self.is_power9le self.is_linux_powerpc = self.is_linux_power8le or self.is_linux_power9le @@ -98,10 +98,10 @@ class Platform(object): self.is_iossim = self.os == 'iossim' or (self.os == 'ios' and self.is_intel) self.is_ios = self.os == 'ios' or self.is_iossim self.is_apple = self.is_macos or self.is_ios - + self.is_windows = self.os == 'windows' self.is_windows_x86_64 = self.is_windows and self.is_x86_64 - + self.is_android = self.os == 'android' if self.is_android: # This is default Android API level unless `ANDROID_API` is specified @@ -116,20 +116,20 @@ class Platform(object): self.is_none = self.os == 'none' self.is_posix = self.is_linux or self.is_apple or self.is_android or self.is_cygwin or self.is_yocto - + @staticmethod def from_json(data): name = data.get('visible_name', data['toolchain']) return Platform(name, os=data['os'], 
arch=data['arch']) - + @property def os_variables(self): # 'LINUX' variable, for backward compatibility yield self.os.upper() - + # 'OS_LINUX' variable yield 'OS_{}'.format(self.os.upper()) - + # yocto is linux if 'yocto' in self.os: yield 'LINUX' @@ -142,7 +142,7 @@ class Platform(object): yield 'IOS' yield 'OS_IOS' yield 'OS_IOSSIM' - + @property def arch_variables(self): return select_multiple(( @@ -161,7 +161,7 @@ class Platform(object): (self.is_32_bit, 'ARCH_TYPE_32'), (self.is_64_bit, 'ARCH_TYPE_64'), )) - + @property def library_path_variables(self): return ['LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH'] @@ -213,11 +213,11 @@ class Platform(object): def which(prog): if os.path.exists(prog) and os.access(prog, os.X_OK): return prog - + # Ищем в $PATH только простые команды, без путей. if os.path.dirname(prog) != '': - return None - + return None + path = os.getenv('PATH', '') pathext = os.environ.get('PATHEXT') @@ -247,7 +247,7 @@ def get_stdout_and_code(command): return stdout, process.returncode except Exception: return None, None - + def to_strings(o): if isinstance(o, (list, tuple)): @@ -262,8 +262,8 @@ def to_strings(o): yield str(o) else: raise ConfigureError('Unexpected value {} {}'.format(type(o), o)) - - + + def emit(key, *value): print('{0}={1}'.format(key, ' '.join(to_strings(value)))) @@ -279,7 +279,7 @@ def emit_with_ignore_comment(key, *value): def append(key, *value): print('{0}+={1}'.format(key, ' '.join(to_strings(value)))) - + def emit_big(text): prefix = None @@ -347,18 +347,18 @@ def preset(key, default=None): return opts().presets.get(key, default) -def is_positive(key): +def is_positive(key): return is_positive_str(preset(key, '')) - - + + def is_positive_str(s): return s.lower() in ('yes', 'true', 'on', '1') -def is_negative(key): +def is_negative(key): return is_negative_str(preset(key, '')) - - + + def is_negative_str(s): return s.lower() in ('no', 'false', 'off', '0') @@ -397,8 +397,8 @@ def unique(it): yield i -class Options(object): - def __init__(self, argv): +class Options(object): + def __init__(self, argv): def parse_presets(raw_presets): presets = {} for p in raw_presets: @@ -409,14 +409,14 @@ class Options(object): return presets parser = optparse.OptionParser(add_help_option=False) - opt_group = optparse.OptionGroup(parser, 'Conf script options') + opt_group = optparse.OptionGroup(parser, 'Conf script options') opt_group.add_option('--toolchain-params', dest='toolchain_params', action='store', help='Set toolchain params via file') opt_group.add_option('-D', '--preset', dest='presets', action='append', default=[], help='set or override presets') opt_group.add_option('-l', '--local-distbuild', dest='local_distbuild', action='store_true', default=False, help='conf for local distbuild') - parser.add_option_group(opt_group) - - self.options, self.arguments = parser.parse_args(argv) - + parser.add_option_group(opt_group) + + self.options, self.arguments = parser.parse_args(argv) + argv = self.arguments if len(argv) < 4: print('Usage: ArcRoot, --BuildType--, Verbosity, [Path to local.ymake]', file=sys.stderr) @@ -429,57 +429,57 @@ class Options(object): self.build_type = argv[2].lower() self.local_distbuild = self.options.local_distbuild self.toolchain_params = self.options.toolchain_params - + self.presets = parse_presets(self.options.presets) userify_presets(self.presets, ('CFLAGS', 'CXXFLAGS', 'CONLYFLAGS', 'LDFLAGS', 'GO_COMPILE_FLAGS', 'GO_LINK_FLAGS', 'USE_LOCAL_SWIG', 'SWIG_TOOL', 'SWIG_LIBRARY')) - + Instance = None - + def opts(): if Options.Instance is 
None: Options.Instance = Options(sys.argv) return Options.Instance - + class Profiler(object): Generic = 'generic' GProf = 'gprof' - + class Arcadia(object): def __init__(self, root): self.root = root - - + + class Build(object): def __init__(self, arcadia, build_type, toolchain_params, force_ignore_local_files=False): self.arcadia = arcadia self.params = self._load_json_from_base64(toolchain_params) self.build_type = build_type - + platform = self.params['platform'] self.host = Platform.from_json(platform['host']) self.target = Platform.from_json(platform['target']) - + self.tc = self._get_toolchain_options() - + # TODO(somov): Удалить, когда перестанет использоваться. self.build_system = 'ymake' self.ignore_local_files = False - + dist_prefix = 'dist-' if self.build_type.startswith(dist_prefix): self.build_system = 'distbuild' self.build_type = self.build_type[len(dist_prefix):] - + if force_ignore_local_files: self.ignore_local_files = True if self.is_ide_build_type(self.build_type): self.ignore_local_files = True - + self.pic = not is_positive('FORCE_NO_PIC') @property @@ -499,19 +499,19 @@ class Build(object): emit('PIC', 'yes') emit('COMPILER_ID', self.tc.type.upper()) - + if self.is_valgrind: emit('WITH_VALGRIND', 'yes') - + toolchain_type, compiler_type, linker_type = Compilers[self.tc.type] toolchain = toolchain_type(self.tc, self) compiler = compiler_type(self.tc, self) linker = linker_type(self.tc, self) - + toolchain.print_toolchain() compiler.print_compiler() linker.print_linker() - + self._print_other_settings(compiler) def _print_build_settings(self): @@ -531,7 +531,7 @@ class Build(object): @property def is_release(self): # TODO(somov): Проверить, бывают ли тут суффиксы на самом деле - return self.build_type in ('release', 'relwithdebinfo', 'minsizerel', 'profile', 'gprof') or self.build_type.endswith('-release') + return self.build_type in ('release', 'relwithdebinfo', 'minsizerel', 'profile', 'gprof') or self.build_type.endswith('-release') @property def is_debug(self): @@ -557,7 +557,7 @@ class Build(object): @property def with_ndebug(self): return self.build_type in ('release', 'minsizerel', 'valgrind-release', 'profile', 'gprof', 'debugnoasserts') - + @property def is_valgrind(self): return self.build_type == 'valgrind' or self.build_type == 'valgrind-release' @@ -565,7 +565,7 @@ class Build(object): @property def is_ide(self): return self.is_ide_build_type(self.build_type) - + @property def profiler_type(self): if self.build_type == 'profile': @@ -574,11 +574,11 @@ class Build(object): return Profiler.GProf else: return None - + @staticmethod def is_ide_build_type(build_type): return build_type == 'nobuild' - + def _configure_runtime_versions(self): res = subprocess.check_output(['xcrun', 'simctl', 'list', '--json', 'runtimes']) raw_object = json.loads(res) @@ -590,7 +590,7 @@ class Build(object): def _get_toolchain_options(self): type_ = self.params['params']['type'] - + if self.params['params'].get('local') and type_ == 'xcode': detector = CompilerDetector() detector.detect(self.params['params']['c_compiler'], self.params['params']['cxx_compiler']) @@ -610,7 +610,7 @@ class Build(object): return MSVCToolchainOptions(self, detector) else: return GnuToolchainOptions(self, detector) - + def _print_other_settings(self, compiler): host = self.host @@ -619,7 +619,7 @@ class Build(object): ragel = Ragel() ragel.configure_toolchain(self, compiler) ragel.print_variables() - + perl = Perl() perl.configure_local() perl.print_variables('LOCAL_') @@ -643,7 +643,7 @@ class 
Build(object): if self.ignore_local_files or host.is_windows or is_positive('NO_SVN_DEPENDS'): emit_with_ignore_comment('SVN_DEPENDS') emit_with_ignore_comment('SVN_DEPENDS_CACHE__NO_UID__') - else: + else: def find_svn(): for i in range(0, 3): for path in (['.svn', 'wc.db'], ['.svn', 'entries'], ['.git', 'logs', 'HEAD']): @@ -663,16 +663,16 @@ class Build(object): return '${input;hide:"%s"}' % out_path return '' - + emit_with_ignore_comment('SVN_DEPENDS', find_svn()) emit_with_ignore_comment('SVN_DEPENDS_CACHE__NO_UID__', '${hide;kv:"disable_cache"}') - - @staticmethod + + @staticmethod def _load_json_from_base64(base64str): """ :rtype: dict[str, Any] """ - + def un_unicode(o): if isinstance(o, six.text_type): return six.ensure_str(o) @@ -710,7 +710,7 @@ class YMake(object): continue else: emit(key, opts().presets[key]) - + @staticmethod def _print_conf_content(path): with open(path, 'r') as fin: @@ -730,7 +730,7 @@ class YMake(object): if os.path.exists(full_path): return full_path return None - + def _find_core_conf(self): return self._find_conf('ymake.core.conf') @@ -759,12 +759,12 @@ class System(object): @staticmethod def print_nix_host_const(): emit('WRITE_COMMAND', '/bin/echo', '-e') - + print(''' when ($USE_PYTHON) { C_DEFINES+= -DUSE_PYTHON }''') - + @staticmethod def print_linux_const(): print(''' @@ -829,7 +829,7 @@ class CompilerDetector(object): except Exception as e: logger.debug('Preprocessing failed: %s', e) return None, None - + @staticmethod def get_compiler_vars(compiler, names): prefix = '____YA_VAR_' @@ -886,14 +886,14 @@ class CompilerDetector(object): return list(iter_version()) except Exception: return None - + clang_version = version(clang_vars) apple_build = apple_var in compiler_vars # TODO(somov): Учитывать номера версий сборки Apple компилятора Clang. _ = apple_build gcc_version = version(gcc_vars) msvc_version = version(msvc_vars) - + if clang_version: logger.debug('Detected Clang version %s', clang_version) self.type = 'clang' @@ -908,11 +908,11 @@ class CompilerDetector(object): raise ConfigureError('Could not determine custom compiler type: {}'.format(c_compiler)) self.version_list = clang_version or gcc_version or msvc_version - + self.c_compiler = c_compiler_path self.cxx_compiler = cxx_compiler and which(cxx_compiler) or c_compiler_path - + class ToolchainOptions(object): def __init__(self, build, detector): """ @@ -922,11 +922,11 @@ class ToolchainOptions(object): self.target = build.target tc_json = build.params - + logger.debug('Toolchain host %s', self.host) logger.debug('Toolchain target %s', self.target) logger.debug('Toolchain json %s', DebugString(lambda: json.dumps(tc_json, indent=4, sort_keys=True))) - + self.params = tc_json['params'] self._name = tc_json.get('name', 'theyknow') @@ -938,7 +938,7 @@ class ToolchainOptions(object): self.cxx_compiler = detector.cxx_compiler self.compiler_version_list = detector.version_list self.compiler_version = '.'.join(map(lambda part: six.ensure_str(str(part)), self.compiler_version_list)) - + else: self.type = self.params['type'] self.from_arcadia = True @@ -949,22 +949,22 @@ class ToolchainOptions(object): # TODO(somov): Требовать номер версии всегда. self.compiler_version = self.params.get('gcc_version') or self.params.get('version') or '0' self.compiler_version_list = list(map(int, self.compiler_version.split('.'))) - + # TODO(somov): Посмотреть, можно ли спрятать это поле. 
self.name_marker = '$(%s)' % self.params.get('match_root', self._name.upper()) self.arch_opt = self.params.get('arch_opt', []) - self.triplet_opt = self.params.get('triplet_opt', {}) + self.triplet_opt = self.params.get('triplet_opt', {}) self.target_opt = self.params.get('target_opt', []) - + # TODO(somov): Убрать чтение настройки из os.environ. self.werror_mode = preset('WERROR_MODE') or os.environ.get('WERROR_MODE') or self.params.get('werror_mode') or 'compiler_specific' - + # default C++ standard is set here, some older toolchains might need to redefine it in ya.conf.json self.cxx_std = self.params.get('cxx_std', 'c++20') self._env = tc_json.get('env', {}) - + self.android_ndk_version = self.params.get('android_ndk_version', None) logger.debug('c_compiler=%s', self.c_compiler) @@ -1022,10 +1022,10 @@ class GnuToolchainOptions(ToolchainOptions): self.inplace_tools = self.params.get('inplace_tools', False) self.strip = self.params.get('strip') self.objcopy = self.params.get('objcopy') - self.isystem = self.params.get('isystem') + self.isystem = self.params.get('isystem') self.dwarf_tool = self.target.find_in_dict(self.params.get('dwarf_tool')) - + # TODO(somov): Унифицировать формат sys_lib self.sys_lib = self.params.get('sys_lib', {}) if isinstance(self.sys_lib, dict): @@ -1033,15 +1033,15 @@ class GnuToolchainOptions(ToolchainOptions): self.os_sdk = preset('OS_SDK') or self._default_os_sdk() self.os_sdk_local = self.os_sdk == 'local' - + def _default_os_sdk(self): if self.target.is_linux: if self.target.is_armv8: return 'ubuntu-16' if self.target.is_armv7 and self.target.armv7_float_abi == 'hard': - return 'ubuntu-16' - + return 'ubuntu-16' + if self.target.is_armv7 and self.target.armv7_float_abi == 'softfp': return 'ubuntu-18' @@ -1155,19 +1155,19 @@ class GnuToolchain(Toolchain): ]) if self.tc.is_clang: - target_triple = self.tc.triplet_opt.get(target.arch, None) - if not target_triple: - target_triple = select(default=None, selectors=[ - (target.is_linux and target.is_x86_64, 'x86_64-linux-gnu'), - (target.is_linux and target.is_armv8, 'aarch64-linux-gnu'), + target_triple = self.tc.triplet_opt.get(target.arch, None) + if not target_triple: + target_triple = select(default=None, selectors=[ + (target.is_linux and target.is_x86_64, 'x86_64-linux-gnu'), + (target.is_linux and target.is_armv8, 'aarch64-linux-gnu'), (target.is_linux and target.is_armv7 and target.armv7_float_abi == 'hard', 'arm-linux-gnueabihf'), (target.is_linux and target.is_armv7 and target.armv7_float_abi == 'softfp', 'arm-linux-gnueabi'), (target.is_linux and target.is_powerpc, 'powerpc64le-linux-gnu'), (target.is_iossim and target.is_arm64, 'arm64-apple-ios{}-simulator'.format(ios_version_min)), - (target.is_apple and target.is_x86, 'i386-apple-darwin14'), - (target.is_apple and target.is_x86_64, 'x86_64-apple-darwin14'), + (target.is_apple and target.is_x86, 'i386-apple-darwin14'), + (target.is_apple and target.is_x86_64, 'x86_64-apple-darwin14'), (target.is_apple and target.is_macos_arm64, 'arm64-apple-macos11'), - (target.is_apple and target.is_armv7, 'armv7-apple-darwin14'), + (target.is_apple and target.is_armv7, 'armv7-apple-darwin14'), (target.is_apple and target.is_armv8, 'arm64-apple-darwin14'), (target.is_yocto and target.is_armv7, 'arm-poky-linux-gnueabi'), (target.is_android and target.is_x86, 'i686-linux-android'), @@ -1184,10 +1184,10 @@ class GnuToolchain(Toolchain): if target_triple: self.c_flags_platform.append('--target={}'.format(target_triple)) - if self.tc.isystem: - for root in 
list(self.tc.isystem): - self.c_flags_platform.extend(['-isystem', root]) - + if self.tc.isystem: + for root in list(self.tc.isystem): + self.c_flags_platform.extend(['-isystem', root]) + if target.is_android: self.c_flags_platform.extend(['-isystem', '{}/sources/cxx-stl/llvm-libc++abi/include'.format(self.tc.name_marker)]) @@ -1263,7 +1263,7 @@ class GnuToolchain(Toolchain): self.setup_tools(project='build/platform/linux_sdk', var='$OS_SDK_ROOT_RESOURCE_GLOBAL', bin='usr/bin', ldlibs='usr/lib/x86_64-linux-gnu') if target.is_yocto: - self.setup_sdk(project='build/platform/yocto_sdk/yocto_sdk', var='${YOCTO_SDK_ROOT_RESOURCE_GLOBAL}') + self.setup_sdk(project='build/platform/yocto_sdk/yocto_sdk', var='${YOCTO_SDK_ROOT_RESOURCE_GLOBAL}') elif self.tc.params.get('local'): if target.is_apple: if not tc.os_sdk_local: @@ -1319,7 +1319,7 @@ class GnuCompiler(Compiler): """ compiler_variable = 'CLANG' if tc.is_clang else 'GCC' super(GnuCompiler, self).__init__(tc, compiler_variable) - + self.build = build self.host = self.build.host self.target = self.build.target @@ -1380,7 +1380,7 @@ class GnuCompiler(Compiler): '-D_THREAD_SAFE', '-D_PTHREADS', '-D_REENTRANT', '-D_LIBCPP_ENABLE_CXX17_REMOVED_FEATURES', '-D_LARGEFILE_SOURCE', '-D__STDC_CONSTANT_MACROS', '-D__STDC_FORMAT_MACROS', ]) - + if not self.target.is_android: # There is no usable _FILE_OFFSET_BITS=64 support in Androids until API 21. And it's incomplete until at least API 24. # https://android.googlesource.com/platform/bionic/+/master/docs/32-bit-abi.md @@ -1389,7 +1389,7 @@ class GnuCompiler(Compiler): if self.target.is_linux or self.target.is_android or self.target.is_cygwin: self.c_defines.append('-D_GNU_SOURCE') - + if self.tc.is_clang and self.target.is_linux and self.target.is_x86_64: self.c_defines.append('-D_YNDX_LIBUNWIND_ENABLE_EXCEPTION_BACKTRACE') @@ -1399,10 +1399,10 @@ class GnuCompiler(Compiler): self.c_foptions.append('-fembed-bitcode') self.extra_compile_opts = [] - + self.c_flags = ['$CL_DEBUG_INFO', '$CL_DEBUG_INFO_DISABLE_CACHE__NO_UID__'] self.c_flags += self.tc.arch_opt + ['-pipe'] - + self.sfdl_flags = ['-E', '-C', '-x', 'c++'] if self.target.is_x86: @@ -1459,7 +1459,7 @@ class GnuCompiler(Compiler): if self.build.is_debug: self.c_foptions.append('$FSTACK') - + if self.build.is_fast_debug: self.c_flags.append('-Og') @@ -1477,23 +1477,23 @@ class GnuCompiler(Compiler): # Generate sections with address significance tables for ICF linker pass if self.tc.is_clang: self.c_foptions.extend(['-faddrsig']) - else: - self.optimize = '-O3' - + else: + self.optimize = '-O3' + if self.build.with_ndebug: self.c_defines.append('-DNDEBUG') else: self.c_defines.append('-UNDEBUG') - + if self.build.profiler_type in (Profiler.Generic, Profiler.GProf): self.c_foptions.append('-fno-omit-frame-pointer') - + if self.build.profiler_type == Profiler.GProf: self.c_flags.append('-pg') - + def print_compiler(self): super(GnuCompiler, self).print_compiler() - + emit('C_COMPILER_UNQUOTED', self.tc.c_compiler) emit('C_COMPILER', '${quo:C_COMPILER_UNQUOTED}') emit('OPTIMIZE', self.optimize) @@ -1504,7 +1504,7 @@ class GnuCompiler(Compiler): emit('GCC_PREPROCESSOR_OPTS', '$DUMP_DEPS', '$C_DEFINES') append('C_WARNING_OPTS', self.c_warnings) append('CXX_WARNING_OPTS', self.cxx_warnings) - + # PIE is only valid for executables, while PIC implies a shared library # `-pie` with a shared library is either ignored or fails to link emit_big(''' @@ -1546,7 +1546,7 @@ class GnuCompiler(Compiler): when ($NOGCCSTACKCHECK != "yes") { FSTACK += -fstack-check 
}''') - + c_builtins = [ "-Wno-builtin-macro-redefined", '-D__DATE__=\\""Sep 31 2019\\""', @@ -1642,7 +1642,7 @@ class GnuCompiler(Compiler): append('BC_CXXFLAGS', '$CXXFLAGS') append('C_DEFINES', '-D__LONG_LONG_SUPPORTED') - + emit('OBJ_CROSS_SUF', '$OBJ_SUF%s' % self.cross_suffix) emit('OBJECT_SUF', '$OBJ_SUF%s.o' % self.cross_suffix) emit('GCC_COMPILE_FLAGS', '$EXTRA_C_FLAGS -c -o $_COMPILE_OUTPUTS', '${pre=-I:_C__INCLUDE}') @@ -1784,7 +1784,7 @@ class Linker(object): def print_linker(self): self._print_linker_selector() - + def _print_linker_selector(self): # if self.type is None then _DEFAULT_LINKER is set to empty string value emit('_DEFAULT_LINKER_ID', self.type) @@ -1802,7 +1802,7 @@ class LD(Linker): self.host = self.build.host self.target = self.build.target self.tc = tc - + target = self.target self.ar = preset('AR') or self.tc.ar @@ -1891,7 +1891,7 @@ class LD(Linker): self.thread_library = select([ (target.is_linux or target.is_macos, '-lpthread'), ]) - + self.ld_export_dynamic_flag = None self.start_group = None self.end_group = None @@ -1902,7 +1902,7 @@ class LD(Linker): self.soname_option = None self.dwarf_command = None self.libresolv = '-lresolv' if target.is_linux or target.is_macos or target.is_android else None - + if target.is_linux or target.is_android: self.ld_export_dynamic_flag = '-rdynamic' self.use_stdlib = '-nodefaultlibs' @@ -1926,11 +1926,11 @@ class LD(Linker): if self.build.profiler_type == Profiler.GProf: self.ld_flags.append('-pg') - + # TODO(somov): Единое условие на coverage. if self.build.is_coverage or is_positive('GCOV_COVERAGE') or is_positive('CLANG_COVERAGE') or self.build.is_sanitized: self.use_stdlib = None - + self.ld_sdk = select(default=None, selectors=[ (target.is_macos_arm64, '-Wl,-sdk_version,11.0'), (target.is_macos, '-Wl,-sdk_version,10.15'), @@ -1971,7 +1971,7 @@ class LD(Linker): emit('AR_TOOL', self.ar) emit('AR_TYPE', self.ar_type) - + emit('STRIP_TOOL_VENDOR', self.strip) emit('OBJCOPY_TOOL_VENDOR', self.objcopy) @@ -1980,7 +1980,7 @@ class LD(Linker): emit('LD_STRIP_FLAG', self.ld_stripflag) emit('STRIP_FLAG') - + emit('LD_DCE_FLAG', self.ld_dce_flag) emit('DCE_FLAG') @@ -2298,7 +2298,7 @@ class MSVC(object): self.build = build self.tc = tc - + class MSVCToolchain(MSVC, Toolchain): def __init__(self, tc, build): """ @@ -2599,7 +2599,7 @@ class MSVCCompiler(MSVC, Compiler): when ($NO_OPTIMIZE == "yes") { OPTIMIZE = /Od }''') - + emit('SFDL_FLAG', ['/E', '/C', '/P', '/TP', '/Fi$SFDL_TMP_OUT']) emit('WERROR_FLAG', '/WX') emit('WERROR_MODE', self.tc.werror_mode) @@ -2660,7 +2660,7 @@ class MSVCLinker(MSVC, Linker): ignored_errors = [ 4221 ] - + flag_machine = '/MACHINE:{}'.format(arch.upper()) flags_ignore = ['/IGNORE:{}'.format(code) for code in ignored_errors] @@ -2823,7 +2823,7 @@ Compilers = { 'msvc': (MSVCToolchain, MSVCCompiler, MSVCLinker), } - + class Ragel(object): def __init__(self): self.rlgen_flags = [] @@ -2837,7 +2837,7 @@ class Ragel(object): self.set_default_flags(optimized=build.is_release and not build.is_sanitized) else: raise ConfigureError('Unexpected compiler {}'.format(compiler)) - + def set_default_flags(self, optimized): if optimized: self.rlgen_flags.append('-G2') @@ -2845,13 +2845,13 @@ class Ragel(object): else: self.rlgen_flags.append('-T0') self.ragel6_flags.append('-CT0') - + def print_variables(self): emit('RLGEN_FLAGS', self.rlgen_flags) emit('RAGEL_FLAGS', self.ragel_flags) emit('RAGEL6_FLAGS', self.ragel6_flags) - - + + class Python(object): def __init__(self, tc): self.python = None @@ -2860,20 
+2860,20 @@ class Python(object): self.libraries = None self.includes = None self.tc = tc - + def configure_posix(self, python=None, python_config=None): python = python or preset('PYTHON_BIN') or which('python') python_config = python_config or preset('PYTHON_CONFIG') or which('python-config') - + if python is None or python_config is None: return - + # python-config dumps each option on one line in the specified order config = get_stdout([python_config, '--cflags', '--ldflags', '--includes']) or '' config = config.split('\n') if len(config) < 3: return - + self.python = python self.flags = config[0] self.ldflags = config[1] @@ -2884,7 +2884,7 @@ class Python(object): self.libraries = '' if preset('USE_ARCADIA_PYTHON') == 'no' and not preset('USE_SYSTEM_PYTHON') and not self.tc.os_sdk_local: raise Exception("Use fixed python (see https://clubs.at.yandex-team.ru/arcadia/15392) or set OS_SDK=local flag") - + def print_variables(self): variables = Variables({ 'PYTHON_BIN': self.python, @@ -2893,33 +2893,33 @@ class Python(object): 'PYTHON_LIBRARIES': self.libraries, 'PYTHON_INCLUDE': self.includes }) - + variables.update_from_presets() variables.reset_if_any(reset_value='PYTHON-NOT-FOUND') variables.emit() - - + + class Perl(object): # Parse (key, value) from "version='5.26.0';" lines PERL_CONFIG_RE = re.compile(r"^(?P<key>\w+)='(?P<value>.*)';$", re.MULTILINE) - + def __init__(self): self.perl = None self.version = None self.privlib = None self.archlib = None - + def configure_local(self, perl=None): self.perl = perl or preset('PERL') or which('perl') if self.perl is None: return - + # noinspection PyTypeChecker config = dict(self._iter_config(['version', 'privlibexp', 'archlibexp'])) self.version = config.get('version') self.privlib = config.get('privlibexp') self.archlib = config.get('archlibexp') - + def print_variables(self, prefix=''): variables = Variables({ prefix + 'PERL': self.perl, @@ -2927,15 +2927,15 @@ class Perl(object): prefix + 'PERL_PRIVLIB': self.privlib, prefix + 'PERL_ARCHLIB': self.archlib, }) - + variables.reset_if_any(reset_value='PERL-NOT-FOUND') variables.emit(with_ignore_comment=variables.keys()) - + def _iter_config(self, config_keys): # Run perl -V:version -V:etc... perl_config = [self.perl] + ['-V:{}'.format(key) for key in config_keys] config = six.ensure_str(get_stdout(perl_config) or '') - + start = 0 while True: match = Perl.PERL_CONFIG_RE.search(config, start) @@ -2943,8 +2943,8 @@ class Perl(object): break yield match.group('key', 'value') start = match.end() - - + + class Setting(object): def __init__(self, key, auto=None, convert=None, rewrite=False): self.key = key @@ -3224,22 +3224,22 @@ def print_swig_config(): emit('SWIG_LIBRARY', library) -def main(): +def main(): options = opts() - + arcadia = Arcadia(options.arcadia_root) - + ymake = YMake(arcadia) - + ymake.print_core_conf() ymake.print_presets() ymake.print_settings() - + build = Build(arcadia, options.build_type, options.toolchain_params, force_ignore_local_files=not options.local_distbuild) build.print_build() emit_with_ignore_comment('CONF_SCRIPT_DEPENDS', __file__) - + if __name__ == '__main__': - main() + main() diff --git a/contrib/restricted/googletest/googlemock/include/gmock/gmock.h b/contrib/restricted/googletest/googlemock/include/gmock/gmock.h index 0ad0fa8d72..12469bc466 100644 --- a/contrib/restricted/googletest/googlemock/include/gmock/gmock.h +++ b/contrib/restricted/googletest/googlemock/include/gmock/gmock.h @@ -1,61 +1,61 @@ -// Copyright 2007, Google Inc. 
-// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - +// Copyright 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// Google Mock - a framework for writing C++ mock classes. +// +// This is the main header file a user should include. -// Google Mock - a framework for writing C++ mock classes. -// -// This is the main header file a user should include. 
- // GOOGLETEST_CM0002 DO NOT DELETE #ifndef GOOGLEMOCK_INCLUDE_GMOCK_GMOCK_H_ #define GOOGLEMOCK_INCLUDE_GMOCK_GMOCK_H_ - -// This file implements the following syntax: -// + +// This file implements the following syntax: +// // ON_CALL(mock_object, Method(...)) -// .With(...) ? -// .WillByDefault(...); -// -// where With() is optional and WillByDefault() must appear exactly -// once. -// +// .With(...) ? +// .WillByDefault(...); +// +// where With() is optional and WillByDefault() must appear exactly +// once. +// // EXPECT_CALL(mock_object, Method(...)) -// .With(...) ? -// .Times(...) ? -// .InSequence(...) * -// .WillOnce(...) * -// .WillRepeatedly(...) ? -// .RetiresOnSaturation() ? ; -// -// where all clauses are optional and WillOnce() can be repeated. - +// .With(...) ? +// .Times(...) ? +// .InSequence(...) * +// .WillOnce(...) * +// .WillRepeatedly(...) ? +// .RetiresOnSaturation() ? ; +// +// where all clauses are optional and WillOnce() can be repeated. + #include "gmock/gmock-actions.h" #include "gmock/gmock-cardinalities.h" #include "gmock/gmock-function-mocker.h" @@ -64,35 +64,35 @@ #include "gmock/gmock-more-matchers.h" #include "gmock/gmock-nice-strict.h" #include "gmock/internal/gmock-internal-utils.h" - -namespace testing { - -// Declares Google Mock flags that we want a user to use programmatically. -GMOCK_DECLARE_bool_(catch_leaked_mocks); -GMOCK_DECLARE_string_(verbose); + +namespace testing { + +// Declares Google Mock flags that we want a user to use programmatically. +GMOCK_DECLARE_bool_(catch_leaked_mocks); +GMOCK_DECLARE_string_(verbose); GMOCK_DECLARE_int32_(default_mock_behavior); - -// Initializes Google Mock. This must be called before running the -// tests. In particular, it parses the command line for the flags -// that Google Mock recognizes. Whenever a Google Mock flag is seen, -// it is removed from argv, and *argc is decremented. -// -// No value is returned. Instead, the Google Mock flag variables are -// updated. -// -// Since Google Test is needed for Google Mock to work, this function -// also initializes Google Test and parses its flags, if that hasn't -// been done. -GTEST_API_ void InitGoogleMock(int* argc, char** argv); - -// This overloaded version can be used in Windows programs compiled in -// UNICODE mode. -GTEST_API_ void InitGoogleMock(int* argc, wchar_t** argv); - + +// Initializes Google Mock. This must be called before running the +// tests. In particular, it parses the command line for the flags +// that Google Mock recognizes. Whenever a Google Mock flag is seen, +// it is removed from argv, and *argc is decremented. +// +// No value is returned. Instead, the Google Mock flag variables are +// updated. +// +// Since Google Test is needed for Google Mock to work, this function +// also initializes Google Test and parses its flags, if that hasn't +// been done. +GTEST_API_ void InitGoogleMock(int* argc, char** argv); + +// This overloaded version can be used in Windows programs compiled in +// UNICODE mode. +GTEST_API_ void InitGoogleMock(int* argc, wchar_t** argv); + // This overloaded version can be used on Arduino/embedded platforms where // there is no argc/argv. 
GTEST_API_ void InitGoogleMock();
-} // namespace testing
-
+} // namespace testing
+
#endif // GOOGLEMOCK_INCLUDE_GMOCK_GMOCK_H_
diff --git a/contrib/restricted/googletest/googlemock/ya.make b/contrib/restricted/googletest/googlemock/ya.make
index e1757514d6..38649ba5fc 100644
--- a/contrib/restricted/googletest/googlemock/ya.make
+++ b/contrib/restricted/googletest/googlemock/ya.make
@@ -1,12 +1,12 @@
# Generated by devtools/yamaker.
-LIBRARY()
-
+LIBRARY()
+
OWNER(
 somov
 g:cpp-contrib
)
-
+
LICENSE(
 Apache-2.0 AND
 BSD-3-Clause
@@ -23,8 +23,8 @@ ADDINCL(
 GLOBAL contrib/restricted/googletest/googletest/include
 contrib/restricted/googletest/googlemock
 contrib/restricted/googletest/googletest
-)
-
+)
+
NO_COMPILER_WARNINGS()
NO_UTIL()
@@ -35,8 +35,8 @@ CFLAGS(
 GLOBAL -DGTEST_HAS_STD_WSTRING=1
)
-SRCS(
+SRCS(
 src/gmock-all.cc
-)
-
-END()
+)
+
+END()
diff --git a/contrib/tools/bison/bison/src/files.c b/contrib/tools/bison/bison/src/files.c
index 82ed2b56e8..3b219229bc 100644
--- a/contrib/tools/bison/bison/src/files.c
+++ b/contrib/tools/bison/bison/src/files.c
@@ -89,9 +89,9 @@ static char *header_extension = NULL;
| STR1, and STR2. |
`-----------------------------------------------------------------*/
-#if defined _win_ || defined _WIN64 || defined _WIN32 || defined __WIN32__
+#if defined _win_ || defined _WIN64 || defined _WIN32 || defined __WIN32__
char *stpcpy(char *dst, const char *src);
-#endif
+#endif
static char *
concat2 (char const *str1, char const *str2)
diff --git a/contrib/tools/bison/bison/src/output.c b/contrib/tools/bison/bison/src/output.c
index 20dfe088b2..61376ba390 100644
--- a/contrib/tools/bison/bison/src/output.c
+++ b/contrib/tools/bison/bison/src/output.c
@@ -51,16 +51,16 @@
#endif
#ifndef PKGDATADIR
-#define STR(a) XSTR(a)
-#define XSTR(a) #a
-
+#define STR(a) XSTR(a)
+#define XSTR(a) #a
+
const char* default_pkgdatadir() {
- const char* arc_path = getenv("ARCADIA_ROOT_DISTBUILD");
- if (arc_path == NULL)
+ const char* arc_path = getenv("ARCADIA_ROOT_DISTBUILD");
+ if (arc_path == NULL)
 arc_path = ArcadiaRoot();
- return uniqstr_vsprintf("%s/" STR(BISON_DATA_DIR), arc_path);
+ return uniqstr_vsprintf("%s/" STR(BISON_DATA_DIR), arc_path);
}
#define PKGDATADIR (default_pkgdatadir())
#endif
diff --git a/contrib/tools/bison/bison/ya.make b/contrib/tools/bison/bison/ya.make
index 2676031b81..04f8ae3758 100644
--- a/contrib/tools/bison/bison/ya.make
+++ b/contrib/tools/bison/bison/ya.make
@@ -59,7 +59,7 @@ SRCS(
CFLAGS(
 -Daccept=bison_accept
- -DBISON_DATA_DIR="contrib/tools/bison/bison/data"
+ -DBISON_DATA_DIR="contrib/tools/bison/bison/data"
)
PEERDIR(
diff --git a/contrib/tools/python/src/config_init.c b/contrib/tools/python/src/config_init.c
index cdb5d15e15..2fa47000b8 100644
--- a/contrib/tools/python/src/config_init.c
+++ b/contrib/tools/python/src/config_init.c
@@ -51,13 +51,13 @@ extern void init_multiprocessing(void);
extern void initspwd(void);
#endif
-#ifdef _DARWIN_
+#ifdef _DARWIN_
#ifndef __IOS__
-extern void init_multiprocessing(void);
+extern void init_multiprocessing(void);
extern void init_scproxy(void);
-#endif
#endif
-
+#endif
+
#ifdef _CYGWIN_
extern void init_multiprocessing(void);
#endif
@@ -86,7 +86,7 @@ extern void initmsvcrt(void);
extern void init_subprocess(void);
extern void init_winreg(void);
#endif
-
+
#if !defined(_CYGWIN_)
-extern void init_ctypes(void);
-#endif
+extern void init_ctypes(void);
+#endif
diff --git a/contrib/tools/python/src/config_map.c b/contrib/tools/python/src/config_map.c
index 98b5230874..6bfe125ddd 100644
--- a/contrib/tools/python/src/config_map.c
+++ b/contrib/tools/python/src/config_map.c
@@ -51,13 +51,13 @@
{"spwd", initspwd},
#endif
-#ifdef _DARWIN_
+#ifdef _DARWIN_
#ifndef __IOS__
-{"_multiprocessing", init_multiprocessing},
+{"_multiprocessing", init_multiprocessing},
{"_scproxy", init_scproxy},
-#endif
#endif
-
+#endif
+
#ifdef _CYGWIN_
{"_multiprocessing", init_multiprocessing},
#endif
@@ -86,7 +86,7 @@
{"_subprocess", init_subprocess},
{"_winreg", init_winreg},
#endif
-
+
#if defined(_x86_) && !defined(_CYGWIN_) || defined(__powerpc__) || defined(__aarch64__)
-{"_ctypes", init_ctypes},
-#endif
+{"_ctypes", init_ctypes},
+#endif
diff --git a/contrib/tools/ya.make b/contrib/tools/ya.make
index 3517db4fc0..750911c587 100644
--- a/contrib/tools/ya.make
+++ b/contrib/tools/ya.make
@@ -58,7 +58,7 @@ RECURSE(
IF (NOT OS_WINDOWS)
 RECURSE(
- ag
+ ag
 lftp
 make
 )
diff --git a/library/cpp/comptable/usage/ya.make b/library/cpp/comptable/usage/ya.make
index 3197b5f4ae..ab31e7528c 100644
--- a/library/cpp/comptable/usage/ya.make
+++ b/library/cpp/comptable/usage/ya.make
@@ -1,6 +1,6 @@
PROGRAM()
-OWNER(ironpeter)
+OWNER(ironpeter)
SRCS(
 usage.cpp
@@ -9,5 +9,5 @@ SRCS(
PEERDIR(
 library/cpp/comptable
)
-
+
END()
diff --git a/library/cpp/comptable/ut/ya.make b/library/cpp/comptable/ut/ya.make
index bc21fc375f..d0a49793a5 100644
--- a/library/cpp/comptable/ut/ya.make
+++ b/library/cpp/comptable/ut/ya.make
@@ -1,6 +1,6 @@
UNITTEST_FOR(library/cpp/comptable)
-OWNER(ironpeter)
+OWNER(ironpeter)
SRCS(
 comptable_ut.cpp
diff --git a/library/cpp/comptable/ya.make b/library/cpp/comptable/ya.make
index f27fb69793..314603c62a 100644
--- a/library/cpp/comptable/ya.make
+++ b/library/cpp/comptable/ya.make
@@ -1,6 +1,6 @@
LIBRARY()
-OWNER(ironpeter)
+OWNER(ironpeter)
SRCS(
 comptable.cpp
diff --git a/library/cpp/getopt/last_getopt.h b/library/cpp/getopt/last_getopt.h
index 4a24cfbe4f..d14f05cc5b 100644
--- a/library/cpp/getopt/last_getopt.h
+++ b/library/cpp/getopt/last_getopt.h
@@ -1,3 +1,3 @@
-#pragma once
+#pragma once
#include <library/cpp/getopt/small/last_getopt.h>
diff --git a/library/cpp/getopt/last_getopt_support.h b/library/cpp/getopt/last_getopt_support.h
index 3eb609653c..b71c7045b1 100644
--- a/library/cpp/getopt/last_getopt_support.h
+++ b/library/cpp/getopt/last_getopt_support.h
@@ -1,3 +1,3 @@
-#pragma once
+#pragma once
#include <library/cpp/getopt/small/last_getopt_support.h>
diff --git a/library/cpp/getopt/modchooser.h b/library/cpp/getopt/modchooser.h
index fc09e16d63..9bf73daf13 100644
--- a/library/cpp/getopt/modchooser.h
+++ b/library/cpp/getopt/modchooser.h
@@ -1,3 +1,3 @@
-#pragma once
+#pragma once
#include <library/cpp/getopt/small/modchooser.h>
diff --git a/library/cpp/getopt/opt.h b/library/cpp/getopt/opt.h
index f33208eae0..d2a85075bd 100644
--- a/library/cpp/getopt/opt.h
+++ b/library/cpp/getopt/opt.h
@@ -1,3 +1,3 @@
-#pragma once
+#pragma once
#include <library/cpp/getopt/small/opt.h>
diff --git a/library/cpp/getopt/opt2.h b/library/cpp/getopt/opt2.h
index a97ccf7eba..2d35bc7b29 100644
--- a/library/cpp/getopt/opt2.h
+++ b/library/cpp/getopt/opt2.h
@@ -1,3 +1,3 @@
-#pragma once
+#pragma once
#include <library/cpp/getopt/small/opt2.h>
diff --git a/library/cpp/getopt/posix_getopt.h b/library/cpp/getopt/posix_getopt.h
index f934f4fb02..8cb7ece624 100644
--- a/library/cpp/getopt/posix_getopt.h
+++ b/library/cpp/getopt/posix_getopt.h
@@ -1,3 +1,3 @@
-#pragma once
+#pragma once
#include <library/cpp/getopt/small/posix_getopt.h>
diff --git a/library/cpp/getopt/print.cpp b/library/cpp/getopt/print.cpp
index 0765532f8b..8cf1c62e4d 100644
--- a/library/cpp/getopt/print.cpp
+++ b/library/cpp/getopt/print.cpp
@@ -1,14 +1,14 @@
-#include "last_getopt.h"
+#include "last_getopt.h"
#include "last_getopt_support.h"
#include "modchooser.h"
#include "opt.h"
#include "opt2.h"
#include "posix_getopt.h"
#include "ygetopt.h"
-
+
#include <library/cpp/svnversion/svnversion.h>
#include <library/cpp/build_info/build_info.h>
-
+
namespace NLastGetoptPrivate {
 TString InitVersionString() {
 TString ts = GetProgramSvnVersion();
@@ -20,7 +20,7 @@ namespace NLastGetoptPrivate {
 ts += sandboxTaskId;
 }
 return ts;
- }
+ }
 TString InitShortVersionString() {
 TString ts = GetProgramShortVersionData();
@@ -37,4 +37,4 @@ namespace NLastGetoptPrivate {
 }
 } Init;
-}
+}
diff --git a/library/cpp/getopt/small/ya.make b/library/cpp/getopt/small/ya.make
index 3453fa949b..96de0f04b1 100644
--- a/library/cpp/getopt/small/ya.make
+++ b/library/cpp/getopt/small/ya.make
@@ -1,28 +1,28 @@
-LIBRARY()
-
+LIBRARY()
+
OWNER(pg)
-
+
PEERDIR(
 library/cpp/colorizer
)
-SRCS(
+SRCS(
 completer.cpp
 completer_command.cpp
 completion_generator.cpp
 formatted_output.cpp
- last_getopt.cpp
+ last_getopt.cpp
 last_getopt_easy_setup.cpp
 last_getopt_opt.cpp
 last_getopt_opts.cpp
 last_getopt_parser.cpp
 last_getopt_parse_result.cpp
- modchooser.cpp
- opt.cpp
- opt2.cpp
- posix_getopt.cpp
+ modchooser.cpp
+ opt.cpp
+ opt2.cpp
+ posix_getopt.cpp
 wrap.cpp
- ygetopt.cpp
-)
-
-END()
+ ygetopt.cpp
+)
+
+END()
diff --git a/library/cpp/getopt/ya.make b/library/cpp/getopt/ya.make
index dd2cd5c40e..6df23b22b2 100644
--- a/library/cpp/getopt/ya.make
+++ b/library/cpp/getopt/ya.make
@@ -2,12 +2,12 @@ LIBRARY()
OWNER(pg)
-PEERDIR(
+PEERDIR(
 library/cpp/getopt/small
 library/cpp/svnversion
 library/cpp/build_info
-)
-
+)
+
SRCS(
 GLOBAL print.cpp
)
diff --git a/library/cpp/getopt/ygetopt.h b/library/cpp/getopt/ygetopt.h
index 126e1fbc4a..fd018b3128 100644
--- a/library/cpp/getopt/ygetopt.h
+++ b/library/cpp/getopt/ygetopt.h
@@ -1,3 +1,3 @@
-#pragma once
+#pragma once
#include <library/cpp/getopt/small/ygetopt.h>
diff --git a/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.proto b/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.proto
index 4e5d7b4932..fd23eb372b 100644
--- a/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.proto
+++ b/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.proto
@@ -1,4 +1,4 @@
-import "google/protobuf/descriptor.proto";
+import "google/protobuf/descriptor.proto";
package NMonProto;
diff --git a/library/cpp/protobuf/json/inline.h b/library/cpp/protobuf/json/inline.h
index 8a09991905..e2d7bb6ef0 100644
--- a/library/cpp/protobuf/json/inline.h
+++ b/library/cpp/protobuf/json/inline.h
@@ -5,7 +5,7 @@
// An example of usage:
// 1) Define a field option in your .proto to identify fields which should be inlined, e.g.
//
-// import "google/protobuf/descriptor.proto";
+// import "google/protobuf/descriptor.proto";
// extend google.protobuf.FieldOptions {
// optional bool this_is_json = 58253; // do not forget assign some more or less unique tag
// }
diff --git a/library/cpp/protobuf/json/ut/filter_ut.proto b/library/cpp/protobuf/json/ut/filter_ut.proto
index 0da5484dd1..29d630ade4 100644
--- a/library/cpp/protobuf/json/ut/filter_ut.proto
+++ b/library/cpp/protobuf/json/ut/filter_ut.proto
@@ -1,4 +1,4 @@
-import "google/protobuf/descriptor.proto";
+import "google/protobuf/descriptor.proto";
package NProtobufJsonUt;
diff --git a/library/cpp/protobuf/json/ut/inline_ut.proto b/library/cpp/protobuf/json/ut/inline_ut.proto
index 3b30020bab..76bd10232d 100644
--- a/library/cpp/protobuf/json/ut/inline_ut.proto
+++ b/library/cpp/protobuf/json/ut/inline_ut.proto
@@ -1,4 +1,4 @@
-import "google/protobuf/descriptor.proto";
+import "google/protobuf/descriptor.proto";
package NProtobufJsonUt;
diff --git a/library/cpp/protobuf/util/proto/merge.proto b/library/cpp/protobuf/util/proto/merge.proto
index 51bbe80f1b..a937041c07 100644
--- a/library/cpp/protobuf/util/proto/merge.proto
+++ b/library/cpp/protobuf/util/proto/merge.proto
@@ -1,4 +1,4 @@
-import "google/protobuf/descriptor.proto";
+import "google/protobuf/descriptor.proto";
// These meta-options are used for selecting proper merging method, see merge.h
diff --git a/library/cpp/protobuf/util/ut/common_ut.proto b/library/cpp/protobuf/util/ut/common_ut.proto
index 8e24eb1212..9cf803ffbf 100644
--- a/library/cpp/protobuf/util/ut/common_ut.proto
+++ b/library/cpp/protobuf/util/ut/common_ut.proto
@@ -1,4 +1,4 @@
-import "google/protobuf/descriptor.proto";
+import "google/protobuf/descriptor.proto";
import "library/cpp/protobuf/util/proto/merge.proto";
package NProtobufUtilUt;
diff --git a/library/cpp/sighandler/ya.make b/library/cpp/sighandler/ya.make
index 761739bf39..c0f7ea6084 100644
--- a/library/cpp/sighandler/ya.make
+++ b/library/cpp/sighandler/ya.make
@@ -1,6 +1,6 @@
LIBRARY()
-OWNER(pg)
+OWNER(pg)
SRCS(
 async_signals_handler.cpp
diff --git a/library/cpp/terminate_handler/sample/exception/ya.make b/library/cpp/terminate_handler/sample/exception/ya.make
index 6d9e93bfda..958c26f89a 100644
--- a/library/cpp/terminate_handler/sample/exception/ya.make
+++ b/library/cpp/terminate_handler/sample/exception/ya.make
@@ -1,4 +1,4 @@
-PROGRAM(exception_sample)
+PROGRAM(exception_sample)
OWNER(nga)
diff --git a/library/cpp/unicode/normalization/ya.make b/library/cpp/unicode/normalization/ya.make
index 6431135ede..95bc93f297 100644
--- a/library/cpp/unicode/normalization/ya.make
+++ b/library/cpp/unicode/normalization/ya.make
@@ -2,7 +2,7 @@ LIBRARY()
NO_UTIL()
-OWNER(alzobnin)
+OWNER(alzobnin)
SRCS(
 generated/composition.cpp
diff --git a/util/folder/dirut.cpp b/util/folder/dirut.cpp
index 1c261c9dd7..ffc9b09f96 100644
--- a/util/folder/dirut.cpp
+++ b/util/folder/dirut.cpp
@@ -399,9 +399,9 @@ int mkpath(char* path, int mode) {
 return NFs::MakeDirectoryRecursive(path, NFs::EFilePermission(mode)) ? 0 : -1;
}
-// Implementation of realpath in FreeBSD (version 9.0 and less) and GetFullPathName in Windows
-// did not require last component of the file name to exist (other implementations will fail
-// if it does not). Use RealLocation if that behaviour is required.
+// Implementation of realpath in FreeBSD (version 9.0 and less) and GetFullPathName in Windows
+// did not require last component of the file name to exist (other implementations will fail
+// if it does not). Use RealLocation if that behaviour is required.
TString RealPath(const TString& path) {
 TTempBuf result;
 Y_ASSERT(result.Size() > MAX_PATH); //TMP_BUF_LEN > MAX_PATH
@@ -416,13 +416,13 @@ TString RealPath(const TString& path) {
TString RealLocation(const TString& path) {
 if (NFs::Exists(path))
- return RealPath(path);
+ return RealPath(path);
 TString dirpath = GetDirName(path);
 if (NFs::Exists(dirpath))
 return RealPath(dirpath) + GetDirectorySeparatorS() + GetFileNameComponent(path.data());
- ythrow TFileError() << "RealLocation failed \"" << path << "\"";
-}
-
+ ythrow TFileError() << "RealLocation failed \"" << path << "\"";
+}
+
int MakeTempDir(char path[/*FILENAME_MAX*/], const char* prefix) {
 int ret;
diff --git a/util/folder/path.cpp b/util/folder/path.cpp
index c8f440d669..bfe0c67d68 100644
--- a/util/folder/path.cpp
+++ b/util/folder/path.cpp
@@ -77,29 +77,29 @@ TFsPath TFsPath::RelativeTo(const TFsPath& root) const {
 return TFsPath(split.Reconstruct());
}
-TFsPath TFsPath::RelativePath(const TFsPath& root) const {
- TSplit split = GetSplit();
- const TSplit& rsplit = root.GetSplit();
- size_t cnt = 0;
+TFsPath TFsPath::RelativePath(const TFsPath& root) const {
+ TSplit split = GetSplit();
+ const TSplit& rsplit = root.GetSplit();
+ size_t cnt = 0;
 while (split.size() > cnt && rsplit.size() > cnt && split[cnt] == rsplit[cnt]) {
 ++cnt;
 }
 bool absboth = split.IsAbsolute && rsplit.IsAbsolute;
 if (cnt == 0 && !absboth) {
- ythrow TIoException() << "No common parts in " << *this << " and " << root;
+ ythrow TIoException() << "No common parts in " << *this << " and " << root;
 }
 TString r;
 for (size_t i = 0; i < rsplit.size() - cnt; i++) {
- r += i == 0 ? ".." : "/..";
+ r += i == 0 ? ".." : "/..";
 }
 for (size_t i = cnt; i < split.size(); i++) {
 r += (i == 0 || i == cnt && rsplit.size() - cnt == 0 ? "" : "/");
 r += split[i];
 }
 return r.size() ? TFsPath(r) : TFsPath();
-}
-
+}
+
TFsPath TFsPath::Parent() const {
 if (!IsDefined()) {
 return TFsPath();
@@ -314,11 +314,11 @@ TFsPath TFsPath::RealPath() const {
 return ::RealPath(*this);
}
-TFsPath TFsPath::RealLocation() const {
- CheckDefined();
+TFsPath TFsPath::RealLocation() const {
+ CheckDefined();
 return ::RealLocation(*this);
-}
-
+}
+
TFsPath TFsPath::ReadLink() const {
 CheckDefined();
diff --git a/util/folder/path.h b/util/folder/path.h
index 68a03bd85e..2fb4d6b4ef 100644
--- a/util/folder/path.h
+++ b/util/folder/path.h
@@ -119,7 +119,7 @@ public:
 /**
 * @returns relative path or empty path if root equals to this.
 */
- TFsPath RelativePath(const TFsPath& root) const; //..; for relative paths 1st component must be the same
+ TFsPath RelativePath(const TFsPath& root) const; //..; for relative paths 1st component must be the same
 /**
 * Never fails. Returns this if already a root.
@@ -191,7 +191,7 @@ public:
 void Touch() const;
 TFsPath RealPath() const;
- TFsPath RealLocation() const;
+ TFsPath RealLocation() const;
 TFsPath ReadLink() const; /// always absolute
diff --git a/util/folder/path_ut.cpp b/util/folder/path_ut.cpp
index 1d6ce221aa..e6a3451016 100644
--- a/util/folder/path_ut.cpp
+++ b/util/folder/path_ut.cpp
@@ -266,7 +266,7 @@ Y_UNIT_TEST_SUITE(TFsPathTests) {
 Y_UNIT_TEST(Cwd) {
 UNIT_ASSERT_VALUES_EQUAL(TFsPath::Cwd().RealPath(), TFsPath(".").RealPath());
 }
-
+
 Y_UNIT_TEST(TestSubpathOf) {
 UNIT_ASSERT(TFsPath("/a/b/c/d").IsSubpathOf("/a/b"));
@@ -344,7 +344,7 @@ Y_UNIT_TEST_SUITE(TFsPathTests) {
 UNIT_ASSERT_EXCEPTION(TFsPath("a/b/c").RelativePath(TFsPath("d/e")), TIoException);
 }
-
+
 Y_UNIT_TEST(TestUndefined) {
 UNIT_ASSERT_VALUES_EQUAL(TFsPath(), TFsPath(""));
 UNIT_ASSERT_VALUES_EQUAL(TFsPath(), TFsPath().Fix());
diff --git a/util/network/sock.h b/util/network/sock.h
index 5db2ee1091..b10be2f715 100644
--- a/util/network/sock.h
+++ b/util/network/sock.h
@@ -251,10 +251,10 @@ struct TSockAddrInet: public sockaddr_in, public ISockAddr {
 if (ret < 0)
 return -errno;
- socklen_t len = Len();
- if (getsockname(s, (struct sockaddr*)(SockAddr()), &len) < 0)
- return -WSAGetLastError();
-
+ socklen_t len = Len();
+ if (getsockname(s, (struct sockaddr*)(SockAddr()), &len) < 0)
+ return -WSAGetLastError();
+
 return 0;
 }
diff --git a/util/system/context_aarch64.S b/util/system/context_aarch64.S
index e88f6d150f..0b2ef4e4a6 100644
--- a/util/system/context_aarch64.S
+++ b/util/system/context_aarch64.S
@@ -1,12 +1,12 @@
.p2align 2
-#if !(defined __darwin__) && !(defined __arm64__)
+#if !(defined __darwin__) && !(defined __arm64__)
.global __mysetjmp
.type __mysetjmp,@function
-__mysetjmp:
-#else
-.global ___mysetjmp
-___mysetjmp:
-#endif
+__mysetjmp:
+#else
+.global ___mysetjmp
+___mysetjmp:
+#endif
// IHI0055B_aapcs64.pdf 5.1.1, 5.1.2 callee saved registers
 stp x19, x20, [x0,#0]
 stp x21, x22, [x0,#16]
@@ -24,14 +24,14 @@ ___mysetjmp:
 ret
.p2align 2
-#if !(defined __darwin__) && !(defined __arm64__)
+#if !(defined __darwin__) && !(defined __arm64__)
.global __mylongjmp
.type __mylongjump,@function
-__mylongjmp:
-#else
-.global ___mylongjmp
-___mylongjmp:
-#endif
+__mylongjmp:
+#else
+.global ___mylongjmp
+___mylongjmp:
+#endif
// IHI0055B_aapcs64.pdf 5.1.1, 5.1.2 callee saved registers
 ldp x19, x20, [x0,#0]
 ldp x21, x22, [x0,#16]
diff --git a/util/system/platform.h b/util/system/platform.h
index bfa68216a4..58f310ab34 100644
--- a/util/system/platform.h
+++ b/util/system/platform.h
@@ -93,8 +93,8 @@
#if defined(__i386__) || defined(_M_IX86)
 #define _i386_
-#endif
-
+#endif
+
#if defined(__ia64__) || defined(_M_IA64)
 #define _ia64_
#endif
diff --git a/util/system/tls.h b/util/system/tls.h
index 8f5efdfe34..3c4f56dbeb 100644
--- a/util/system/tls.h
+++ b/util/system/tls.h
@@ -9,7 +9,7 @@
#if defined(_darwin_)
 #define Y_DISABLE_THRKEY_OPTIMIZATION
-#endif
+#endif
#if defined(_arm_) && defined(_linux_)
 #define Y_DISABLE_THRKEY_OPTIMIZATION
diff --git a/util/system/types.h b/util/system/types.h
index 66064082ac..12e68a6060 100644
--- a/util/system/types.h
+++ b/util/system/types.h
@@ -22,13 +22,13 @@
typedef uint32_t ui32;
typedef int32_t i32;
#endif
-#if defined(_darwin_) && defined(_64_)
-typedef unsigned long ui64;
+#if defined(_darwin_) && defined(_64_)
+typedef unsigned long ui64;
typedef long i64;
-#else
+#else
typedef uint64_t ui64;
typedef int64_t i64;
-#endif
+#endif
#define LL(number) INT64_C(number)
#define ULL(number) UINT64_C(number)
diff --git a/util/ya.make b/util/ya.make
index d41cd64324..6ebe7e40cf 100644
--- a/util/ya.make
+++ b/util/ya.make
@@ -340,7 +340,7 @@ ELSE()
 system/context_x86.asm
 )
 ENDIF()
- IF (ARCH_AARCH64 OR ARCH_ARM64)
+ IF (ARCH_AARCH64 OR ARCH_ARM64)
 SRCS(
 system/context_aarch64.S
 )
diff --git a/ydb/core/base/ut/ya.make b/ydb/core/base/ut/ya.make
index 9d229bc987..7d6b2f3546 100644
--- a/ydb/core/base/ut/ya.make
+++ b/ydb/core/base/ut/ya.make
@@ -14,7 +14,7 @@ PEERDIR(
SRCS(
 blobstorage_grouptype_ut.cpp
 localdb_ut.cpp
- logoblob_ut.cpp
+ logoblob_ut.cpp
 shared_data_ut.cpp
 statestorage_ut.cpp
 statestorage_guardian_impl_ut.cpp
diff --git a/ydb/core/client/ut/ya.make b/ydb/core/client/ut/ya.make
index b1549c5d6a..5d839f47c8 100644
--- a/ydb/core/client/ut/ya.make
+++ b/ydb/core/client/ut/ya.make
@@ -41,7 +41,7 @@ INCLUDE(${ARCADIA_ROOT}/ydb/tests/supp/ubsan_supp.inc)
SRCS(
 cancel_tx_ut.cpp
- client_ut.cpp
+ client_ut.cpp
 flat_ut.cpp
 locks_ut.cpp
 query_stats_ut.cpp
diff --git a/ydb/core/protos/counters.proto b/ydb/core/protos/counters.proto
index f6b765511d..05a8c93d90 100644
--- a/ydb/core/protos/counters.proto
+++ b/ydb/core/protos/counters.proto
@@ -1,4 +1,4 @@
-import "google/protobuf/descriptor.proto";
+import "google/protobuf/descriptor.proto";
package NKikimr;
diff --git a/ydb/core/protos/msgbus.proto b/ydb/core/protos/msgbus.proto
index 7d1c32f09d..df7cda5980 100644
--- a/ydb/core/protos/msgbus.proto
+++ b/ydb/core/protos/msgbus.proto
@@ -27,7 +27,7 @@ import "ydb/public/api/protos/draft/persqueue_error_codes.proto";
import "ydb/public/api/protos/ydb_issue_message.proto";
import "ydb/library/mkql_proto/protos/minikql.proto";
-import "google/protobuf/descriptor.proto";
+import "google/protobuf/descriptor.proto";
package NKikimrClient;
option java_package = "ru.yandex.kikimr.proto";
diff --git a/ydb/core/tablet/ut/ya.make b/ydb/core/tablet/ut/ya.make
index 7c81e5a4d0..52440600e1 100644
--- a/ydb/core/tablet/ut/ya.make
+++ b/ydb/core/tablet/ut/ya.make
@@ -27,7 +27,7 @@ SRCS(
 resource_broker_ut.cpp
 tablet_counters_aggregator_ut.cpp
 tablet_metrics_ut.cpp
- tablet_pipe_ut.cpp
+ tablet_pipe_ut.cpp
 tablet_pipecache_ut.cpp
 tablet_req_blockbs_ut.cpp
 tablet_resolver_ut.cpp
diff --git a/ydb/core/testlib/actors/ut/ya.make b/ydb/core/testlib/actors/ut/ya.make
index 7bc27373f1..85a31fe740 100644
--- a/ydb/core/testlib/actors/ut/ya.make
+++ b/ydb/core/testlib/actors/ut/ya.make
@@ -1,8 +1,8 @@
UNITTEST_FOR(ydb/core/testlib/actors)
-OWNER(
+OWNER(
 g:kikimr
-)
+)
FORK_SUBTESTS()
IF (SANITIZER_TYPE OR WITH_VALGRIND)
diff --git a/ydb/core/testlib/ya.make b/ydb/core/testlib/ya.make
index 0e2f8d5307..137fa829f3 100644
--- a/ydb/core/testlib/ya.make
+++ b/ydb/core/testlib/ya.make
@@ -3,7 +3,7 @@ LIBRARY()
OWNER(
 ddoarn
 fomichev
- vvvv
+ vvvv
 g:kikimr
)
diff --git a/ydb/core/util/ut/ya.make b/ydb/core/util/ut/ya.make
index 8c84320718..315713cad8 100644
--- a/ydb/core/util/ut/ya.make
+++ b/ydb/core/util/ut/ya.make
@@ -24,13 +24,13 @@ SRCS(
 bits_ut.cpp
 btree_cow_ut.cpp
 btree_ut.cpp
- cache_ut.cpp
+ cache_ut.cpp
 circular_queue_ut.cpp
 concurrent_rw_hash_ut.cpp
 fast_tls_ut.cpp
 fragmented_buffer_ut.cpp
 hazard_ut.cpp
- hyperlog_counter_ut.cpp
+ hyperlog_counter_ut.cpp
 interval_set_ut.cpp
 intrusive_fixed_hash_set_ut.cpp
 intrusive_heap_ut.cpp
@@ -42,7 +42,7 @@ SRCS(
 operation_queue_priority_ut.cpp
 page_map_ut.cpp
 queue_inplace_ut.cpp
- queue_oneone_inplace_ut.cpp
+ queue_oneone_inplace_ut.cpp
 simple_cache_ut.cpp
 time_series_vec_ut.cpp
 token_bucket_ut.cpp
diff --git a/ydb/library/yql/minikql/ut/ya.make b/ydb/library/yql/minikql/ut/ya.make
index 5df9834f39..098a77d0c6 100644
--- a/ydb/library/yql/minikql/ut/ya.make
+++ b/ydb/library/yql/minikql/ut/ya.make
@@ -19,10 +19,10 @@ OWNER(
SRCS(
 compact_hash_ut.cpp
 mkql_alloc_ut.cpp
- mkql_node_builder_ut.cpp
+ mkql_node_builder_ut.cpp
 mkql_node_cast_ut.cpp
- mkql_node_printer_ut.cpp
- mkql_node_ut.cpp
+ mkql_node_printer_ut.cpp
+ mkql_node_ut.cpp
 mkql_opt_literal_ut.cpp
 mkql_stats_registry_ut.cpp
 mkql_type_ops_ut.cpp