aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorAlexander Smirnov <alex@ydb.tech>2024-09-13 08:44:29 +0000
committerAlexander Smirnov <alex@ydb.tech>2024-09-13 08:44:29 +0000
commit2e6cb77e04b736082e5706f31bdd92bffdcbdc28 (patch)
tree7b38df2eddbd3bfc17730fe13496b5efb0a039a4
parenta3a65179dfa7410d510252adbc2d04c8751f3126 (diff)
parent02f70f11e125e828e4ac5ea843ca7c657cbb45cd (diff)
downloadydb-2e6cb77e04b736082e5706f31bdd92bffdcbdc28.tar.gz
Merge branch 'rightlib' into mergelibs-240913-0843
-rw-r--r--build/conf/python.conf54
-rw-r--r--build/export_generators/cmake/cmake/antlr.cmake13
-rw-r--r--build/export_generators/cmake/cmake/antlr4.cmake30
-rw-r--r--build/export_generators/cmake/generator.toml8
-rw-r--r--build/export_generators/cmake/target_cmake_lists.jinja3
-rw-r--r--build/export_generators/hardcoded-cmake/cmake/antlr.cmake13
-rw-r--r--build/export_generators/hardcoded-cmake/cmake/antlr4.cmake30
-rw-r--r--build/export_generators/hardcoded-cmake/generator.toml5
-rw-r--r--build/plugins/_dart_fields.py128
-rw-r--r--build/plugins/pybuild.py59
-rw-r--r--build/plugins/ytest.py62
-rw-r--r--build/sysincl/windows.yml13
-rw-r--r--build/ymake.core.conf15
-rw-r--r--contrib/libs/curl/lib/content_encoding.c2
-rw-r--r--contrib/libs/curl/lib/curl_config-linux.h6
-rw-r--r--contrib/libs/curl/lib/curl_config-osx.h6
-rw-r--r--contrib/libs/curl/lib/curl_config-win.h2
-rw-r--r--contrib/libs/curl/lib/version.c2
-rw-r--r--contrib/libs/curl/ya.make2
-rw-r--r--contrib/python/ydb/py3/.dist-info/METADATA8
-rw-r--r--contrib/python/ydb/py3/README.md6
-rw-r--r--contrib/python/ydb/py3/ya.make2
-rw-r--r--contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_query_public_types.py25
-rw-r--r--contrib/python/ydb/py3/ydb/aio/__init__.py2
-rw-r--r--contrib/python/ydb/py3/ydb/aio/query/__init__.py10
-rw-r--r--contrib/python/ydb/py3/ydb/aio/query/pool.py112
-rw-r--r--contrib/python/ydb/py3/ydb/aio/query/session.py25
-rw-r--r--contrib/python/ydb/py3/ydb/aio/query/transaction.py8
-rw-r--r--contrib/python/ydb/py3/ydb/query/__init__.py14
-rw-r--r--contrib/python/ydb/py3/ydb/query/base.py31
-rw-r--r--contrib/python/ydb/py3/ydb/query/pool.py131
-rw-r--r--contrib/python/ydb/py3/ydb/query/session.py40
-rw-r--r--contrib/python/ydb/py3/ydb/query/transaction.py6
-rw-r--r--contrib/python/ydb/py3/ydb/settings.py20
-rw-r--r--contrib/python/ydb/py3/ydb/table.py4
-rw-r--r--contrib/python/ydb/py3/ydb/ydb_version.py2
-rw-r--r--library/cpp/http/push_parser/http_parser.cpp4
-rw-r--r--library/cpp/http/push_parser/http_parser.h2
-rw-r--r--library/cpp/svnversion/ya.make6
-rw-r--r--library/cpp/threading/thread_local/thread_local.h10
-rw-r--r--library/python/monlib/metric_registry.pyx5
-rw-r--r--yt/yt/client/api/queue_client.h2
-rw-r--r--yt/yt/client/driver/driver.cpp3
-rw-r--r--yt/yt/core/concurrency/action_queue.cpp4
-rw-r--r--yt/yt/core/concurrency/profiling_helpers.cpp12
-rw-r--r--yt/yt/core/rpc/channel_detail.cpp4
-rw-r--r--yt/yt/core/rpc/config.cpp24
-rw-r--r--yt/yt/core/rpc/config.h50
-rw-r--r--yt/yt/core/rpc/public.h1
-rw-r--r--yt/yt/core/rpc/service_detail.cpp14
-rw-r--r--yt/yt/library/tracing/jaeger/sampler.cpp3
-rw-r--r--yt/yt/library/ytprof/allocation_tag_profiler/allocation_tag_profiler.cpp3
52 files changed, 770 insertions, 276 deletions
diff --git a/build/conf/python.conf b/build/conf/python.conf
index 29c1f712fa..30b1fe2d04 100644
--- a/build/conf/python.conf
+++ b/build/conf/python.conf
@@ -179,6 +179,24 @@ macro NO_YMAKE_PYTHON3() {
SET(YMAKE_PYTHON3_PEERDIR)
}
+FLAKE_EXTRA_PARAMS=
+FLAKE_MIGRATIONS_CONFIG=
+FLAKE_CONFIG_FILES=build/config/tests/flake8/flake8.conf $FLAKE_MIGRATIONS_CONFIG
+when ($DISABLE_FLAKE8_MIGRATIONS == "yes") {
+ FLAKE_EXTRA_PARAMS="DISABLE_FLAKE8_MIGRATIONS=yes"
+}
+otherwise {
+ FLAKE_MIGRATIONS_CONFIG=build/rules/flake8/migrations.yaml
+}
+
+when ($FLAKE8_FILE_PROCESSING_TIME == "") {
+ FLAKE8_FILE_PROCESSING_TIME=1.5
+}
+
+when ($BLACK_FILE_PROCESSING_TIME == "") {
+ BLACK_FILE_PROCESSING_TIME=1.5
+}
+
# tag:python-specific tag:deprecated tag:internal
module _PY2_PROGRAM: _BASE_PY_PROGRAM {
SET(MODULE_LANG PY2)
@@ -186,6 +204,7 @@ module _PY2_PROGRAM: _BASE_PY_PROGRAM {
# Looks like we cannot avoid copy-paste util ymake supports multiple inheritance
# We need to attach coverage.extractor to every py_program target, except pytest targets
ADD_YTEST($MODULE_PREFIX$REALPRJNAME coverage.extractor)
+ STYLE_PY2_FLAKE8()
}
# tag:python-specific tag:deprecated
@@ -247,25 +266,39 @@ multimodule PY3_PROGRAM {
}
# tag:python-specific tag:test
-STYLE_PYTHON_VALUE=no
-STYLE_PYTHON_PYPROJECT_VALUE=
### @usage: STYLE_PYTHON([pyproject])
###
### Check python3 sources for style issues using black.
+BLACK_CONFIG_FILES=
macro STYLE_PYTHON(pyproject...) {
- SET(STYLE_PYTHON_VALUE yes)
- SET(STYLE_PYTHON_PYPROJECT_VALUE ${pyproject})
+ BLACK_CONFIG_FILES=$pyproject build/config/tests/py_style/config.toml
+ _ADD_PY_LINTER_CHECK(NAME black LINTER tools/black_linter/black_linter FILE_PROCESSING_TIME $BLACK_FILE_PROCESSING_TIME CONFIGS $BLACK_CONFIG_FILES)
}
# tag:python-specific tag:test
-STYLE_RUFF_VALUE=no
-RUFF_CONFIG_PATHS_FILE=${ARCADIA_ROOT}/build/config/tests/ruff/ruff_config_paths.json
### @usage: STYLE_RUFF()
###
### Check python3 sources for style issues using ruff.
+RUFF_CONFIG_PATHS_FILE=build/config/tests/ruff/ruff_config_paths.json
macro STYLE_RUFF() {
- SET(STYLE_RUFF_VALUE yes)
- SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${RUFF_CONFIG_PATHS_FILE})
+ SET_APPEND(_MAKEFILE_INCLUDE_LIKE_DEPS ${ARCADIA_ROOT}/${RUFF_CONFIG_PATHS_FILE})
+ _ADD_PY_LINTER_CHECK(NAME ruff LINTER tools/ruff_linter/bin/ruff_linter GLOBAL_RESOURCES build/external_resources/ruff CONFIGS $RUFF_CONFIG_PATHS_FILE)
+}
+
+# tag:python-specific tag:test
+### @usage: STYLE_FLAKE8()
+###
+### Check python3 sources for style issues using flake8.
+macro STYLE_FLAKE8() {
+ _ADD_PY_LINTER_CHECK(NAME flake8 LINTER tools/flake8_linter/flake8_linter GLOBAL_RESOURCES build/external_resources/flake8_py3 FILE_PROCESSING_TIME $FLAKE8_FILE_PROCESSING_TIME CONFIGS $FLAKE_CONFIG_FILES EXTRA_PARAMS $FLAKE_EXTRA_PARAMS)
+}
+
+# tag:python-specific tag:test
+### @usage: STYLE_PY2_FLAKE8()
+###
+### Check python3 sources for style issues using flake8.
+macro STYLE_PY2_FLAKE8() {
+ _ADD_PY_LINTER_CHECK(NAME py2_flake8 LINTER tools/flake8_linter/flake8_linter GLOBAL_RESOURCES build/external_resources/flake8_py2 FILE_PROCESSING_TIME $FLAKE8_FILE_PROCESSING_TIME CONFIGS $FLAKE_CONFIG_FILES EXTRA_PARAMS $FLAKE_EXTRA_PARAMS)
}
# tag:python-specific tag:test
@@ -302,6 +335,7 @@ module PYTEST_BIN: _BASE_PYTEST {
.DEFAULT_NAME_GENERATOR=FullPath
.ARGS_PARSER=Base
SETUP_PYTEST_BIN()
+ STYLE_PY2_FLAKE8()
}
# tag:python-specific tag:test
@@ -344,6 +378,7 @@ module PY3TEST_BIN: _BASE_PY3_PROGRAM {
.ARGS_PARSER=Base
SET(MODULE_LANG PY3)
SETUP_PYTEST_BIN()
+ STYLE_FLAKE8()
PEERDIR+=library/python/pytest
}
@@ -565,6 +600,7 @@ module PY2_LIBRARY: _LIBRARY {
}
SET(MODULE_LANG PY2)
+ STYLE_PY2_FLAKE8()
ADD_CLANG_TIDY()
when ($TIDY_ENABLED == "yes") {
_MAKEFILE_INCLUDE_LIKE_DEPS+=${ARCADIA_ROOT}/build/yandex_specific/config/clang_tidy/tidy_project_map.json
@@ -611,6 +647,7 @@ module PY3_LIBRARY: _LIBRARY {
}
SET(MODULE_LANG PY3)
+ STYLE_FLAKE8()
ADD_CLANG_TIDY()
when ($TIDY_ENABLED == "yes") {
_MAKEFILE_INCLUDE_LIKE_DEPS+=${ARCADIA_ROOT}/build/yandex_specific/config/clang_tidy/tidy_project_map.json
@@ -749,6 +786,7 @@ module _BASE_PY3_PROGRAM: _BASE_PROGRAM {
module PY3_PROGRAM_BIN: _BASE_PY3_PROGRAM {
# Look's like we cannot avoid copy-paste util ymake supports multiple inheritance
# We need to attach coverage.extractor to every py_program target, except pytest targets
+ STYLE_FLAKE8()
ADD_YTEST($MODULE_PREFIX$REALPRJNAME coverage.extractor)
}
diff --git a/build/export_generators/cmake/cmake/antlr.cmake b/build/export_generators/cmake/cmake/antlr.cmake
index d203fd9c88..3bbce44a67 100644
--- a/build/export_generators/cmake/cmake/antlr.cmake
+++ b/build/export_generators/cmake/cmake/antlr.cmake
@@ -1,7 +1,6 @@
function(ensure_antlr)
if(NOT ANTLR3_EXECUTABLE)
- find_program(ANTLR3_EXECUTABLE
- NAMES antlr3)
+ find_program(ANTLR3_EXECUTABLE NAMES antlr3)
if (NOT ANTLR3_EXECUTABLE)
message(FATAL_ERROR "Unable to find antlr3 program. Please install antlr3 and make sure executable file present in the $PATH env.")
endif()
@@ -14,7 +13,7 @@ function(run_antlr)
set(oneValueArgs WORKING_DIRECTORY)
set(multiValueArgs OUTPUT DEPENDS ANTLER_ARGS)
cmake_parse_arguments(
- RUN_ANTLR
+ RUN_ANTLR3
"${options}"
"${oneValueArgs}"
"${multiValueArgs}"
@@ -22,10 +21,10 @@ function(run_antlr)
)
add_custom_command(
- OUTPUT ${RUN_ANTLR_OUTPUT}
- COMMAND ${ANTLR3_EXECUTABLE} ${RUN_ANTLR_ANTLER_ARGS}
- WORKING_DIRECTORY ${RUN_ANTLR_WORKING_DIRECTORY}
- DEPENDS ${RUN_ANTLR_DEPENDS}
+ OUTPUT ${RUN_ANTLR3_OUTPUT}
+ COMMAND ${ANTLR3_EXECUTABLE} ${RUN_ANTLR3_ANTLER_ARGS}
+ WORKING_DIRECTORY ${RUN_ANTLR3_WORKING_DIRECTORY}
+ DEPENDS ${RUN_ANTLR3_DEPENDS}
)
endfunction()
diff --git a/build/export_generators/cmake/cmake/antlr4.cmake b/build/export_generators/cmake/cmake/antlr4.cmake
new file mode 100644
index 0000000000..df3465c1b3
--- /dev/null
+++ b/build/export_generators/cmake/cmake/antlr4.cmake
@@ -0,0 +1,30 @@
+function(ensure_antlr4)
+ if(NOT ANTLR4_EXECUTABLE)
+ find_program(ANTLR4_EXECUTABLE NAMES antlr4)
+ if (NOT ANTLR4_EXECUTABLE)
+ message(FATAL_ERROR "Unable to find antlr4 program. Please install antlr4 and make sure executable file present in the $PATH env.")
+ endif()
+ endif()
+endfunction()
+
+function(run_antlr4)
+ ensure_antlr4()
+ set(options "")
+ set(oneValueArgs WORKING_DIRECTORY)
+ set(multiValueArgs OUTPUT DEPENDS ANTLER_ARGS)
+ cmake_parse_arguments(
+ RUN_ANTLR4
+ "${options}"
+ "${oneValueArgs}"
+ "${multiValueArgs}"
+ ${ARGN}
+ )
+
+ add_custom_command(
+ OUTPUT ${RUN_ANTLR4_OUTPUT}
+ COMMAND ${ANTLR4_EXECUTABLE} ${RUN_ANTLR4_ANTLER_ARGS}
+ WORKING_DIRECTORY ${RUN_ANTLR4_WORKING_DIRECTORY}
+ DEPENDS ${RUN_ANTLR4_DEPENDS}
+ )
+
+endfunction()
diff --git a/build/export_generators/cmake/generator.toml b/build/export_generators/cmake/generator.toml
index d253630ab6..4fcb521f2f 100644
--- a/build/export_generators/cmake/generator.toml
+++ b/build/export_generators/cmake/generator.toml
@@ -188,6 +188,7 @@ set_yunittest_property="skip"
copy_file="skip"
configure_file="skip"
run_antlr="skip"
+run_antlr4="skip"
set_property="skip"
add_jar="skip"
set_property_escaped="skip"
@@ -270,11 +271,16 @@ copy=["cmake/FindJNITarget.cmake"]
add_values=[{attr="includes", values=["cmake/FindJNITarget.cmake"]}]
[[rules]]
-attrs=["run_antlr"]
+attrs=["target_commands-macro=run_antlr"]
copy=["cmake/antlr.cmake"]
add_values=[{attr="includes", values=["cmake/antlr.cmake"]}]
[[rules]]
+attrs=["target_commands-macro=run_antlr4"]
+copy=["cmake/antlr4.cmake"]
+add_values=[{attr="includes", values=["cmake/antlr4.cmake"]}]
+
+[[rules]]
attrs=[
"target_bison_parser",
"target_flex_lexers"
diff --git a/build/export_generators/cmake/target_cmake_lists.jinja b/build/export_generators/cmake/target_cmake_lists.jinja
index a333641721..683cf2c102 100644
--- a/build/export_generators/cmake/target_cmake_lists.jinja
+++ b/build/export_generators/cmake/target_cmake_lists.jinja
@@ -30,6 +30,9 @@
{%- set only_headers_like_sources_library = false -%}
{%- endif -%}
{%- endif -%}
+{%- if current_target.target_commands|selectattr('macro', 'eq', 'add_custom_command')|length -%}
+{%- set only_headers_like_sources_library = false -%}
+{%- endif -%}
{%- else -%}
{%- set only_headers_like_sources_library = false -%}
{%- endif -%}
diff --git a/build/export_generators/hardcoded-cmake/cmake/antlr.cmake b/build/export_generators/hardcoded-cmake/cmake/antlr.cmake
index d203fd9c88..3bbce44a67 100644
--- a/build/export_generators/hardcoded-cmake/cmake/antlr.cmake
+++ b/build/export_generators/hardcoded-cmake/cmake/antlr.cmake
@@ -1,7 +1,6 @@
function(ensure_antlr)
if(NOT ANTLR3_EXECUTABLE)
- find_program(ANTLR3_EXECUTABLE
- NAMES antlr3)
+ find_program(ANTLR3_EXECUTABLE NAMES antlr3)
if (NOT ANTLR3_EXECUTABLE)
message(FATAL_ERROR "Unable to find antlr3 program. Please install antlr3 and make sure executable file present in the $PATH env.")
endif()
@@ -14,7 +13,7 @@ function(run_antlr)
set(oneValueArgs WORKING_DIRECTORY)
set(multiValueArgs OUTPUT DEPENDS ANTLER_ARGS)
cmake_parse_arguments(
- RUN_ANTLR
+ RUN_ANTLR3
"${options}"
"${oneValueArgs}"
"${multiValueArgs}"
@@ -22,10 +21,10 @@ function(run_antlr)
)
add_custom_command(
- OUTPUT ${RUN_ANTLR_OUTPUT}
- COMMAND ${ANTLR3_EXECUTABLE} ${RUN_ANTLR_ANTLER_ARGS}
- WORKING_DIRECTORY ${RUN_ANTLR_WORKING_DIRECTORY}
- DEPENDS ${RUN_ANTLR_DEPENDS}
+ OUTPUT ${RUN_ANTLR3_OUTPUT}
+ COMMAND ${ANTLR3_EXECUTABLE} ${RUN_ANTLR3_ANTLER_ARGS}
+ WORKING_DIRECTORY ${RUN_ANTLR3_WORKING_DIRECTORY}
+ DEPENDS ${RUN_ANTLR3_DEPENDS}
)
endfunction()
diff --git a/build/export_generators/hardcoded-cmake/cmake/antlr4.cmake b/build/export_generators/hardcoded-cmake/cmake/antlr4.cmake
new file mode 100644
index 0000000000..df3465c1b3
--- /dev/null
+++ b/build/export_generators/hardcoded-cmake/cmake/antlr4.cmake
@@ -0,0 +1,30 @@
+function(ensure_antlr4)
+ if(NOT ANTLR4_EXECUTABLE)
+ find_program(ANTLR4_EXECUTABLE NAMES antlr4)
+ if (NOT ANTLR4_EXECUTABLE)
+ message(FATAL_ERROR "Unable to find antlr4 program. Please install antlr4 and make sure executable file present in the $PATH env.")
+ endif()
+ endif()
+endfunction()
+
+function(run_antlr4)
+ ensure_antlr4()
+ set(options "")
+ set(oneValueArgs WORKING_DIRECTORY)
+ set(multiValueArgs OUTPUT DEPENDS ANTLER_ARGS)
+ cmake_parse_arguments(
+ RUN_ANTLR4
+ "${options}"
+ "${oneValueArgs}"
+ "${multiValueArgs}"
+ ${ARGN}
+ )
+
+ add_custom_command(
+ OUTPUT ${RUN_ANTLR4_OUTPUT}
+ COMMAND ${ANTLR4_EXECUTABLE} ${RUN_ANTLR4_ANTLER_ARGS}
+ WORKING_DIRECTORY ${RUN_ANTLR4_WORKING_DIRECTORY}
+ DEPENDS ${RUN_ANTLR4_DEPENDS}
+ )
+
+endfunction()
diff --git a/build/export_generators/hardcoded-cmake/generator.toml b/build/export_generators/hardcoded-cmake/generator.toml
index 3e36586f39..e8c2a28b5c 100644
--- a/build/export_generators/hardcoded-cmake/generator.toml
+++ b/build/export_generators/hardcoded-cmake/generator.toml
@@ -72,6 +72,11 @@ copy=["cmake/antlr.cmake"]
add_values=[{attr="includes", values=["cmake/antlr.cmake"]}]
[[rules]]
+attrs=["run_antlr4"]
+copy=["cmake/antlr4.cmake"]
+add_values=[{attr="includes", values=["cmake/antlr4.cmake"]}]
+
+[[rules]]
attrs=[
"target_bison_parser",
"target_flex_lexers"
diff --git a/build/plugins/_dart_fields.py b/build/plugins/_dart_fields.py
index 4676499399..07ff9ce0f2 100644
--- a/build/plugins/_dart_fields.py
+++ b/build/plugins/_dart_fields.py
@@ -22,6 +22,9 @@ CANON_OUTPUT_STORAGE = 'canondata_storage'
KTLINT_CURRENT_EDITOR_CONFIG = "arcadia/build/platform/java/ktlint/.editorconfig"
KTLINT_OLD_EDITOR_CONFIG = "arcadia/build/platform/java/ktlint_old/.editorconfig"
+ARCADIA_ROOT = '${ARCADIA_ROOT}/'
+SOURCE_ROOT_SHORT = '$S/'
+
class DartValueError(ValueError):
pass
@@ -256,6 +259,15 @@ def _get_ts_test_data_dirs(unit):
)
+@_common.lazy
+def get_linter_configs(unit, config_paths):
+ rel_config_path = _common.rootrel_arc_src(config_paths, unit)
+ arc_config_path = unit.resolve_arc_path(rel_config_path)
+ abs_config_path = unit.resolve(arc_config_path)
+ with open(abs_config_path, 'r') as fd:
+ return list(json.load(fd).values())
+
+
class AndroidApkTestActivity:
KEY = 'ANDROID_APK_TEST_ACTIVITY'
@@ -360,10 +372,10 @@ class CustomDependencies:
@classmethod
def depends_with_linter(cls, unit, flat_args, spec_args):
- deps = []
- _, linter = flat_args
- deps.append(os.path.dirname(linter))
- deps += spec_args.get('DEPENDS', [])
+ linter = Linter.value(unit, flat_args, spec_args)[Linter.KEY]
+ deps = spec_args.get('DEPENDS', []) + [os.path.dirname(linter)]
+ for dep in deps:
+ unit.ondepends(dep)
return {cls.KEY: " ".join(deps)}
@classmethod
@@ -478,7 +490,7 @@ class JavaClasspathCmdType:
unit.path(), java_cp_arg_type
)
)
- raise DartValueError
+ raise DartValueError()
return {cls.KEY: java_cp_arg_type}
@@ -528,9 +540,69 @@ class KtlintBinary:
return {cls.KEY: value}
+class Linter:
+ KEY = 'LINTER'
+
+ @classmethod
+ def value(cls, unit, flat_args, spec_args):
+ return {cls.KEY: spec_args['LINTER'][0]}
+
+
+class LintConfigs:
+ KEY = 'LINT-CONFIGS'
+
+ @classmethod
+ def value(cls, unit, flat_args, spec_args):
+ resolved_configs = []
+ configs = spec_args.get('CONFIGS', [])
+ for cfg in configs:
+ filename = unit.resolve(SOURCE_ROOT_SHORT + cfg)
+ if not os.path.exists(filename):
+ message = 'Configuration file {} is not found'.format(filename)
+ raise DartValueError(message)
+ resolved_configs.append(cfg)
+ if os.path.splitext(filename)[-1] == '.json':
+ cfgs = get_linter_configs(unit, cfg)
+ for c in cfgs:
+ filename = unit.resolve(SOURCE_ROOT_SHORT + c)
+ if not os.path.exists(filename):
+ message = 'Configuration file {} is not found'.format(filename)
+ raise DartValueError(message)
+ resolved_configs.append(c)
+ return {cls.KEY: serialize_list(resolved_configs)}
+
+
+class LintExtraParams:
+ KEY = 'LINT-EXTRA-PARAMS'
+
+ @classmethod
+ def from_macro_args(cls, unit, flat_args, spec_args):
+ extra_params = spec_args.get('EXTRA_PARAMS', [])
+ for arg in extra_params:
+ if '=' not in arg:
+ message = 'Wrong EXTRA_PARAMS value: "{}". Values must have format "name=value".'.format(arg)
+ raise DartValueError(message)
+ return {cls.KEY: serialize_list(extra_params)}
+
+
class LintFileProcessingTime:
KEY = 'LINT-FILE-PROCESSING-TIME'
+ @classmethod
+ def from_macro_args(cls, unit, flat_args, spec_args):
+ return {cls.KEY: spec_args.get('FILE_PROCESSING_TIME', [''])[0]}
+
+
+class LintName:
+ KEY = 'LINT-NAME'
+
+ @classmethod
+ def value(cls, unit, flat_args, spec_args):
+ lint_name = spec_args['NAME'][0]
+ if lint_name in ('flake8', 'py2_flake8') and (unit.get('DISABLE_FLAKE8') or 'no') == 'yes':
+ raise DartValueError('Flake8 linting is disabled by `DISABLE_FLAKE8`')
+ return {cls.KEY: lint_name}
+
class ModuleLang:
KEY = 'MODULE_LANG'
@@ -841,9 +913,8 @@ class TestData:
props, error_mgs = extract_java_system_properties(unit, get_values_list(unit, 'SYSTEM_PROPERTIES_VALUE'))
if error_mgs:
- # TODO move error reporting out of field classes
ymake.report_configure_error(error_mgs)
- raise DartValueError
+ raise DartValueError()
for prop in props:
if prop['type'] == 'file':
test_data.append(prop['path'].replace('${ARCADIA_ROOT}', 'arcadia'))
@@ -949,7 +1020,8 @@ class TestedProjectName:
class TestFiles:
KEY = 'TEST-FILES'
- # TODO remove FILES, see DEVTOOLS-7052
+ # TODO remove FILES, see DEVTOOLS-7052, currently it's required
+ # https://a.yandex-team.ru/arcadia/devtools/ya/test/dartfile/__init__.py?rev=r14292146#L10
KEY2 = 'FILES'
@classmethod
@@ -993,32 +1065,55 @@ class TestFiles:
@classmethod
def test_srcs(cls, unit, flat_args, spec_args):
test_files = get_values_list(unit, 'TEST_SRCS_VALUE')
- return {cls.KEY: serialize_list(test_files)}
+ value = serialize_list(test_files)
+ return {cls.KEY: value, cls.KEY2: value}
@classmethod
def ts_test_srcs(cls, unit, flat_args, spec_args):
test_files = get_values_list(unit, "_TS_TEST_SRCS_VALUE")
test_files = _resolve_module_files(unit, unit.get("MODDIR"), test_files)
- return {cls.KEY: serialize_list(test_files)}
+ value = serialize_list(test_files)
+ return {cls.KEY: value, cls.KEY2: value}
@classmethod
def ts_input_files(cls, unit, flat_args, spec_args):
typecheck_files = get_values_list(unit, "TS_INPUT_FILES")
test_files = [_common.resolve_common_const(f) for f in typecheck_files]
- return {cls.KEY: serialize_list(test_files)}
+ value = serialize_list(test_files)
+ return {cls.KEY: value, cls.KEY2: value}
@classmethod
def ts_lint_srcs(cls, unit, flat_args, spec_args):
test_files = get_values_list(unit, "_TS_LINT_SRCS_VALUE")
test_files = _resolve_module_files(unit, unit.get("MODDIR"), test_files)
- return {cls.KEY: serialize_list(test_files)}
+ value = serialize_list(test_files)
+ return {cls.KEY: value, cls.KEY2: value}
@classmethod
def stylesheets(cls, unit, flat_args, spec_args):
test_files = get_values_list(unit, "_TS_STYLELINT_FILES")
test_files = _resolve_module_files(unit, unit.get("MODDIR"), test_files)
+ value = serialize_list(test_files)
+ return {cls.KEY: value, cls.KEY2: value}
- return {cls.KEY: serialize_list(test_files)}
+ @classmethod
+ def py_linter_files(cls, unit, flat_args, spec_args):
+ files = unit.get('PY_LINTER_FILES')
+ if not files:
+ raise DartValueError()
+ files = json.loads(files)
+ test_files = []
+ for path in files:
+ if path.startswith(ARCADIA_ROOT):
+ test_files.append(path.replace(ARCADIA_ROOT, SOURCE_ROOT_SHORT, 1))
+ elif path.startswith(SOURCE_ROOT_SHORT):
+ test_files.append(path)
+ if not test_files:
+ lint_name = LintName.value(unit, flat_args, spec_args)[LintName.KEY]
+ message = 'No files to lint for {}'.format(lint_name)
+ raise DartValueError(message)
+ test_files = serialize_list(test_files)
+ return {cls.KEY: test_files, cls.KEY2: test_files}
class TestEnv:
@@ -1097,6 +1192,10 @@ class TestName:
test_name = os.path.basename(os.path.join(unit.path(), unit.filename()).replace(".pkg", ""))
return {cls.KEY: os.path.splitext(test_name)[0]}
+ @classmethod
+ def name_from_macro_args(cls, unit, flat_args, spec_args):
+ return {cls.KEY: spec_args['NAME'][0]}
+
class TestPartition:
KEY = 'TEST_PARTITION'
@@ -1181,9 +1280,8 @@ class SystemProperties:
def value(cls, unit, flat_args, spec_args):
props, error_mgs = extract_java_system_properties(unit, get_values_list(unit, 'SYSTEM_PROPERTIES_VALUE'))
if error_mgs:
- # TODO move error reporting out of field classes
ymake.report_configure_error(error_mgs)
- raise DartValueError
+ raise DartValueError()
props = base64.b64encode(six.ensure_binary(json.dumps(props)))
return {cls.KEY: props}
diff --git a/build/plugins/pybuild.py b/build/plugins/pybuild.py
index 231107b0a4..bca88ebedc 100644
--- a/build/plugins/pybuild.py
+++ b/build/plugins/pybuild.py
@@ -183,62 +183,9 @@ def add_python_lint_checks(unit, py_ver, files):
if files and no_lint_value not in ("none", "none_internal"):
resolved_files = get_resolved_files()
if resolved_files:
- flake8_cfg = 'build/config/tests/flake8/flake8.conf'
- migrations_cfg = 'build/rules/flake8/migrations.yaml'
- resource = "build/external_resources/flake8_py{}".format(py_ver)
- lint_name = "py2_flake8" if py_ver == 2 else "flake8"
- params = [lint_name, "tools/flake8_linter/flake8_linter"]
- params += ["FILES"] + resolved_files
- params += ["GLOBAL_RESOURCES", resource]
- params += [
- "FILE_PROCESSING_TIME",
- unit.get("FLAKE8_FILE_PROCESSING_TIME") or DEFAULT_FLAKE8_FILE_PROCESSING_TIME,
- ]
-
- extra_params = []
- if unit.get("DISABLE_FLAKE8_MIGRATIONS") == "yes":
- extra_params.append("DISABLE_FLAKE8_MIGRATIONS=yes")
- config_files = [flake8_cfg, '']
- else:
- config_files = [flake8_cfg, migrations_cfg]
- params += ["CONFIGS"] + config_files
-
- if extra_params:
- params += ["EXTRA_PARAMS"] + extra_params
- unit.on_add_linter_check(params)
-
- # ruff related stuff
- if unit.get('STYLE_RUFF_VALUE') == 'yes':
- if no_lint_value in ("none", "none_internal"):
- ymake.report_configure_error(
- 'NO_LINT() and STYLE_RUFF() can\'t be enabled both at the same time',
- )
-
- resolved_files = get_resolved_files()
- if resolved_files:
- resource = "build/external_resources/ruff"
- params = ["ruff", "tools/ruff_linter/bin/ruff_linter"]
- params += ["FILES"] + resolved_files
- params += ["GLOBAL_RESOURCES", resource]
- configs = [
- rootrel_arc_src(unit.get('RUFF_CONFIG_PATHS_FILE'), unit),
- 'build/config/tests/ruff/ruff.toml',
- ] + get_ruff_configs(unit)
- params += ['CONFIGS'] + configs
- unit.on_add_linter_check(params)
-
- if files and unit.get('STYLE_PYTHON_VALUE') == 'yes' and is_py3(unit):
- resolved_files = get_resolved_files()
- if resolved_files:
- black_cfg = unit.get('STYLE_PYTHON_PYPROJECT_VALUE') or 'build/config/tests/py_style/config.toml'
- params = ['black', 'tools/black_linter/black_linter']
- params += ['FILES'] + resolved_files
- params += ['CONFIGS', black_cfg]
- params += [
- "FILE_PROCESSING_TIME",
- unit.get("BLACK_FILE_PROCESSING_TIME") or DEFAULT_BLACK_FILE_PROCESSING_TIME,
- ]
- unit.on_add_linter_check(params)
+ # repeated PY_SRCS, TEST_SCRS+PY_SRCS
+ collected = json.loads(unit.get('PY_LINTER_FILES')) if unit.get('PY_LINTER_FILES') else []
+ unit.set(['PY_LINTER_FILES', json.dumps(resolved_files + collected)])
def is_py3(unit):
diff --git a/build/plugins/ytest.py b/build/plugins/ytest.py
index aa41dff565..46ee8d2599 100644
--- a/build/plugins/ytest.py
+++ b/build/plugins/ytest.py
@@ -947,6 +947,68 @@ def onsetup_run_python(unit):
unit.ondepends('contrib/tools/python')
+@_common.lazy
+def get_linter_configs(unit, config_paths):
+ rel_config_path = _common.rootrel_arc_src(config_paths, unit)
+ arc_config_path = unit.resolve_arc_path(rel_config_path)
+ abs_config_path = unit.resolve(arc_config_path)
+ with open(abs_config_path, 'r') as fd:
+ return list(json.load(fd).values())
+
+
+@df.with_fields(
+ (
+ df.LintName.value,
+ df.TestFiles.py_linter_files,
+ df.LintConfigs.value,
+ df.LintExtraParams.from_macro_args,
+ df.TestName.name_from_macro_args,
+ df.TestedProjectName.unit_name,
+ df.SourceFolderPath.normalized,
+ df.TestEnv.value,
+ df.UseArcadiaPython.value,
+ df.LintFileProcessingTime.from_macro_args,
+ df.Linter.value,
+ df.CustomDependencies.depends_with_linter,
+ )
+)
+def on_add_py_linter_check(fields, unit, *args):
+ if unit.get("TIDY") == "yes":
+ return
+
+ no_lint_value = _common.get_no_lint_value(unit)
+ if no_lint_value in ("none", "none_internal"):
+ return
+
+ unlimited = -1
+ keywords = {
+ "NAME": 1,
+ "LINTER": 1,
+ "DEPENDS": unlimited,
+ "FILES": unlimited,
+ "CONFIGS": unlimited,
+ "GLOBAL_RESOURCES": unlimited,
+ "FILE_PROCESSING_TIME": 1,
+ "EXTRA_PARAMS": unlimited,
+ }
+ _, spec_args = _common.sort_by_keywords(keywords, args)
+
+ global_resources = spec_args.get('GLOBAL_RESOURCES', [])
+ for resource in global_resources:
+ unit.onpeerdir(resource)
+ try:
+ dart_record = create_dart_record(fields, unit, (), spec_args)
+ except df.DartValueError as e:
+ if msg := str(e):
+ unit.message(['WARN', msg])
+ return
+ dart_record[df.ScriptRelPath.KEY] = 'custom_lint'
+
+ data = dump_test(unit, dart_record)
+ if data:
+ unit.set_property(["DART_DATA", data])
+
+
def on_add_linter_check(unit, *args):
if unit.get("TIDY") == "yes":
return
diff --git a/build/sysincl/windows.yml b/build/sysincl/windows.yml
index 5297456c03..b4899d2aaa 100644
--- a/build/sysincl/windows.yml
+++ b/build/sysincl/windows.yml
@@ -18,6 +18,7 @@
- capi.h
- cet.h
- cfg.h
+ - combaseapi.h
- comdef.h
- commctrl.h
- commdlg.h
@@ -48,6 +49,7 @@
- devguid.h
- dinput.h
- direct.h
+ - directxmath.h
- dispex.h
- dos.h
- downloadmgr.h
@@ -81,6 +83,7 @@
- fltuser.h
- fltuserstructures.h
- fttypes.h
+ - functiondiscoverykeys_devpkey.h
- fwpmu.h
- gb18030.h
- gdiplus.h
@@ -188,6 +191,7 @@
- processtopologyapi.h
- profileapi.h
- projectedfslib.h
+ - propidl.h
- provider.h
- psapi.h
- pshpack1.h
@@ -340,3 +344,12 @@
- wrl/ftm.h
- wrl/implements.h
- wrl/wrappers/corewrappers.h
+
+# These headers are required to use Windows Graphics Capture API.
+- case_sensitive: false
+ includes:
+ - windows.graphics.capture.interop.h
+ - windows.graphics.directx.direct3d11.interop.h
+ - winrt/windows.foundation.metadata.h
+ - winrt/windows.graphics.capture.h
+ - winrt/windows.system.h
diff --git a/build/ymake.core.conf b/build/ymake.core.conf
index ce46cc9b1e..50579af8f9 100644
--- a/build/ymake.core.conf
+++ b/build/ymake.core.conf
@@ -2361,11 +2361,6 @@ macro PACK(Ext) {
SET(PACKED_PACKAGE_EXT $Ext)
}
-PACKAGE_STRICT_VALUE=
-macro PACKAGE_STRICT() {
- SET(PACKAGE_STRICT_VALUE yes)
-}
-
### @usage: PACKAGE(name)
###
### Module collects what is described directly inside it, builds and collects all its transitively available PEERDIRs.
@@ -4610,9 +4605,10 @@ macro RUN_PYTHON3(ScriptPath, IN{input}[], IN_NOPARSE{input}[], OUT{output}[], O
}
# tag:java-specific
-macro _RUN_JAVA(IN{input}[], IN_NOPARSE{input}[], OUT{output}[], OUT_NOAUTO{output}[], OUTPUT_INCLUDES[], INDUCED_DEPS[], TOOL[], STDOUT="", STDOUT_NOAUTO="", CWD="", ENV[], HIDE_OUTPUT?"stderr2stdout":"stdout2stderr", Args...) {
+macro _RUN_ANTLR_BASE(IN{input}[], IN_NOPARSE{input}[], OUT{output}[], OUT_NOAUTO{output}[], OUTPUT_INCLUDES[], INDUCED_DEPS[], TOOL[], STDOUT="", STDOUT_NOAUTO="", CWD="", JAR[], SEM="run_java", SEM_ARGS_PREFIX="", ENV[], HIDE_OUTPUT?"stderr2stdout":"stdout2stderr", Args...) {
PEERDIR(build/platform/java/jdk $JDK_RESOURCE_PEERDIR)
- .CMD=${cwd:CWD} ${env:ENV} $YMAKE_PYTHON ${input;pre=build/scripts/:HIDE_OUTPUT.py} $JDK_RESOURCE/bin/java $Args ${hide;tool:TOOL} ${hide;input:IN} ${input;context=TEXT;hide:IN_NOPARSE} ${output_include;hide:OUTPUT_INCLUDES} $INDUCED_DEPS ${hide;output:OUT} ${hide;noauto;output:OUT_NOAUTO} ${stdout;output:STDOUT} ${stdout;output;noauto:STDOUT_NOAUTO} ${hide;kv:"p JV"} ${hide;kv:"pc light-blue"} ${hide;kv:"show_out"}
+ .CMD=${cwd:CWD} ${env:ENV} $YMAKE_PYTHON ${input;pre=build/scripts/:HIDE_OUTPUT.py} $JDK_RESOURCE/bin/java $JAR $Args ${hide;tool:TOOL} ${hide;input:IN} ${input;context=TEXT;hide:IN_NOPARSE} ${output_include;hide:OUTPUT_INCLUDES} $INDUCED_DEPS ${hide;output:OUT} ${hide;noauto;output:OUT_NOAUTO} ${stdout;output:STDOUT} ${stdout;output;noauto:STDOUT_NOAUTO} ${hide;kv:"p JV"} ${hide;kv:"pc light-blue"} ${hide;kv:"show_out"}
+ .SEM=$SEM OUTPUT ${output:OUT} ${noauto;output:OUT_NOAUTO} DEPENDS ${input:IN} ${pre=WORKING_DIRECTORY :CWD} $SEM_ARGS_PREFIX $Args && target_commands-ITEM && target_commands-macro $SEM && target_commands-args OUTPUT ${output:OUT} ${noauto;output:OUT_NOAUTO} DEPENDS ${input:IN} ${pre=WORKING_DIRECTORY :CWD} $SEM_ARGS_PREFIX $Args
}
### @usage: FROM_SANDBOX([FILE] resource_id [AUTOUPDATED script] [RENAME <resource files>] OUT_[NOAUTO] <output files> [EXECUTABLE] [OUTPUT_INCLUDES <include files>] [INDUCED_DEPS $VARs...])
@@ -4966,15 +4962,14 @@ macro ASM_PREINCLUDE(PREINCLUDES...) {
###
### Macro to invoke ANTLR3 generator (general case)
macro RUN_ANTLR(IN[], IN_NOPARSE[], OUT[], OUT_NOAUTO[], OUTPUT_INCLUDES[], INDUCED_DEPS[], CWD="", Args...) {
- _RUN_JAVA(-jar ${input:"contrib/java/antlr/antlr3/antlr.jar"} $Args IN $IN IN_NOPARSE $IN_NOPARSE OUT $OUT OUT_NOAUTO $OUT_NOAUTO OUTPUT_INCLUDES $OUTPUT_INCLUDES INDUCED_DEPS $INDUCED_DEPS ${pre=CWD :CWD})
- .SEM=run_antlr OUTPUT ${output:OUT} ${noauto;output:OUT_NOAUTO} DEPENDS ${input:IN} ${pre=WORKING_DIRECTORY :CWD} ANTLER_ARGS $Args && target_commands-ITEM && target_commands-macro run_antlr && target_commands-args OUTPUT ${output:OUT} ${noauto;output:OUT_NOAUTO} DEPENDS ${input:IN} ${pre=WORKING_DIRECTORY :CWD} ANTLER_ARGS $Args
+ _RUN_ANTLR_BASE($Args IN $IN IN_NOPARSE $IN_NOPARSE OUT $OUT OUT_NOAUTO $OUT_NOAUTO OUTPUT_INCLUDES $OUTPUT_INCLUDES INDUCED_DEPS $INDUCED_DEPS ${pre=CWD :CWD} JAR -jar ${input:"contrib/java/antlr/antlr3/antlr.jar"} SEM run_antlr SEM_ARGS_PREFIX ANTLER_ARGS)
}
### @usage: RUN_ANTLR4(Args...)
###
### Macro to invoke ANTLR4 generator (general case)
macro RUN_ANTLR4(IN[], IN_NOPARSE[], OUT[], OUT_NOAUTO[], OUTPUT_INCLUDES[], INDUCED_DEPS[], CWD="", Args...) {
- _RUN_JAVA(-jar ${input:"contrib/java/antlr/antlr4/antlr.jar"} $Args IN $IN IN_NOPARSE $IN_NOPARSE OUT $OUT OUT_NOAUTO $OUT_NOAUTO OUTPUT_INCLUDES $OUTPUT_INCLUDES INDUCED_DEPS $INDUCED_DEPS ${pre=CWD :CWD})
+ _RUN_ANTLR_BASE($Args IN $IN IN_NOPARSE $IN_NOPARSE OUT $OUT OUT_NOAUTO $OUT_NOAUTO OUTPUT_INCLUDES $OUTPUT_INCLUDES INDUCED_DEPS $INDUCED_DEPS ${pre=CWD :CWD} JAR -jar ${input:"contrib/java/antlr/antlr4/antlr.jar"} SEM run_antlr4 SEM_ARGS_PREFIX ANTLER_ARGS)
}
_ANTLR4_LISTENER_GRAMMAR=-listener
diff --git a/contrib/libs/curl/lib/content_encoding.c b/contrib/libs/curl/lib/content_encoding.c
index 8534718b5f..4167d4d684 100644
--- a/contrib/libs/curl/lib/content_encoding.c
+++ b/contrib/libs/curl/lib/content_encoding.c
@@ -38,7 +38,7 @@
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wvla"
#endif
-#error #include <brotli/decode.h>
+#include <brotli/decode.h>
#if defined(__GNUC__)
#pragma GCC diagnostic pop
#endif
diff --git a/contrib/libs/curl/lib/curl_config-linux.h b/contrib/libs/curl/lib/curl_config-linux.h
index 57ecc30b9b..22debd1fcd 100644
--- a/contrib/libs/curl/lib/curl_config-linux.h
+++ b/contrib/libs/curl/lib/curl_config-linux.h
@@ -176,10 +176,10 @@
#define HAVE_BOOL_T 1
/* if BROTLI is in use */
-/* #undef HAVE_BROTLI */
+#define HAVE_BROTLI 1
/* Define to 1 if you have the <brotli/decode.h> header file. */
-/* #undef HAVE_BROTLI_DECODE_H */
+#define HAVE_BROTLI_DECODE_H 1
/* Define to 1 if you have the __builtin_available function. */
/* #undef HAVE_BUILTIN_AVAILABLE */
@@ -395,7 +395,7 @@
/* #undef HAVE_LDAP_URL_PARSE */
/* Define to 1 if you have the `brotlidec' library (-lbrotlidec). */
-/* #undef HAVE_LIBBROTLIDEC */
+#define HAVE_LIBBROTLIDEC 1
/* Define to 1 if you have the <libgen.h> header file. */
#define HAVE_LIBGEN_H 1
diff --git a/contrib/libs/curl/lib/curl_config-osx.h b/contrib/libs/curl/lib/curl_config-osx.h
index b6694c6704..f6b6041caf 100644
--- a/contrib/libs/curl/lib/curl_config-osx.h
+++ b/contrib/libs/curl/lib/curl_config-osx.h
@@ -149,10 +149,10 @@
/* #undef HAVE_BORINGSSL */
/* if BROTLI is in use */
-/* #undef HAVE_BROTLI */
+#define HAVE_BROTLI 1
/* Define to 1 if you have the <brotli/decode.h> header file. */
-/* #undef HAVE_BROTLI_DECODE_H */
+#define HAVE_BROTLI_DECODE_H 1
/* Define to 1 if you have the __builtin_available function. */
#define HAVE_BUILTIN_AVAILABLE 1
@@ -407,7 +407,7 @@
/* #undef HAVE_LDAP_URL_PARSE */
/* Define to 1 if you have the `brotlidec' library (-lbrotlidec). */
-/* #undef HAVE_LIBBROTLIDEC */
+#define HAVE_LIBBROTLIDEC 1
/* Define to 1 if you have the <libgen.h> header file. */
#define HAVE_LIBGEN_H 1
diff --git a/contrib/libs/curl/lib/curl_config-win.h b/contrib/libs/curl/lib/curl_config-win.h
index 24d2561fba..89b070743b 100644
--- a/contrib/libs/curl/lib/curl_config-win.h
+++ b/contrib/libs/curl/lib/curl_config-win.h
@@ -435,7 +435,7 @@
#define HAVE_LIBZ 1
/* if brotli is available */
-/* #undef HAVE_BROTLI */
+#define HAVE_BROTLI 1
/* if your compiler supports LL */
#define HAVE_LL 1
diff --git a/contrib/libs/curl/lib/version.c b/contrib/libs/curl/lib/version.c
index d419dd49da..0aca34a4cd 100644
--- a/contrib/libs/curl/lib/version.c
+++ b/contrib/libs/curl/lib/version.c
@@ -67,7 +67,7 @@
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wvla"
#endif
-#error #include <brotli/decode.h>
+#include <brotli/decode.h>
#if defined(__GNUC__)
#pragma GCC diagnostic pop
#endif
diff --git a/contrib/libs/curl/ya.make b/contrib/libs/curl/ya.make
index 99b4afd269..5f55a73471 100644
--- a/contrib/libs/curl/ya.make
+++ b/contrib/libs/curl/ya.make
@@ -16,6 +16,8 @@ VERSION(8.5.0)
ORIGINAL_SOURCE(https://github.com/curl/curl/releases/download/curl-8_5_0/curl-8.5.0.tar.bz2)
PEERDIR(
+ contrib/libs/brotli/dec
+ contrib/libs/brotli/enc
contrib/libs/libc_compat
contrib/libs/nghttp2
contrib/libs/openssl
diff --git a/contrib/python/ydb/py3/.dist-info/METADATA b/contrib/python/ydb/py3/.dist-info/METADATA
index 4397bad877..db2f0036b3 100644
--- a/contrib/python/ydb/py3/.dist-info/METADATA
+++ b/contrib/python/ydb/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: ydb
-Version: 3.16.1
+Version: 3.17.1
Summary: YDB Python SDK
Home-page: http://github.com/ydb-platform/ydb-python-sdk
Author: Yandex LLC
@@ -30,6 +30,12 @@ YDB Python SDK
Officially supported Python client for YDB.
+---
+
+**Documentation**: <a href="https://ydb-platform.github.io/ydb-python-sdk" target="_blank">https://ydb-platform.github.io/ydb-python-sdk</a>
+
+---
+
## Quickstart
### Prerequisites
diff --git a/contrib/python/ydb/py3/README.md b/contrib/python/ydb/py3/README.md
index cfc57eb276..db7c3de271 100644
--- a/contrib/python/ydb/py3/README.md
+++ b/contrib/python/ydb/py3/README.md
@@ -7,6 +7,12 @@ YDB Python SDK
Officially supported Python client for YDB.
+---
+
+**Documentation**: <a href="https://ydb-platform.github.io/ydb-python-sdk" target="_blank">https://ydb-platform.github.io/ydb-python-sdk</a>
+
+---
+
## Quickstart
### Prerequisites
diff --git a/contrib/python/ydb/py3/ya.make b/contrib/python/ydb/py3/ya.make
index 8c6877ee58..c1ab6d4472 100644
--- a/contrib/python/ydb/py3/ya.make
+++ b/contrib/python/ydb/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(3.16.1)
+VERSION(3.17.1)
LICENSE(Apache-2.0)
diff --git a/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_query_public_types.py b/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_query_public_types.py
index 3ef2d55430..0b5ec41df7 100644
--- a/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_query_public_types.py
+++ b/contrib/python/ydb/py3/ydb/_grpc/grpcwrapper/ydb_query_public_types.py
@@ -10,6 +10,8 @@ except ImportError:
class BaseQueryTxMode(IToProto):
+ """Abstract class for Query Transaction Modes."""
+
@property
@abc.abstractmethod
def name(self) -> str:
@@ -17,6 +19,11 @@ class BaseQueryTxMode(IToProto):
class QuerySnapshotReadOnly(BaseQueryTxMode):
+ """All the read operations within a transaction access the database snapshot.
+ All the data reads are consistent. The snapshot is taken when the transaction begins,
+ meaning the transaction sees all changes committed before it began.
+ """
+
def __init__(self):
self._name = "snapshot_read_only"
@@ -29,6 +36,10 @@ class QuerySnapshotReadOnly(BaseQueryTxMode):
class QuerySerializableReadWrite(BaseQueryTxMode):
+ """This mode guarantees that the result of successful parallel transactions is equivalent
+ to their serial execution, and there are no read anomalies for successful transactions.
+ """
+
def __init__(self):
self._name = "serializable_read_write"
@@ -41,6 +52,15 @@ class QuerySerializableReadWrite(BaseQueryTxMode):
class QueryOnlineReadOnly(BaseQueryTxMode):
+ """Each read operation in the transaction is reading the data that is most recent at execution time.
+ The consistency of retrieved data depends on the allow_inconsistent_reads setting:
+ * false (consistent reads): Each individual read operation returns consistent data,
+ but no consistency is guaranteed between reads.
+ Reading the same table range twice may return different results.
+ * true (inconsistent reads): Even the data fetched by a particular
+ read operation may contain inconsistent results.
+ """
+
def __init__(self, allow_inconsistent_reads: bool = False):
self.allow_inconsistent_reads = allow_inconsistent_reads
self._name = "online_read_only"
@@ -54,6 +74,11 @@ class QueryOnlineReadOnly(BaseQueryTxMode):
class QueryStaleReadOnly(BaseQueryTxMode):
+ """Read operations within a transaction may return results that are slightly out-of-date
+ (lagging by fractions of a second). Each individual read returns consistent data,
+ but no consistency between different reads is guaranteed.
+ """
+
def __init__(self):
self._name = "stale_read_only"
diff --git a/contrib/python/ydb/py3/ydb/aio/__init__.py b/contrib/python/ydb/py3/ydb/aio/__init__.py
index 0e7d4e747a..1c9c887c22 100644
--- a/contrib/python/ydb/py3/ydb/aio/__init__.py
+++ b/contrib/python/ydb/py3/ydb/aio/__init__.py
@@ -1,3 +1,3 @@
from .driver import Driver # noqa
from .table import SessionPool, retry_operation # noqa
-from .query import QuerySessionPoolAsync, QuerySessionAsync # noqa
+from .query import QuerySessionPool, QuerySession, QueryTxContext # noqa
diff --git a/contrib/python/ydb/py3/ydb/aio/query/__init__.py b/contrib/python/ydb/py3/ydb/aio/query/__init__.py
index 829d7b54cf..8e7dd4fdff 100644
--- a/contrib/python/ydb/py3/ydb/aio/query/__init__.py
+++ b/contrib/python/ydb/py3/ydb/aio/query/__init__.py
@@ -1,7 +1,9 @@
__all__ = [
- "QuerySessionPoolAsync",
- "QuerySessionAsync",
+ "QuerySessionPool",
+ "QuerySession",
+ "QueryTxContext",
]
-from .pool import QuerySessionPoolAsync
-from .session import QuerySessionAsync
+from .pool import QuerySessionPool
+from .session import QuerySession
+from .transaction import QueryTxContext
diff --git a/contrib/python/ydb/py3/ydb/aio/query/pool.py b/contrib/python/ydb/py3/ydb/aio/query/pool.py
index f91f7465e4..e8d53438fc 100644
--- a/contrib/python/ydb/py3/ydb/aio/query/pool.py
+++ b/contrib/python/ydb/py3/ydb/aio/query/pool.py
@@ -1,3 +1,4 @@
+import asyncio
import logging
from typing import (
Callable,
@@ -6,7 +7,7 @@ from typing import (
)
from .session import (
- QuerySessionAsync,
+ QuerySession,
)
from ...retries import (
RetrySettings,
@@ -18,20 +19,85 @@ from ..._grpc.grpcwrapper import common_utils
logger = logging.getLogger(__name__)
-class QuerySessionPoolAsync:
- """QuerySessionPoolAsync is an object to simplify operations with sessions of Query Service."""
+class QuerySessionPool:
+ """QuerySessionPool is an object to simplify operations with sessions of Query Service."""
- def __init__(self, driver: common_utils.SupportedDriverType):
+ def __init__(self, driver: common_utils.SupportedDriverType, size: int = 100):
"""
:param driver: A driver instance
+ :param size: Size of session pool
"""
- logger.warning("QuerySessionPoolAsync is an experimental API, which could be changed.")
+ logger.warning("QuerySessionPool is an experimental API, which could be changed.")
self._driver = driver
+ self._size = size
+ self._should_stop = asyncio.Event()
+ self._queue = asyncio.Queue()
+ self._current_size = 0
+ self._waiters = 0
+ self._loop = asyncio.get_running_loop()
+
+ async def _create_new_session(self):
+ session = QuerySession(self._driver)
+ await session.create()
+ logger.debug(f"New session was created for pool. Session id: {session._state.session_id}")
+ return session
+
+ async def acquire(self) -> QuerySession:
+ """WARNING: This API is experimental and could be changed.
+
+ Acquire a session from Session Pool.
+
+ :return: A QuerySession object.
+ """
+
+ if self._should_stop.is_set():
+ logger.error("An attempt to take session from closed session pool.")
+ raise RuntimeError("An attempt to take session from closed session pool.")
+
+ session = None
+ try:
+ session = self._queue.get_nowait()
+ except asyncio.QueueEmpty:
+ pass
+
+ if session is None and self._current_size == self._size:
+ queue_get = asyncio.ensure_future(self._queue.get())
+ task_stop = asyncio.ensure_future(asyncio.ensure_future(self._should_stop.wait()))
+ done, _ = await asyncio.wait((queue_get, task_stop), return_when=asyncio.FIRST_COMPLETED)
+ if task_stop in done:
+ queue_get.cancel()
+ raise RuntimeError("An attempt to take session from closed session pool.")
+
+ task_stop.cancel()
+ session = queue_get.result()
+
+ if session is not None:
+ if session._state.attached:
+ logger.debug(f"Acquired active session from queue: {session._state.session_id}")
+ return session
+ else:
+ self._current_size -= 1
+ logger.debug(f"Acquired dead session from queue: {session._state.session_id}")
+
+ logger.debug(f"Session pool is not large enough: {self._current_size} < {self._size}, will create new one.")
+ session = await self._create_new_session()
+ self._current_size += 1
+ return session
+
+ async def release(self, session: QuerySession) -> None:
+ """WARNING: This API is experimental and could be changed.
+
+ Release a session back to Session Pool.
+ """
+
+ self._queue.put_nowait(session)
+ logger.debug("Session returned to queue: %s", session._state.session_id)
def checkout(self) -> "SimpleQuerySessionCheckoutAsync":
"""WARNING: This API is experimental and could be changed.
- Return a Session context manager, that opens session on enter and closes session on exit.
+
+ Return a Session context manager, that acquires session on enter and releases session on exit.
"""
return SimpleQuerySessionCheckoutAsync(self)
@@ -40,6 +106,7 @@ class QuerySessionPoolAsync:
self, callee: Callable, retry_settings: Optional[RetrySettings] = None, *args, **kwargs
):
"""WARNING: This API is experimental and could be changed.
+
Special interface to execute a bunch of commands with session in a safe, retriable way.
:param callee: A function, that works with session.
@@ -65,6 +132,7 @@ class QuerySessionPoolAsync:
**kwargs,
) -> List[convert.ResultSet]:
"""WARNING: This API is experimental and could be changed.
+
Special interface to execute a one-shot queries in a safe, retriable way.
Note: this method loads all data from stream before return, do not use this
method with huge read queries.
@@ -85,8 +153,20 @@ class QuerySessionPoolAsync:
return await retry_operation_async(wrapped_callee, retry_settings)
- async def stop(self, timeout=None):
- pass # TODO: implement
+ async def stop(self):
+ self._should_stop.set()
+
+ tasks = []
+ while True:
+ try:
+ session = self._queue.get_nowait()
+ tasks.append(session.delete())
+ except asyncio.QueueEmpty:
+ break
+
+ await asyncio.gather(*tasks)
+
+ logger.debug("All session were deleted.")
async def __aenter__(self):
return self
@@ -94,15 +174,21 @@ class QuerySessionPoolAsync:
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.stop()
+ def __del__(self):
+ if self._should_stop.is_set() or self._loop.is_closed():
+ return
+
+ self._loop.call_soon(self.stop)
+
class SimpleQuerySessionCheckoutAsync:
- def __init__(self, pool: QuerySessionPoolAsync):
+ def __init__(self, pool: QuerySessionPool):
self._pool = pool
- self._session = QuerySessionAsync(pool._driver)
+ self._session = None
- async def __aenter__(self) -> QuerySessionAsync:
- await self._session.create()
+ async def __aenter__(self) -> QuerySession:
+ self._session = await self._pool.acquire()
return self._session
async def __aexit__(self, exc_type, exc_val, exc_tb):
- await self._session.delete()
+ await self._pool.release(self._session)
diff --git a/contrib/python/ydb/py3/ydb/aio/query/session.py b/contrib/python/ydb/py3/ydb/aio/query/session.py
index 627a41d895..4c1c1a10fc 100644
--- a/contrib/python/ydb/py3/ydb/aio/query/session.py
+++ b/contrib/python/ydb/py3/ydb/aio/query/session.py
@@ -5,9 +5,10 @@ from typing import (
)
from .base import AsyncResponseContextIterator
-from .transaction import QueryTxContextAsync
+from .transaction import QueryTxContext
from .. import _utilities
from ... import issues
+from ...settings import BaseRequestSettings
from ..._grpc.grpcwrapper import common_utils
from ..._grpc.grpcwrapper import ydb_query_public_types as _ydb_query_public
@@ -18,7 +19,7 @@ from ...query.session import (
)
-class QuerySessionAsync(BaseQuerySession):
+class QuerySession(BaseQuerySession):
"""Session object for Query Service. It is not recommended to control
session's lifecycle manually - using a QuerySessionPool is always a better choice.
"""
@@ -32,7 +33,7 @@ class QuerySessionAsync(BaseQuerySession):
settings: Optional[base.QueryClientSettings] = None,
loop: asyncio.AbstractEventLoop = None,
):
- super(QuerySessionAsync, self).__init__(driver, settings)
+ super(QuerySession, self).__init__(driver, settings)
self._loop = loop if loop is not None else asyncio.get_running_loop()
async def _attach(self) -> None:
@@ -62,7 +63,7 @@ class QuerySessionAsync(BaseQuerySession):
self._state.reset()
self._state._change_state(QuerySessionStateEnum.CLOSED)
- async def delete(self) -> None:
+ async def delete(self, settings: Optional[BaseRequestSettings] = None) -> None:
"""WARNING: This API is experimental and could be changed.
Deletes a Session of Query Service on server side and releases resources.
@@ -73,30 +74,30 @@ class QuerySessionAsync(BaseQuerySession):
return
self._state._check_invalid_transition(QuerySessionStateEnum.CLOSED)
- await self._delete_call()
+ await self._delete_call(settings=settings)
self._stream.cancel()
- async def create(self) -> "QuerySessionAsync":
+ async def create(self, settings: Optional[BaseRequestSettings] = None) -> "QuerySession":
"""WARNING: This API is experimental and could be changed.
Creates a Session of Query Service on server side and attaches it.
- :return: QuerySessionSync object.
+ :return: QuerySession object.
"""
if self._state._already_in(QuerySessionStateEnum.CREATED):
return
self._state._check_invalid_transition(QuerySessionStateEnum.CREATED)
- await self._create_call()
+ await self._create_call(settings=settings)
await self._attach()
return self
- def transaction(self, tx_mode=None) -> QueryTxContextAsync:
+ def transaction(self, tx_mode=None) -> QueryTxContext:
self._state._check_session_ready_to_use()
tx_mode = tx_mode if tx_mode else _ydb_query_public.QuerySerializableReadWrite()
- return QueryTxContextAsync(
+ return QueryTxContext(
self._driver,
self._state,
self,
@@ -110,10 +111,12 @@ class QuerySessionAsync(BaseQuerySession):
syntax: base.QuerySyntax = None,
exec_mode: base.QueryExecMode = None,
concurrent_result_sets: bool = False,
+ settings: Optional[BaseRequestSettings] = None,
) -> AsyncResponseContextIterator:
"""WARNING: This API is experimental and could be changed.
Sends a query to Query Service
+
:param query: (YQL or SQL text) to be executed.
:param syntax: Syntax of the query, which is a one from the following choises:
1) QuerySyntax.YQL_V1, which is default;
@@ -132,6 +135,7 @@ class QuerySessionAsync(BaseQuerySession):
exec_mode=exec_mode,
parameters=parameters,
concurrent_result_sets=concurrent_result_sets,
+ settings=settings,
)
return AsyncResponseContextIterator(
@@ -139,6 +143,7 @@ class QuerySessionAsync(BaseQuerySession):
lambda resp: base.wrap_execute_query_response(
rpc_state=None,
response_pb=resp,
+ session_state=self._state,
settings=self._settings,
),
)
diff --git a/contrib/python/ydb/py3/ydb/aio/query/transaction.py b/contrib/python/ydb/py3/ydb/aio/query/transaction.py
index 429ba125c8..b115a4b48b 100644
--- a/contrib/python/ydb/py3/ydb/aio/query/transaction.py
+++ b/contrib/python/ydb/py3/ydb/aio/query/transaction.py
@@ -15,8 +15,8 @@ from ...query.transaction import (
logger = logging.getLogger(__name__)
-class QueryTxContextAsync(BaseQueryTxContext):
- async def __aenter__(self) -> "QueryTxContextAsync":
+class QueryTxContext(BaseQueryTxContext):
+ async def __aenter__(self) -> "QueryTxContext":
"""
Enters a context manager and returns a transaction
@@ -47,7 +47,7 @@ class QueryTxContextAsync(BaseQueryTxContext):
pass
self._prev_stream = None
- async def begin(self, settings: Optional[BaseRequestSettings] = None) -> "QueryTxContextAsync":
+ async def begin(self, settings: Optional[BaseRequestSettings] = None) -> "QueryTxContext":
"""WARNING: This API is experimental and could be changed.
Explicitly begins a transaction
@@ -114,6 +114,7 @@ class QueryTxContextAsync(BaseQueryTxContext):
"""WARNING: This API is experimental and could be changed.
Sends a query to Query Service
+
:param query: (YQL or SQL text) to be executed.
:param parameters: dict with parameters and YDB types;
:param commit_tx: A special flag that allows transaction commit.
@@ -146,6 +147,7 @@ class QueryTxContextAsync(BaseQueryTxContext):
lambda resp: base.wrap_execute_query_response(
rpc_state=None,
response_pb=resp,
+ session_state=self._session_state,
tx=self,
commit_tx=commit_tx,
settings=self.session._settings,
diff --git a/contrib/python/ydb/py3/ydb/query/__init__.py b/contrib/python/ydb/py3/ydb/query/__init__.py
index 40e512cd6b..0f8187892f 100644
--- a/contrib/python/ydb/py3/ydb/query/__init__.py
+++ b/contrib/python/ydb/py3/ydb/query/__init__.py
@@ -1,11 +1,13 @@
__all__ = [
+ "BaseQueryTxMode",
"QueryOnlineReadOnly",
"QuerySerializableReadWrite",
"QuerySnapshotReadOnly",
"QueryStaleReadOnly",
"QuerySessionPool",
- "QueryClientSync",
- "QuerySessionSync",
+ "QueryClientSettings",
+ "QuerySession",
+ "QueryTxContext",
]
import logging
@@ -14,10 +16,12 @@ from .base import (
QueryClientSettings,
)
-from .session import QuerySessionSync
+from .session import QuerySession
+from .transaction import QueryTxContext
from .._grpc.grpcwrapper import common_utils
from .._grpc.grpcwrapper.ydb_query_public_types import (
+ BaseQueryTxMode,
QueryOnlineReadOnly,
QuerySerializableReadWrite,
QuerySnapshotReadOnly,
@@ -35,5 +39,5 @@ class QueryClientSync:
self._driver = driver
self._settings = query_client_settings
- def session(self) -> QuerySessionSync:
- return QuerySessionSync(self._driver, self._settings)
+ def session(self) -> QuerySession:
+ return QuerySession(self._driver, self._settings)
diff --git a/contrib/python/ydb/py3/ydb/query/base.py b/contrib/python/ydb/py3/ydb/query/base.py
index 55087d0c4e..9372cbcf54 100644
--- a/contrib/python/ydb/py3/ydb/query/base.py
+++ b/contrib/python/ydb/py3/ydb/query/base.py
@@ -165,28 +165,31 @@ def create_execute_query_request(
)
+def bad_session_handler(func):
+ @functools.wraps(func)
+ def decorator(rpc_state, response_pb, session_state: IQuerySessionState, *args, **kwargs):
+ try:
+ return func(rpc_state, response_pb, session_state, *args, **kwargs)
+ except issues.BadSession:
+ session_state.reset()
+ raise
+
+ return decorator
+
+
+@bad_session_handler
def wrap_execute_query_response(
rpc_state: RpcState,
response_pb: _apis.ydb_query.ExecuteQueryResponsePart,
+ session_state: IQuerySessionState,
tx: Optional["BaseQueryTxContext"] = None,
commit_tx: Optional[bool] = False,
settings: Optional[QueryClientSettings] = None,
) -> convert.ResultSet:
issues._process_response(response_pb)
- if tx and response_pb.tx_meta and not tx.tx_id:
- tx._move_to_beginned(response_pb.tx_meta.id)
if tx and commit_tx:
tx._move_to_commited()
- return convert.ResultSet.from_message(response_pb.result_set, settings)
-
-
-def bad_session_handler(func):
- @functools.wraps(func)
- def decorator(rpc_state, response_pb, session_state: IQuerySessionState, *args, **kwargs):
- try:
- return func(rpc_state, response_pb, session_state, *args, **kwargs)
- except issues.BadSession:
- session_state.reset()
- raise
+ elif tx and response_pb.tx_meta and not tx.tx_id:
+ tx._move_to_beginned(response_pb.tx_meta.id)
- return decorator
+ return convert.ResultSet.from_message(response_pb.result_set, settings)
diff --git a/contrib/python/ydb/py3/ydb/query/pool.py b/contrib/python/ydb/py3/ydb/query/pool.py
index afe39f0623..839d8688ca 100644
--- a/contrib/python/ydb/py3/ydb/query/pool.py
+++ b/contrib/python/ydb/py3/ydb/query/pool.py
@@ -4,15 +4,20 @@ from typing import (
Optional,
List,
)
+import time
+import threading
+import queue
from .session import (
- QuerySessionSync,
+ QuerySession,
)
from ..retries import (
RetrySettings,
retry_operation_sync,
)
+from .. import issues
from .. import convert
+from ..settings import BaseRequestSettings
from .._grpc.grpcwrapper import common_utils
@@ -22,23 +27,102 @@ logger = logging.getLogger(__name__)
class QuerySessionPool:
"""QuerySessionPool is an object to simplify operations with sessions of Query Service."""
- def __init__(self, driver: common_utils.SupportedDriverType):
+ def __init__(self, driver: common_utils.SupportedDriverType, size: int = 100):
"""
- :param driver: A driver instance
+ :param driver: A driver instance.
+ :param size: Max size of Session Pool.
"""
logger.warning("QuerySessionPool is an experimental API, which could be changed.")
self._driver = driver
+ self._queue = queue.Queue()
+ self._current_size = 0
+ self._size = size
+ self._should_stop = threading.Event()
+ self._lock = threading.RLock()
+
+ def _create_new_session(self, timeout: Optional[float]):
+ session = QuerySession(self._driver)
+ session.create(settings=BaseRequestSettings().with_timeout(timeout))
+ logger.debug(f"New session was created for pool. Session id: {session._state.session_id}")
+ return session
+
+ def acquire(self, timeout: Optional[float] = None) -> QuerySession:
+ """WARNING: This API is experimental and could be changed.
+
+ Acquire a session from Session Pool.
+
+ :param timeout: A timeout to wait in seconds.
+
+ :return: A QuerySession object.
+ """
+
+ start = time.monotonic()
+
+ lock_acquire_timeout = timeout if timeout is not None else -1
+ acquired = self._lock.acquire(timeout=lock_acquire_timeout)
+ try:
+ if self._should_stop.is_set():
+ logger.error("An attempt to take session from closed session pool.")
+ raise RuntimeError("An attempt to take session from closed session pool.")
+
+ session = None
+ try:
+ session = self._queue.get_nowait()
+ except queue.Empty:
+ pass
+
+ finish = time.monotonic()
+ timeout = timeout - (finish - start) if timeout is not None else None
+
+ start = time.monotonic()
+ if session is None and self._current_size == self._size:
+ try:
+ session = self._queue.get(block=True, timeout=timeout)
+ except queue.Empty:
+ raise issues.SessionPoolEmpty("Timeout on acquire session")
+
+ if session is not None:
+ if session._state.attached:
+ logger.debug(f"Acquired active session from queue: {session._state.session_id}")
+ return session
+ else:
+ self._current_size -= 1
+ logger.debug(f"Acquired dead session from queue: {session._state.session_id}")
+
+ logger.debug(f"Session pool is not large enough: {self._current_size} < {self._size}, will create new one.")
+ finish = time.monotonic()
+ time_left = timeout - (finish - start) if timeout is not None else None
+ session = self._create_new_session(time_left)
+
+ self._current_size += 1
+ return session
+ finally:
+ if acquired:
+ self._lock.release()
+
+ def release(self, session: QuerySession) -> None:
+ """WARNING: This API is experimental and could be changed.
+
+ Release a session back to Session Pool.
+ """
+
+ self._queue.put_nowait(session)
+ logger.debug("Session returned to queue: %s", session._state.session_id)
- def checkout(self) -> "SimpleQuerySessionCheckout":
+ def checkout(self, timeout: Optional[float] = None) -> "SimpleQuerySessionCheckout":
"""WARNING: This API is experimental and could be changed.
- Return a Session context manager, that opens session on enter and closes session on exit.
+
+ Return a Session context manager, that acquires session on enter and releases session on exit.
+
+ :param timeout: A timeout to wait in seconds.
"""
- return SimpleQuerySessionCheckout(self)
+ return SimpleQuerySessionCheckout(self, timeout)
def retry_operation_sync(self, callee: Callable, retry_settings: Optional[RetrySettings] = None, *args, **kwargs):
"""WARNING: This API is experimental and could be changed.
+
Special interface to execute a bunch of commands with session in a safe, retriable way.
:param callee: A function, that works with session.
@@ -50,7 +134,7 @@ class QuerySessionPool:
retry_settings = RetrySettings() if retry_settings is None else retry_settings
def wrapped_callee():
- with self.checkout() as session:
+ with self.checkout(timeout=retry_settings.max_session_acquire_timeout) as session:
return callee(session, *args, **kwargs)
return retry_operation_sync(wrapped_callee, retry_settings)
@@ -64,6 +148,7 @@ class QuerySessionPool:
**kwargs,
) -> List[convert.ResultSet]:
"""WARNING: This API is experimental and could be changed.
+
Special interface to execute a one-shot queries in a safe, retriable way.
Note: this method loads all data from stream before return, do not use this
method with huge read queries.
@@ -78,14 +163,28 @@ class QuerySessionPool:
retry_settings = RetrySettings() if retry_settings is None else retry_settings
def wrapped_callee():
- with self.checkout() as session:
+ with self.checkout(timeout=retry_settings.max_session_acquire_timeout) as session:
it = session.execute(query, parameters, *args, **kwargs)
return [result_set for result_set in it]
return retry_operation_sync(wrapped_callee, retry_settings)
def stop(self, timeout=None):
- pass # TODO: implement
+ acquire_timeout = timeout if timeout is not None else -1
+ acquired = self._lock.acquire(timeout=acquire_timeout)
+ try:
+ self._should_stop.set()
+ while True:
+ try:
+ session = self._queue.get_nowait()
+ session.delete()
+ except queue.Empty:
+ break
+
+ logger.debug("All session were deleted.")
+ finally:
+ if acquired:
+ self._lock.release()
def __enter__(self):
return self
@@ -93,15 +192,19 @@ class QuerySessionPool:
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop()
+ def __del__(self):
+ self.stop()
+
class SimpleQuerySessionCheckout:
- def __init__(self, pool: QuerySessionPool):
+ def __init__(self, pool: QuerySessionPool, timeout: Optional[float]):
self._pool = pool
- self._session = QuerySessionSync(pool._driver)
+ self._timeout = timeout
+ self._session = None
- def __enter__(self) -> QuerySessionSync:
- self._session.create()
+ def __enter__(self) -> QuerySession:
+ self._session = self._pool.acquire(self._timeout)
return self._session
def __exit__(self, exc_type, exc_val, exc_tb):
- self._session.delete()
+ self._pool.release(self._session)
diff --git a/contrib/python/ydb/py3/ydb/query/session.py b/contrib/python/ydb/py3/ydb/query/session.py
index 4b051dc16f..5b4db26c92 100644
--- a/contrib/python/ydb/py3/ydb/query/session.py
+++ b/contrib/python/ydb/py3/ydb/query/session.py
@@ -10,12 +10,13 @@ from typing import (
from . import base
from .. import _apis, issues, _utilities
+from ..settings import BaseRequestSettings
from ..connection import _RpcState as RpcState
from .._grpc.grpcwrapper import common_utils
from .._grpc.grpcwrapper import ydb_query as _ydb_query
from .._grpc.grpcwrapper import ydb_query_public_types as _ydb_query_public
-from .transaction import QueryTxContextSync
+from .transaction import QueryTxContext
logger = logging.getLogger(__name__)
@@ -136,29 +137,32 @@ class BaseQuerySession:
self._settings = settings if settings is not None else base.QueryClientSettings()
self._state = QuerySessionState(settings)
- def _create_call(self) -> "BaseQuerySession":
+ def _create_call(self, settings: Optional[BaseRequestSettings] = None) -> "BaseQuerySession":
return self._driver(
_apis.ydb_query.CreateSessionRequest(),
_apis.QueryService.Stub,
_apis.QueryService.CreateSession,
wrap_result=wrapper_create_session,
wrap_args=(self._state, self),
+ settings=settings,
)
- def _delete_call(self) -> "BaseQuerySession":
+ def _delete_call(self, settings: Optional[BaseRequestSettings] = None) -> "BaseQuerySession":
return self._driver(
_apis.ydb_query.DeleteSessionRequest(session_id=self._state.session_id),
_apis.QueryService.Stub,
_apis.QueryService.DeleteSession,
wrap_result=wrapper_delete_session,
wrap_args=(self._state, self),
+ settings=settings,
)
- def _attach_call(self) -> Iterable[_apis.ydb_query.SessionState]:
+ def _attach_call(self, settings: Optional[BaseRequestSettings] = None) -> Iterable[_apis.ydb_query.SessionState]:
return self._driver(
_apis.ydb_query.AttachSessionRequest(session_id=self._state.session_id),
_apis.QueryService.Stub,
_apis.QueryService.AttachSession,
+ settings=settings,
)
def _execute_call(
@@ -169,6 +173,7 @@ class BaseQuerySession:
exec_mode: base.QueryExecMode = None,
parameters: dict = None,
concurrent_result_sets: bool = False,
+ settings: Optional[BaseRequestSettings] = None,
) -> Iterable[_apis.ydb_query.ExecuteQueryResponsePart]:
request = base.create_execute_query_request(
query=query,
@@ -186,18 +191,19 @@ class BaseQuerySession:
request.to_proto(),
_apis.QueryService.Stub,
_apis.QueryService.ExecuteQuery,
+ settings=settings,
)
-class QuerySessionSync(BaseQuerySession):
+class QuerySession(BaseQuerySession):
"""Session object for Query Service. It is not recommended to control
session's lifecycle manually - using a QuerySessionPool is always a better choice.
"""
_stream = None
- def _attach(self) -> None:
- self._stream = self._attach_call()
+ def _attach(self, settings: Optional[BaseRequestSettings] = None) -> None:
+ self._stream = self._attach_call(settings=settings)
status_stream = _utilities.SyncResponseIterator(
self._stream,
lambda response: common_utils.ServerStatus.from_proto(response),
@@ -228,7 +234,7 @@ class QuerySessionSync(BaseQuerySession):
self._state.reset()
self._state._change_state(QuerySessionStateEnum.CLOSED)
- def delete(self) -> None:
+ def delete(self, settings: Optional[BaseRequestSettings] = None) -> None:
"""WARNING: This API is experimental and could be changed.
Deletes a Session of Query Service on server side and releases resources.
@@ -239,29 +245,31 @@ class QuerySessionSync(BaseQuerySession):
return
self._state._check_invalid_transition(QuerySessionStateEnum.CLOSED)
- self._delete_call()
+ self._delete_call(settings=settings)
self._stream.cancel()
- def create(self) -> "QuerySessionSync":
+ def create(self, settings: Optional[BaseRequestSettings] = None) -> "QuerySession":
"""WARNING: This API is experimental and could be changed.
Creates a Session of Query Service on server side and attaches it.
- :return: QuerySessionSync object.
+ :return: QuerySession object.
"""
if self._state._already_in(QuerySessionStateEnum.CREATED):
return
self._state._check_invalid_transition(QuerySessionStateEnum.CREATED)
- self._create_call()
+
+ self._create_call(settings=settings)
self._attach()
return self
- def transaction(self, tx_mode: Optional[base.BaseQueryTxMode] = None) -> QueryTxContextSync:
+ def transaction(self, tx_mode: Optional[base.BaseQueryTxMode] = None) -> QueryTxContext:
"""WARNING: This API is experimental and could be changed.
Creates a transaction context manager with specified transaction mode.
+
:param tx_mode: Transaction mode, which is a one from the following choises:
1) QuerySerializableReadWrite() which is default mode;
2) QueryOnlineReadOnly(allow_inconsistent_reads=False);
@@ -275,7 +283,7 @@ class QuerySessionSync(BaseQuerySession):
tx_mode = tx_mode if tx_mode else _ydb_query_public.QuerySerializableReadWrite()
- return QueryTxContextSync(
+ return QueryTxContext(
self._driver,
self._state,
self,
@@ -289,10 +297,12 @@ class QuerySessionSync(BaseQuerySession):
syntax: base.QuerySyntax = None,
exec_mode: base.QueryExecMode = None,
concurrent_result_sets: bool = False,
+ settings: Optional[BaseRequestSettings] = None,
) -> base.SyncResponseContextIterator:
"""WARNING: This API is experimental and could be changed.
Sends a query to Query Service
+
:param query: (YQL or SQL text) to be executed.
:param syntax: Syntax of the query, which is a one from the following choises:
1) QuerySyntax.YQL_V1, which is default;
@@ -311,6 +321,7 @@ class QuerySessionSync(BaseQuerySession):
exec_mode=exec_mode,
parameters=parameters,
concurrent_result_sets=concurrent_result_sets,
+ settings=settings,
)
return base.SyncResponseContextIterator(
@@ -318,6 +329,7 @@ class QuerySessionSync(BaseQuerySession):
lambda resp: base.wrap_execute_query_response(
rpc_state=None,
response_pb=resp,
+ session_state=self._state,
settings=self._settings,
),
)
diff --git a/contrib/python/ydb/py3/ydb/query/transaction.py b/contrib/python/ydb/py3/ydb/query/transaction.py
index be7396b1a5..21ba02798b 100644
--- a/contrib/python/ydb/py3/ydb/query/transaction.py
+++ b/contrib/python/ydb/py3/ydb/query/transaction.py
@@ -294,7 +294,7 @@ class BaseQueryTxContext:
self._tx_state._change_state(QueryTxStateEnum.COMMITTED)
-class QueryTxContextSync(BaseQueryTxContext):
+class QueryTxContext(BaseQueryTxContext):
def __enter__(self) -> "BaseQueryTxContext":
"""
Enters a context manager and returns a transaction
@@ -326,7 +326,7 @@ class QueryTxContextSync(BaseQueryTxContext):
pass
self._prev_stream = None
- def begin(self, settings: Optional[BaseRequestSettings] = None) -> "QueryTxContextSync":
+ def begin(self, settings: Optional[BaseRequestSettings] = None) -> "QueryTxContext":
"""WARNING: This API is experimental and could be changed.
Explicitly begins a transaction
@@ -394,6 +394,7 @@ class QueryTxContextSync(BaseQueryTxContext):
"""WARNING: This API is experimental and could be changed.
Sends a query to Query Service
+
:param query: (YQL or SQL text) to be executed.
:param parameters: dict with parameters and YDB types;
:param commit_tx: A special flag that allows transaction commit.
@@ -427,6 +428,7 @@ class QueryTxContextSync(BaseQueryTxContext):
lambda resp: base.wrap_execute_query_response(
rpc_state=None,
response_pb=resp,
+ session_state=self._session_state,
tx=self,
commit_tx=commit_tx,
settings=self.session._settings,
diff --git a/contrib/python/ydb/py3/ydb/settings.py b/contrib/python/ydb/py3/ydb/settings.py
index 6739a46fab..019b75a8ec 100644
--- a/contrib/python/ydb/py3/ydb/settings.py
+++ b/contrib/python/ydb/py3/ydb/settings.py
@@ -39,7 +39,7 @@ class BaseRequestSettings(object):
.with_need_rpc_auth(self.need_rpc_auth)
)
- def with_compression(self, compression):
+ def with_compression(self, compression) -> "BaseRequestSettings":
"""
Enables compression for the specific RPC
:param compression: An RPCCompression enum value.
@@ -48,11 +48,11 @@ class BaseRequestSettings(object):
self.compression = compression
return self
- def with_need_rpc_auth(self, need_rpc_auth):
+ def with_need_rpc_auth(self, need_rpc_auth) -> "BaseRequestSettings":
self.need_rpc_auth = need_rpc_auth
return self
- def with_header(self, key, value):
+ def with_header(self, key, value) -> "BaseRequestSettings":
"""
Adds a key-value pair to the request headers.
:param key: A string with a header key.
@@ -62,7 +62,7 @@ class BaseRequestSettings(object):
self.headers.append((key, value))
return self
- def with_trace_id(self, trace_id):
+ def with_trace_id(self, trace_id) -> "BaseRequestSettings":
"""
Includes trace id for RPC headers
:param trace_id: A trace id string
@@ -71,7 +71,7 @@ class BaseRequestSettings(object):
self.trace_id = trace_id
return self
- def with_request_type(self, request_type):
+ def with_request_type(self, request_type) -> "BaseRequestSettings":
"""
Includes request type for RPC headers
:param request_type: A request type string
@@ -80,7 +80,7 @@ class BaseRequestSettings(object):
self.request_type = request_type
return self
- def with_operation_timeout(self, timeout):
+ def with_operation_timeout(self, timeout) -> "BaseRequestSettings":
"""
Indicates that client is no longer interested in the result of operation after the specified duration
starting from the time operation arrives at the server.
@@ -89,12 +89,12 @@ class BaseRequestSettings(object):
Timeout of operation does not tell anything about its result, it might be completed successfully
or cancelled on server.
:param timeout:
- :return:
+ :return: The self instance
"""
self.operation_timeout = timeout
return self
- def with_cancel_after(self, timeout):
+ def with_cancel_after(self, timeout) -> "BaseRequestSettings":
"""
Server will try to cancel the operation after the specified duration starting from the time
the operation arrives at server.
@@ -102,12 +102,12 @@ class BaseRequestSettings(object):
sent back to client if it was waiting for the operation result.
In case when cancellation isn't possible, no action will be performed.
:param timeout:
- :return:
+ :return: The self instance
"""
self.cancel_after = timeout
return self
- def with_timeout(self, timeout):
+ def with_timeout(self, timeout) -> "BaseRequestSettings":
"""
Client-side timeout to complete request.
Since YDB doesn't support request cancellation at this moment, this feature should be
diff --git a/contrib/python/ydb/py3/ydb/table.py b/contrib/python/ydb/py3/ydb/table.py
index ac9f93042c..cfcffb17af 100644
--- a/contrib/python/ydb/py3/ydb/table.py
+++ b/contrib/python/ydb/py3/ydb/table.py
@@ -297,6 +297,10 @@ class TableIndex(object):
self._pb.global_index.SetInParent()
return self
+ def with_global_async_index(self):
+ self._pb.global_async_index.SetInParent()
+ return self
+
def with_index_columns(self, *columns):
for column in columns:
self._pb.index_columns.append(column)
diff --git a/contrib/python/ydb/py3/ydb/ydb_version.py b/contrib/python/ydb/py3/ydb/ydb_version.py
index 96a1189198..b0ef9f368d 100644
--- a/contrib/python/ydb/py3/ydb/ydb_version.py
+++ b/contrib/python/ydb/py3/ydb/ydb_version.py
@@ -1 +1 @@
-VERSION = "3.16.1"
+VERSION = "3.17.1"
diff --git a/library/cpp/http/push_parser/http_parser.cpp b/library/cpp/http/push_parser/http_parser.cpp
index a646b349a3..b7b0a82bec 100644
--- a/library/cpp/http/push_parser/http_parser.cpp
+++ b/library/cpp/http/push_parser/http_parser.cpp
@@ -44,6 +44,10 @@ TString THttpParser::GetBestCompressionScheme() const {
return TString();
}
+const THashSet<TString>& THttpParser::AcceptedEncodings() const {
+ return AcceptEncodings_;
+}
+
bool THttpParser::FirstLineParser() {
if (Y_UNLIKELY(!ReadLine())) {
return false;
diff --git a/library/cpp/http/push_parser/http_parser.h b/library/cpp/http/push_parser/http_parser.h
index af3ce46dbd..64d7b12ea5 100644
--- a/library/cpp/http/push_parser/http_parser.h
+++ b/library/cpp/http/push_parser/http_parser.h
@@ -100,6 +100,8 @@ public:
TString GetBestCompressionScheme() const;
+ const THashSet<TString>& AcceptedEncodings() const;
+
const TString& Content() const noexcept {
return Content_;
}
diff --git a/library/cpp/svnversion/ya.make b/library/cpp/svnversion/ya.make
index 3789c4b38a..d028f45c94 100644
--- a/library/cpp/svnversion/ya.make
+++ b/library/cpp/svnversion/ya.make
@@ -4,7 +4,13 @@ SRCS(
svnversion.cpp
svn_interface.c
)
+
+IF (OPENSOURCE_PROJECT == "yt-cpp-sdk")
+ PEERDIR(build/scripts/c_templates/)
+ENDIF()
+
END()
+
RECURSE(
test
)
diff --git a/library/cpp/threading/thread_local/thread_local.h b/library/cpp/threading/thread_local/thread_local.h
index 1cc4642373..bb0d11347d 100644
--- a/library/cpp/threading/thread_local/thread_local.h
+++ b/library/cpp/threading/thread_local/thread_local.h
@@ -222,16 +222,16 @@ public:
template <typename ...ConsturctArgs>
T* Get(TThread::TId tid, ConsturctArgs&& ...args) {
- TNode* node = Head_.load(std::memory_order_relaxed);
- for (; node; node = node->Next) {
+ TNode* head = Head_.load(std::memory_order_acquire);
+ for (TNode* node = head; node; node = node->Next) {
if (node->Key == tid) {
return &node->Value;
}
}
- TNode* newNode = AllocateNode(tid, node, std::forward<ConsturctArgs>(args)...);
- while (!Head_.compare_exchange_weak(node, newNode, std::memory_order_release, std::memory_order_relaxed)) {
- newNode->Next = node;
+ TNode* newNode = AllocateNode(tid, head, std::forward<ConsturctArgs>(args)...);
+ while (!Head_.compare_exchange_weak(head, newNode, std::memory_order_release, std::memory_order_relaxed)) {
+ newNode->Next = head;
}
return &newNode->Value;
diff --git a/library/python/monlib/metric_registry.pyx b/library/python/monlib/metric_registry.pyx
index 800a1abd1b..aae0a5962a 100644
--- a/library/python/monlib/metric_registry.pyx
+++ b/library/python/monlib/metric_registry.pyx
@@ -12,6 +12,7 @@ from util.datetime.base cimport TInstant
from util.system.types cimport ui32
from util.generic.vector cimport TVector
+from libcpp.utility cimport move
from libcpp.string cimport string
from cython.operator cimport address, dereference as deref
@@ -20,10 +21,6 @@ import datetime as dt
import sys
-cdef extern from "<utility>" namespace "std" nogil:
- cdef IHistogramCollectorPtr&& move(IHistogramCollectorPtr t)
-
-
def get_or_raise(kwargs, key):
value = kwargs.get(key)
if value is None:
diff --git a/yt/yt/client/api/queue_client.h b/yt/yt/client/api/queue_client.h
index ad489e547a..658dbd5b39 100644
--- a/yt/yt/client/api/queue_client.h
+++ b/yt/yt/client/api/queue_client.h
@@ -27,7 +27,7 @@ struct TPullRowsOptions
NChaosClient::TReplicationProgress ReplicationProgress;
NTransactionClient::TTimestamp UpperTimestamp = NTransactionClient::NullTimestamp;
NTableClient::TTableSchemaPtr TableSchema;
- i64 MaxDataWeight = 1_GB;
+ i64 MaxDataWeight = 20_MB;
IReservingMemoryUsageTrackerPtr MemoryTracker;
};
diff --git a/yt/yt/client/driver/driver.cpp b/yt/yt/client/driver/driver.cpp
index 8cdc9cbe88..19a0f3bdc5 100644
--- a/yt/yt/client/driver/driver.cpp
+++ b/yt/yt/client/driver/driver.cpp
@@ -548,8 +548,7 @@ private:
NTracing::TChildTraceContextGuard commandSpan(ConcatToString(TStringBuf("Driver:"), request.CommandName));
NTracing::AnnotateTraceContext([&] (const auto& traceContext) {
- // TODO(babenko): switch to std::string
- traceContext->AddTag("user", TString(request.AuthenticatedUser));
+ traceContext->AddTag("user", request.AuthenticatedUser);
traceContext->AddTag("request_id", request.Id);
});
diff --git a/yt/yt/core/concurrency/action_queue.cpp b/yt/yt/core/concurrency/action_queue.cpp
index 3ff52fbbc5..99e6ec94f0 100644
--- a/yt/yt/core/concurrency/action_queue.cpp
+++ b/yt/yt/core/concurrency/action_queue.cpp
@@ -253,7 +253,7 @@ IInvokerPtr CreateSerializedInvoker(IInvokerPtr underlyingInvoker, const NProfil
IInvokerPtr CreateSerializedInvoker(IInvokerPtr underlyingInvoker, const TString& invokerName, NProfiling::IRegistryImplPtr registry)
{
NProfiling::TTagSet tagSet;
- tagSet.AddTag(std::pair<TString, TString>("invoker", invokerName));
+ tagSet.AddTag(NProfiling::TTag("invoker", invokerName));
return CreateSerializedInvoker(std::move(underlyingInvoker), std::move(tagSet), std::move(registry));
}
@@ -326,7 +326,7 @@ IPrioritizedInvokerPtr CreatePrioritizedInvoker(IInvokerPtr underlyingInvoker, c
IPrioritizedInvokerPtr CreatePrioritizedInvoker(IInvokerPtr underlyingInvoker, const TString& invokerName, NProfiling::IRegistryImplPtr registry)
{
NProfiling::TTagSet tagSet;
- tagSet.AddTag(std::pair<TString, TString>("invoker", invokerName));
+ tagSet.AddTag(NProfiling::TTag("invoker", invokerName));
return CreatePrioritizedInvoker(std::move(underlyingInvoker), std::move(tagSet), std::move(registry));
}
diff --git a/yt/yt/core/concurrency/profiling_helpers.cpp b/yt/yt/core/concurrency/profiling_helpers.cpp
index b6b67994c0..1a80490da5 100644
--- a/yt/yt/core/concurrency/profiling_helpers.cpp
+++ b/yt/yt/core/concurrency/profiling_helpers.cpp
@@ -10,7 +10,7 @@ TTagSet GetThreadTags(
const TString& threadName)
{
TTagSet tags;
- tags.AddTag(std::pair<TString, TString>("thread", threadName));
+ tags.AddTag(TTag("thread", threadName));
return tags;
}
@@ -20,8 +20,8 @@ TTagSet GetBucketTags(
{
TTagSet tags;
- tags.AddTag(std::pair<TString, TString>("thread", threadName));
- tags.AddTag(std::pair<TString, TString>("bucket", bucketName), -1);
+ tags.AddTag(TTag("thread", threadName));
+ tags.AddTag(TTag("bucket", bucketName), -1);
return tags;
}
@@ -33,9 +33,9 @@ TTagSet GetQueueTags(
{
TTagSet tags;
- tags.AddTag(std::pair<TString, TString>("thread", threadName));
- tags.AddTag(std::pair<TString, TString>("bucket", bucketName), -1);
- tags.AddTag(std::pair<TString, TString>("queue", queueName), -1);
+ tags.AddTag(TTag("thread", threadName));
+ tags.AddTag(TTag("bucket", bucketName), -1);
+ tags.AddTag(TTag("queue", queueName), -1);
return tags;
}
diff --git a/yt/yt/core/rpc/channel_detail.cpp b/yt/yt/core/rpc/channel_detail.cpp
index 89750b2e6f..b6edfbb2ea 100644
--- a/yt/yt/core/rpc/channel_detail.cpp
+++ b/yt/yt/core/rpc/channel_detail.cpp
@@ -205,8 +205,8 @@ auto TClientRequestPerformanceProfiler::GetPerformanceCounters(
auto [counter, _] = LeakySingleton<TCountersMap>()->FindOrInsert(std::pair(service, method), [&] {
auto profiler = RpcClientProfiler
.WithHot()
- .WithTag("yt_service", TString(service))
- .WithTag("method", TString(method), -1);
+ .WithTag("yt_service", service)
+ .WithTag("method", method, -1);
return TPerformanceCounters(profiler);
});
return counter;
diff --git a/yt/yt/core/rpc/config.cpp b/yt/yt/core/rpc/config.cpp
index ded7137a24..50030c0c3b 100644
--- a/yt/yt/core/rpc/config.cpp
+++ b/yt/yt/core/rpc/config.cpp
@@ -148,7 +148,7 @@ void TRetryingChannelConfig::Register(TRegistrar registrar)
////////////////////////////////////////////////////////////////////////////////
-void TBalancingChannelConfigBase::Register(TRegistrar registrar)
+void TViablePeerRegistryConfig::Register(TRegistrar registrar)
{
registrar.Parameter("discover_timeout", &TThis::DiscoverTimeout)
.Default(TDuration::Seconds(15));
@@ -162,12 +162,6 @@ void TBalancingChannelConfigBase::Register(TRegistrar registrar)
.Default(TDuration::Seconds(60));
registrar.Parameter("soft_backoff_time", &TThis::SoftBackoffTime)
.Default(TDuration::Seconds(15));
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-void TViablePeerRegistryConfig::Register(TRegistrar registrar)
-{
registrar.Parameter("max_peer_count", &TThis::MaxPeerCount)
.GreaterThan(1)
.Default(100);
@@ -241,18 +235,24 @@ void TServiceDiscoveryEndpointsConfig::Register(TRegistrar registrar)
////////////////////////////////////////////////////////////////////////////////
-void TBalancingChannelConfig::Register(TRegistrar registrar)
+void TBalancingChannelConfigBase::Register(TRegistrar registrar)
{
- registrar.Parameter("addresses", &TThis::Addresses)
- .Optional();
registrar.Parameter("disable_balancing_on_single_address", &TThis::DisableBalancingOnSingleAddress)
.Default(true);
- registrar.Parameter("endpoints", &TThis::Endpoints)
- .Optional();
registrar.Parameter("hedging_delay", &TThis::HedgingDelay)
.Optional();
registrar.Parameter("cancel_primary_request_on_hedging", &TThis::CancelPrimaryRequestOnHedging)
.Default(false);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+void TBalancingChannelConfig::Register(TRegistrar registrar)
+{
+ registrar.Parameter("addresses", &TThis::Addresses)
+ .Optional();
+ registrar.Parameter("endpoints", &TThis::Endpoints)
+ .Optional();
registrar.Postprocessor([] (TThis* config) {
int endpointConfigCount = 0;
diff --git a/yt/yt/core/rpc/config.h b/yt/yt/core/rpc/config.h
index f77fb077d2..cc4e924ef1 100644
--- a/yt/yt/core/rpc/config.h
+++ b/yt/yt/core/rpc/config.h
@@ -195,7 +195,12 @@ DEFINE_REFCOUNTED_TYPE(TRetryingChannelConfig)
////////////////////////////////////////////////////////////////////////////////
-class TBalancingChannelConfigBase
+DEFINE_ENUM(EPeerPriorityStrategy,
+ (None)
+ (PreferLocal)
+);
+
+class TViablePeerRegistryConfig
: public virtual NYTree::TYsonStruct
{
public:
@@ -227,22 +232,6 @@ public:
//! returns a soft failure (i.e. "down" response) to |Discover| request.
TDuration SoftBackoffTime;
- REGISTER_YSON_STRUCT(TBalancingChannelConfigBase);
-
- static void Register(TRegistrar registrar);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-DEFINE_ENUM(EPeerPriorityStrategy,
- (None)
- (PreferLocal)
-);
-
-class TViablePeerRegistryConfig
- : public TBalancingChannelConfigBase
-{
-public:
//! In case too many peers are known, the registry will only maintain this many peers active.
int MaxPeerCount;
@@ -322,27 +311,40 @@ DEFINE_REFCOUNTED_TYPE(TServiceDiscoveryEndpointsConfig)
////////////////////////////////////////////////////////////////////////////////
-class TBalancingChannelConfig
+class TBalancingChannelConfigBase
: public TDynamicChannelPoolConfig
{
public:
- //! First option: static list of addresses.
- std::optional<std::vector<std::string>> Addresses;
-
//! Disables discovery and balancing when just one address is given.
//! This is vital for jobs since node's redirector is incapable of handling
//! discover requests properly.
bool DisableBalancingOnSingleAddress;
- //! Second option: SD endpoints.
- TServiceDiscoveryEndpointsConfigPtr Endpoints;
-
//! Delay before sending a hedged request. If null then hedging is disabled.
std::optional<TDuration> HedgingDelay;
//! Whether to cancel the primary request when backup one is sent.
bool CancelPrimaryRequestOnHedging;
+ REGISTER_YSON_STRUCT(TBalancingChannelConfigBase);
+
+ static void Register(TRegistrar registrar);
+};
+
+DEFINE_REFCOUNTED_TYPE(TBalancingChannelConfigBase)
+
+////////////////////////////////////////////////////////////////////////////////
+
+class TBalancingChannelConfig
+ : public TBalancingChannelConfigBase
+{
+public:
+ //! First option: static list of addresses.
+ std::optional<std::vector<std::string>> Addresses;
+
+ //! Second option: SD endpoints.
+ TServiceDiscoveryEndpointsConfigPtr Endpoints;
+
REGISTER_YSON_STRUCT(TBalancingChannelConfig);
static void Register(TRegistrar registrar);
diff --git a/yt/yt/core/rpc/public.h b/yt/yt/core/rpc/public.h
index 45f6b44a82..b4f7694947 100644
--- a/yt/yt/core/rpc/public.h
+++ b/yt/yt/core/rpc/public.h
@@ -116,6 +116,7 @@ DECLARE_REFCOUNTED_CLASS(TRetryingChannelConfig)
DECLARE_REFCOUNTED_CLASS(TViablePeerRegistryConfig)
DECLARE_REFCOUNTED_CLASS(TDynamicChannelPoolConfig)
DECLARE_REFCOUNTED_CLASS(TServiceDiscoveryEndpointsConfig)
+DECLARE_REFCOUNTED_CLASS(TBalancingChannelConfigBase)
DECLARE_REFCOUNTED_CLASS(TBalancingChannelConfig)
DECLARE_REFCOUNTED_CLASS(TThrottlingChannelConfig)
DECLARE_REFCOUNTED_CLASS(TThrottlingChannelDynamicConfig)
diff --git a/yt/yt/core/rpc/service_detail.cpp b/yt/yt/core/rpc/service_detail.cpp
index 09cc08b0a0..d23ef9ba4c 100644
--- a/yt/yt/core/rpc/service_detail.cpp
+++ b/yt/yt/core/rpc/service_detail.cpp
@@ -57,7 +57,7 @@ constexpr auto ServiceLivenessCheckPeriod = TDuration::MilliSeconds(100);
TRequestQueuePtr CreateRequestQueue(const std::string& name, const NProfiling::TProfiler& profiler)
{
// TODO(babenko): migrate to std::string
- return New<TRequestQueue>(name, profiler.WithTag("user", TString(name)));
+ return New<TRequestQueue>(name, profiler.WithTag("user", std::string(name)));
}
////////////////////////////////////////////////////////////////////////////////
@@ -1643,7 +1643,7 @@ TServiceBase::TServiceBase(
, MemoryUsageTracker_(std::move(options.MemoryUsageTracker))
, Profiler_(RpcServerProfiler
.WithHot(options.UseHotProfiler)
- .WithTag("yt_service", TString(ServiceId_.ServiceName)))
+ .WithTag("yt_service", ServiceId_.ServiceName))
, AuthenticationTimer_(Profiler_.Timer("/authentication_time"))
, ServiceLivenessChecker_(New<TPeriodicExecutor>(
TDispatcher::Get()->GetLightInvoker(),
@@ -1933,8 +1933,7 @@ void TServiceBase::RegisterRequestQueue(
auto profiler = runtimeInfo->Profiler.WithSparse();
if (runtimeInfo->Descriptor.RequestQueueProvider) {
- // TODO(babenko): switch to std::string
- profiler = profiler.WithTag("queue", TString(requestQueue->GetName()));
+ profiler = profiler.WithTag("queue", requestQueue->GetName());
}
profiler.AddFuncGauge("/request_queue_size", MakeStrong(this), [=] {
return requestQueue->GetQueueSize();
@@ -2327,12 +2326,11 @@ TServiceBase::TMethodPerformanceCountersPtr TServiceBase::CreateMethodPerformanc
auto profiler = runtimeInfo->Profiler.WithSparse();
if (userTag) {
- // TODO(babenko): switch to std::string
- profiler = profiler.WithTag("user", TString(userTag));
+ // TODO(babenko): migrate to std::string
+ profiler = profiler.WithTag("user", std::string(userTag));
}
if (runtimeInfo->Descriptor.RequestQueueProvider) {
- // TODO(babenko): switch to std::string
- profiler = profiler.WithTag("queue", TString(requestQueue->GetName()));
+ profiler = profiler.WithTag("queue", requestQueue->GetName());
}
return New<TMethodPerformanceCounters>(profiler, TimeHistogramConfig_.Acquire());
}
diff --git a/yt/yt/library/tracing/jaeger/sampler.cpp b/yt/yt/library/tracing/jaeger/sampler.cpp
index d33623f532..8e7d1ab1ab 100644
--- a/yt/yt/library/tracing/jaeger/sampler.cpp
+++ b/yt/yt/library/tracing/jaeger/sampler.cpp
@@ -58,8 +58,7 @@ void TSampler::SampleTraceContext(const std::string& user, const TTraceContextPt
auto [userState, inserted] = Users_.FindOrInsert(user, [&] {
auto state = New<TUserState>();
- // TODO(babenko): switch to std::string
- auto profiler = Profiler_.WithSparse().WithTag("user", TString(user));
+ auto profiler = Profiler_.WithSparse().WithTag("user", user);
state->TracesSampledByUser = profiler.Counter("/traces_sampled_by_user");
state->TracesSampledByProbability = profiler.Counter("/traces_sampled_by_probability");
diff --git a/yt/yt/library/ytprof/allocation_tag_profiler/allocation_tag_profiler.cpp b/yt/yt/library/ytprof/allocation_tag_profiler/allocation_tag_profiler.cpp
index f79052d105..a2cccc2241 100644
--- a/yt/yt/library/ytprof/allocation_tag_profiler/allocation_tag_profiler.cpp
+++ b/yt/yt/library/ytprof/allocation_tag_profiler/allocation_tag_profiler.cpp
@@ -69,8 +69,7 @@ private:
auto it = guages.find(tagValue);
if (it == guages.end()) {
it = guages.emplace(tagValue, Profiler_
- // TODO(babenko): migrate to std::string
- .WithTag(TString(tagKey), TString(tagValue))
+ .WithTag(tagKey, tagValue)
.Gauge(Format("/%v", NYPath::ToYPathLiteral(tagKey))))
.first;
it->second.Update(usage);