author     Alexander Smirnov <alex@ydb.tech>  2025-05-31 13:51:12 +0000
committer  Alexander Smirnov <alex@ydb.tech>  2025-05-31 13:51:12 +0000
commit     089c8af726ff1d67faeb603dc66b8a11e69c32c3 (patch)
tree       77ffdf8bca0a143448734c92b4e118413c7aa5dd
parent     2b00537e14e6dc51140ea1abed802b2949530d4c (diff)
parent     e571997e526ada1be8d4f72d2a9303b9947d5881 (diff)
download   ydb-089c8af726ff1d67faeb603dc66b8a11e69c32c3.tar.gz
Merge branch 'rightlib' into merge-libs-250531-1349
-rw-r--r--  contrib/python/Flask-Cors/py3/.dist-info/METADATA | 4
-rw-r--r--  contrib/python/Flask-Cors/py3/flask_cors/core.py | 63
-rw-r--r--  contrib/python/Flask-Cors/py3/flask_cors/extension.py | 8
-rw-r--r--  contrib/python/Flask-Cors/py3/flask_cors/version.py | 2
-rw-r--r--  contrib/python/Flask-Cors/py3/ya.make | 2
-rw-r--r--  contrib/python/pluggy/py3/.dist-info/METADATA | 27
-rw-r--r--  contrib/python/pluggy/py3/pluggy/__init__.py | 9
-rw-r--r--  contrib/python/pluggy/py3/pluggy/_callers.py | 185
-rw-r--r--  contrib/python/pluggy/py3/pluggy/_hooks.py | 43
-rw-r--r--  contrib/python/pluggy/py3/pluggy/_manager.py | 71
-rw-r--r--  contrib/python/pluggy/py3/pluggy/_result.py | 15
-rw-r--r--  contrib/python/pluggy/py3/pluggy/_tracing.py | 5
-rw-r--r--  contrib/python/pluggy/py3/pluggy/_version.py | 13
-rw-r--r--  contrib/python/pluggy/py3/ya.make | 2
-rw-r--r--  contrib/python/types-protobuf/.dist-info/METADATA | 4
-rw-r--r--  contrib/python/types-protobuf/README.md | 2
-rw-r--r--  contrib/python/types-protobuf/google-stubs/protobuf/descriptor.pyi | 21
-rw-r--r--  contrib/python/types-protobuf/google-stubs/protobuf/descriptor_pool.pyi | 6
-rw-r--r--  contrib/python/types-protobuf/google-stubs/protobuf/internal/well_known_types.pyi | 2
-rw-r--r--  contrib/python/types-protobuf/ya.make | 2
-rw-r--r--  library/cpp/threading/chunk_queue/queue.h | 4
-rw-r--r--  util/datetime/uptime.cpp | 11
-rw-r--r--  yt/cpp/mapreduce/common/retry_lib.cpp | 1
-rw-r--r--  yt/cpp/mapreduce/interface/error_codes.h | 13
-rw-r--r--  yt/python/yt/common.py | 4
-rw-r--r--  yt/yt/client/api/rpc_proxy/client_impl.cpp | 25
-rw-r--r--  yt/yt/client/api/rpc_proxy/client_impl.h | 4
-rw-r--r--  yt/yt/client/api/rpc_proxy/helpers.cpp | 14
-rw-r--r--  yt/yt/client/api/rpc_proxy/helpers.h | 5
-rw-r--r--  yt/yt/client/ypath/parser_detail.cpp | 21
-rw-r--r--  yt/yt/library/formats/protobuf_writer.cpp | 4
31 files changed, 271 insertions, 321 deletions
diff --git a/contrib/python/Flask-Cors/py3/.dist-info/METADATA b/contrib/python/Flask-Cors/py3/.dist-info/METADATA
index 39f042019ad..4f796aaabed 100644
--- a/contrib/python/Flask-Cors/py3/.dist-info/METADATA
+++ b/contrib/python/Flask-Cors/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.4
Name: flask-cors
-Version: 5.0.1
+Version: 6.0.0
Summary: A Flask extension simplifying CORS support
Author-email: Cory Dolphin <corydolphin@gmail.com>
Project-URL: Homepage, https://corydolphin.github.io/flask-cors/
diff --git a/contrib/python/Flask-Cors/py3/flask_cors/core.py b/contrib/python/Flask-Cors/py3/flask_cors/core.py
index 0ad0d1da629..5773b0beb8b 100644
--- a/contrib/python/Flask-Cors/py3/flask_cors/core.py
+++ b/contrib/python/Flask-Cors/py3/flask_cors/core.py
@@ -69,14 +69,17 @@ def parse_resources(resources):
# resource of '*', which is not actually a valid regexp.
resources = [(re_fix(k), v) for k, v in resources.items()]
- # Sort by regex length to provide consistency of matching and
- # to provide a proxy for specificity of match. E.G. longer
- # regular expressions are tried first.
- def pattern_length(pair):
- maybe_regex, _ = pair
- return len(get_regexp_pattern(maybe_regex))
+ # Sort patterns with static (literal) paths first, then by regex specificity
+ def sort_key(pair):
+ pattern, _ = pair
+ if isinstance(pattern, RegexObject):
+ return (1, 0, pattern.pattern.count("/"), -len(pattern.pattern))
+ elif probably_regex(pattern):
+ return (1, 1, pattern.count("/"), -len(pattern))
+ else:
+ return (0, 0, pattern.count("/"), -len(pattern))
- return sorted(resources, key=pattern_length, reverse=True)
+ return sorted(resources, key=sort_key)
elif isinstance(resources, str):
return [(re_fix(resources), {})]
@@ -121,9 +124,10 @@ def get_cors_origins(options, request_origin):
if wildcard and options.get("send_wildcard"):
LOG.debug("Allowed origins are set to '*'. Sending wildcard CORS header.")
return ["*"]
- # If the value of the Origin header is a case-sensitive match
- # for any of the values in list of origins
- elif try_match_any(request_origin, origins):
+ # If the value of the Origin header is a case-insensitive match
+ # for any of the values in list of origins.
+ # NOTE: Per RFC 1035 and RFC 4343 schemes and hostnames are case insensitive.
+ elif try_match_any_pattern(request_origin, origins, caseSensitive=False):
LOG.debug(
"The request's Origin header matches. Sending CORS headers.",
)
@@ -164,7 +168,7 @@ def get_allow_headers(options, acl_request_headers):
request_headers = [h.strip() for h in acl_request_headers.split(",")]
# any header that matches in the allow_headers
- matching_headers = filter(lambda h: try_match_any(h, options.get("allow_headers")), request_headers)
+ matching_headers = filter(lambda h: try_match_any_pattern(h, options.get("allow_headers"), caseSensitive=False), request_headers)
return ", ".join(sorted(matching_headers))
@@ -277,22 +281,31 @@ def re_fix(reg):
return r".*" if reg == r"*" else reg
-def try_match_any(inst, patterns):
- return any(try_match(inst, pattern) for pattern in patterns)
-
+def try_match_any_pattern(inst, patterns, caseSensitive=True):
+ return any(try_match_pattern(inst, pattern, caseSensitive) for pattern in patterns)
-def try_match(request_origin, maybe_regex):
- """Safely attempts to match a pattern or string to a request origin."""
- if isinstance(maybe_regex, RegexObject):
- return re.match(maybe_regex, request_origin)
- elif probably_regex(maybe_regex):
- return re.match(maybe_regex, request_origin, flags=re.IGNORECASE)
- else:
+def try_match_pattern(value, pattern, caseSensitive=True):
+ """
+ Safely attempts to match a pattern or string to a value. This
+ function can be used to match request origins, headers, or paths.
+ The value of caseSensitive should be set in accordance to the
+ data being compared e.g. origins and headers are case insensitive
+ whereas paths are case-sensitive
+ """
+ if isinstance(pattern, RegexObject):
+ return re.match(pattern, value)
+ if probably_regex(pattern):
+ flags = 0 if caseSensitive else re.IGNORECASE
try:
- return request_origin.lower() == maybe_regex.lower()
- except AttributeError:
- return request_origin == maybe_regex
-
+ return re.match(pattern, value, flags=flags)
+ except re.error:
+ return False
+ try:
+ v = str(value)
+ p = str(pattern)
+ return v == p if caseSensitive else v.casefold() == p.casefold()
+ except Exception:
+ return value == pattern
def get_cors_options(appInstance, *dicts):
"""
diff --git a/contrib/python/Flask-Cors/py3/flask_cors/extension.py b/contrib/python/Flask-Cors/py3/flask_cors/extension.py
index 87e55b7bddd..434f65eaa20 100644
--- a/contrib/python/Flask-Cors/py3/flask_cors/extension.py
+++ b/contrib/python/Flask-Cors/py3/flask_cors/extension.py
@@ -1,9 +1,9 @@
import logging
-from urllib.parse import unquote_plus
+from urllib.parse import unquote
from flask import request
-from .core import ACL_ORIGIN, get_cors_options, get_regexp_pattern, parse_resources, set_cors_headers, try_match
+from .core import ACL_ORIGIN, get_cors_options, get_regexp_pattern, parse_resources, set_cors_headers, try_match_pattern
LOG = logging.getLogger(__name__)
@@ -188,9 +188,9 @@ def make_after_request_function(resources):
if resp.headers is not None and resp.headers.get(ACL_ORIGIN):
LOG.debug("CORS have been already evaluated, skipping")
return resp
- normalized_path = unquote_plus(request.path)
+ normalized_path = unquote(request.path)
for res_regex, res_options in resources:
- if try_match(normalized_path, res_regex):
+ if try_match_pattern(normalized_path, res_regex, caseSensitive=True):
LOG.debug(
"Request to '%r' matches CORS resource '%s'. Using options: %s",
request.path,
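
Note: the extension.py hunk swaps unquote_plus for unquote when normalizing the request path before matching. A quick illustration of the difference (plus-to-space decoding applies to form-encoded query strings, not to URL paths):

from urllib.parse import unquote, unquote_plus

path = "/api/a+b%20c"
print(unquote_plus(path))  # '/api/a b c'  - old behaviour: '+' collapsed to a space
print(unquote(path))       # '/api/a+b c'  - new behaviour: '+' kept intact in the path
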
diff --git a/contrib/python/Flask-Cors/py3/flask_cors/version.py b/contrib/python/Flask-Cors/py3/flask_cors/version.py
index 2fe5fde13bb..0f607a5d2d6 100644
--- a/contrib/python/Flask-Cors/py3/flask_cors/version.py
+++ b/contrib/python/Flask-Cors/py3/flask_cors/version.py
@@ -1 +1 @@
-__version__ = "5.0.1"
+__version__ = "6.0.0"
diff --git a/contrib/python/Flask-Cors/py3/ya.make b/contrib/python/Flask-Cors/py3/ya.make
index 56b564e1d27..631966d8c5c 100644
--- a/contrib/python/Flask-Cors/py3/ya.make
+++ b/contrib/python/Flask-Cors/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(5.0.1)
+VERSION(6.0.0)
LICENSE(MIT)
diff --git a/contrib/python/pluggy/py3/.dist-info/METADATA b/contrib/python/pluggy/py3/.dist-info/METADATA
index 2d697b0d721..12345f88ebe 100644
--- a/contrib/python/pluggy/py3/.dist-info/METADATA
+++ b/contrib/python/pluggy/py3/.dist-info/METADATA
@@ -1,15 +1,9 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
Name: pluggy
-Version: 1.5.0
+Version: 1.6.0
Summary: plugin and hook calling mechanisms for python
-Home-page: https://github.com/pytest-dev/pluggy
-Author: Holger Krekel
-Author-email: holger@merlinux.eu
+Author-email: Holger Krekel <holger@merlinux.eu>
License: MIT
-Platform: unix
-Platform: linux
-Platform: osx
-Platform: win32
Classifier: Development Status :: 6 - Mature
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
@@ -23,19 +17,22 @@ Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
-Requires-Python: >=3.8
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Requires-Python: >=3.9
Description-Content-Type: text/x-rst
License-File: LICENSE
Provides-Extra: dev
-Requires-Dist: pre-commit ; extra == 'dev'
-Requires-Dist: tox ; extra == 'dev'
+Requires-Dist: pre-commit; extra == "dev"
+Requires-Dist: tox; extra == "dev"
Provides-Extra: testing
-Requires-Dist: pytest ; extra == 'testing'
-Requires-Dist: pytest-benchmark ; extra == 'testing'
+Requires-Dist: pytest; extra == "testing"
+Requires-Dist: pytest-benchmark; extra == "testing"
+Requires-Dist: coverage; extra == "testing"
+Dynamic: license-file
====================================================
pluggy - A minimalist production ready plugin system
diff --git a/contrib/python/pluggy/py3/pluggy/__init__.py b/contrib/python/pluggy/py3/pluggy/__init__.py
index 36ce1680621..8a651f499d9 100644
--- a/contrib/python/pluggy/py3/pluggy/__init__.py
+++ b/contrib/python/pluggy/py3/pluggy/__init__.py
@@ -1,10 +1,3 @@
-try:
- from ._version import version as __version__
-except ImportError:
- # broken installation, we don't even try
- # unknown only works because we do poor mans version compare
- __version__ = "unknown"
-
__all__ = [
"__version__",
"PluginManager",
@@ -21,7 +14,6 @@ __all__ = [
"PluggyWarning",
"PluggyTeardownRaisedWarning",
]
-
from ._hooks import HookCaller
from ._hooks import HookImpl
from ._hooks import HookimplMarker
@@ -33,5 +25,6 @@ from ._manager import PluginManager
from ._manager import PluginValidationError
from ._result import HookCallError
from ._result import Result
+from ._version import version as __version__
from ._warnings import PluggyTeardownRaisedWarning
from ._warnings import PluggyWarning
diff --git a/contrib/python/pluggy/py3/pluggy/_callers.py b/contrib/python/pluggy/py3/pluggy/_callers.py
index d01f925cca2..472d5dd05be 100644
--- a/contrib/python/pluggy/py3/pluggy/_callers.py
+++ b/contrib/python/pluggy/py3/pluggy/_callers.py
@@ -4,13 +4,11 @@ Call loop machinery
from __future__ import annotations
+from collections.abc import Generator
+from collections.abc import Mapping
+from collections.abc import Sequence
from typing import cast
-from typing import Generator
-from typing import Mapping
from typing import NoReturn
-from typing import Sequence
-from typing import Tuple
-from typing import Union
import warnings
from ._hooks import HookImpl
@@ -21,22 +19,47 @@ from ._warnings import PluggyTeardownRaisedWarning
# Need to distinguish between old- and new-style hook wrappers.
# Wrapping with a tuple is the fastest type-safe way I found to do it.
-Teardown = Union[
- Tuple[Generator[None, Result[object], None], HookImpl],
- Generator[None, object, object],
-]
+Teardown = Generator[None, object, object]
+
+
+def run_old_style_hookwrapper(
+ hook_impl: HookImpl, hook_name: str, args: Sequence[object]
+) -> Teardown:
+ """
+ backward compatibility wrapper to run a old style hookwrapper as a wrapper
+ """
+
+ teardown: Teardown = cast(Teardown, hook_impl.function(*args))
+ try:
+ next(teardown)
+ except StopIteration:
+ _raise_wrapfail(teardown, "did not yield")
+ try:
+ res = yield
+ result = Result(res, None)
+ except BaseException as exc:
+ result = Result(None, exc)
+ try:
+ teardown.send(result)
+ except StopIteration:
+ pass
+ except BaseException as e:
+ _warn_teardown_exception(hook_name, hook_impl, e)
+ raise
+ else:
+ _raise_wrapfail(teardown, "has second yield")
+ finally:
+ teardown.close()
+ return result.get_result()
def _raise_wrapfail(
- wrap_controller: (
- Generator[None, Result[object], None] | Generator[None, object, object]
- ),
+ wrap_controller: Generator[None, object, object],
msg: str,
) -> NoReturn:
- co = wrap_controller.gi_code
+ co = wrap_controller.gi_code # type: ignore[attr-defined]
raise RuntimeError(
- "wrap_controller at %r %s:%d %s"
- % (co.co_name, co.co_filename, co.co_firstlineno, msg)
+ f"wrap_controller at {co.co_name!r} {co.co_filename}:{co.co_firstlineno} {msg}"
)
@@ -47,7 +70,7 @@ def _warn_teardown_exception(
msg += f"Plugin: {hook_impl.plugin_name}, Hook: {hook_name}\n"
msg += f"{type(e).__name__}: {e}\n"
msg += "For more information see https://pluggy.readthedocs.io/en/stable/api_reference.html#pluggy.PluggyTeardownRaisedWarning" # noqa: E501
- warnings.warn(PluggyTeardownRaisedWarning(msg), stacklevel=5)
+ warnings.warn(PluggyTeardownRaisedWarning(msg), stacklevel=6)
def _multicall(
@@ -64,31 +87,26 @@ def _multicall(
__tracebackhide__ = True
results: list[object] = []
exception = None
- only_new_style_wrappers = True
try: # run impl and wrapper setup functions in a loop
teardowns: list[Teardown] = []
try:
for hook_impl in reversed(hook_impls):
try:
args = [caller_kwargs[argname] for argname in hook_impl.argnames]
- except KeyError:
- for argname in hook_impl.argnames:
+ except KeyError as e:
+ # coverage bug - this is tested
+ for argname in hook_impl.argnames: # pragma: no cover
if argname not in caller_kwargs:
raise HookCallError(
f"hook call must provide argument {argname!r}"
- )
+ ) from e
if hook_impl.hookwrapper:
- only_new_style_wrappers = False
- try:
- # If this cast is not valid, a type error is raised below,
- # which is the desired response.
- res = hook_impl.function(*args)
- wrapper_gen = cast(Generator[None, Result[object], None], res)
- next(wrapper_gen) # first yield
- teardowns.append((wrapper_gen, hook_impl))
- except StopIteration:
- _raise_wrapfail(wrapper_gen, "did not yield")
+ function_gen = run_old_style_hookwrapper(hook_impl, hook_name, args)
+
+ next(function_gen) # first yield
+ teardowns.append(function_gen)
+
elif hook_impl.wrapper:
try:
# If this cast is not valid, a type error is raised below,
@@ -108,75 +126,44 @@ def _multicall(
except BaseException as exc:
exception = exc
finally:
- # Fast path - only new-style wrappers, no Result.
- if only_new_style_wrappers:
- if firstresult: # first result hooks return a single value
- result = results[0] if results else None
- else:
- result = results
-
- # run all wrapper post-yield blocks
- for teardown in reversed(teardowns):
- try:
- if exception is not None:
- teardown.throw(exception) # type: ignore[union-attr]
- else:
- teardown.send(result) # type: ignore[union-attr]
- # Following is unreachable for a well behaved hook wrapper.
- # Try to force finalizers otherwise postponed till GC action.
- # Note: close() may raise if generator handles GeneratorExit.
- teardown.close() # type: ignore[union-attr]
- except StopIteration as si:
- result = si.value
- exception = None
- continue
- except BaseException as e:
- exception = e
- continue
- _raise_wrapfail(teardown, "has second yield") # type: ignore[arg-type]
-
- if exception is not None:
- raise exception.with_traceback(exception.__traceback__)
- else:
- return result
-
- # Slow path - need to support old-style wrappers.
+ if firstresult: # first result hooks return a single value
+ result = results[0] if results else None
else:
- if firstresult: # first result hooks return a single value
- outcome: Result[object | list[object]] = Result(
- results[0] if results else None, exception
- )
- else:
- outcome = Result(results, exception)
-
- # run all wrapper post-yield blocks
- for teardown in reversed(teardowns):
- if isinstance(teardown, tuple):
- try:
- teardown[0].send(outcome)
- except StopIteration:
- pass
- except BaseException as e:
- _warn_teardown_exception(hook_name, teardown[1], e)
- raise
- else:
- _raise_wrapfail(teardown[0], "has second yield")
- else:
+ result = results
+
+ # run all wrapper post-yield blocks
+ for teardown in reversed(teardowns):
+ try:
+ if exception is not None:
try:
- if outcome._exception is not None:
- teardown.throw(outcome._exception)
+ teardown.throw(exception)
+ except RuntimeError as re:
+ # StopIteration from generator causes RuntimeError
+ # even for coroutine usage - see #544
+ if (
+ isinstance(exception, StopIteration)
+ and re.__cause__ is exception
+ ):
+ teardown.close()
+ continue
else:
- teardown.send(outcome._result)
- # Following is unreachable for a well behaved hook wrapper.
- # Try to force finalizers otherwise postponed till GC action.
- # Note: close() may raise if generator handles GeneratorExit.
- teardown.close()
- except StopIteration as si:
- outcome.force_result(si.value)
- continue
- except BaseException as e:
- outcome.force_exception(e)
- continue
- _raise_wrapfail(teardown, "has second yield")
-
- return outcome.get_result()
+ raise
+ else:
+ teardown.send(result)
+ # Following is unreachable for a well behaved hook wrapper.
+ # Try to force finalizers otherwise postponed till GC action.
+ # Note: close() may raise if generator handles GeneratorExit.
+ teardown.close()
+ except StopIteration as si:
+ result = si.value
+ exception = None
+ continue
+ except BaseException as e:
+ exception = e
+ continue
+ _raise_wrapfail(teardown, "has second yield")
+
+ if exception is not None:
+ raise exception
+ else:
+ return result
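
Note: the _callers.py rewrite folds old-style hookwrapper support into run_old_style_hookwrapper, so _multicall now drives a single kind of teardown generator. A small sketch of the two wrapper styles as user code sees them, using pluggy's public markers (the printed value assumes both +1 wrappers compose as described):

import pluggy

hookspec = pluggy.HookspecMarker("demo")
hookimpl = pluggy.HookimplMarker("demo")

class Spec:
    @hookspec(firstresult=True)
    def myhook(self, arg): ...

class Impl:
    @hookimpl
    def myhook(self, arg):
        return arg

class OldStyleWrapper:
    @hookimpl(hookwrapper=True)      # old style: yields and receives a Result object
    def myhook(self, arg):
        outcome = yield
        outcome.force_result(outcome.get_result() + 1)

class NewStyleWrapper:
    @hookimpl(wrapper=True)          # new style: yields and works with the plain value
    def myhook(self, arg):
        res = yield
        return res + 1

pm = pluggy.PluginManager("demo")
pm.add_hookspecs(Spec)
for plugin in (Impl(), OldStyleWrapper(), NewStyleWrapper()):
    pm.register(plugin)

print(pm.hook.myhook(arg=1))  # prints 3: the base impl returns 1, each wrapper adds 1
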
diff --git a/contrib/python/pluggy/py3/pluggy/_hooks.py b/contrib/python/pluggy/py3/pluggy/_hooks.py
index 362d791823e..97fef0d75fc 100644
--- a/contrib/python/pluggy/py3/pluggy/_hooks.py
+++ b/contrib/python/pluggy/py3/pluggy/_hooks.py
@@ -4,21 +4,19 @@ Internal hook annotation, representation and calling machinery.
from __future__ import annotations
+from collections.abc import Generator
+from collections.abc import Mapping
+from collections.abc import Sequence
+from collections.abc import Set
import inspect
import sys
from types import ModuleType
-from typing import AbstractSet
from typing import Any
from typing import Callable
from typing import Final
from typing import final
-from typing import Generator
-from typing import List
-from typing import Mapping
from typing import Optional
from typing import overload
-from typing import Sequence
-from typing import Tuple
from typing import TYPE_CHECKING
from typing import TypedDict
from typing import TypeVar
@@ -34,7 +32,7 @@ _Namespace = Union[ModuleType, type]
_Plugin = object
_HookExec = Callable[
[str, Sequence["HookImpl"], Mapping[str, object], bool],
- Union[object, List[object]],
+ Union[object, list[object]],
]
_HookImplFunction = Callable[..., Union[_T, Generator[None, Result[_T], None]]]
@@ -302,12 +300,12 @@ def varnames(func: object) -> tuple[tuple[str, ...], tuple[str, ...]]:
if inspect.isclass(func):
try:
func = func.__init__
- except AttributeError:
+ except AttributeError: # pragma: no cover - pypy special case
return (), ()
elif not inspect.isroutine(func): # callable object?
try:
func = getattr(func, "__call__", func)
- except Exception:
+ except Exception: # pragma: no cover - pypy special case
return (), ()
try:
@@ -315,7 +313,7 @@ def varnames(func: object) -> tuple[tuple[str, ...], tuple[str, ...]]:
sig = inspect.signature(
func.__func__ if inspect.ismethod(func) else func # type:ignore[arg-type]
)
- except TypeError:
+ except TypeError: # pragma: no cover
return (), ()
_valid_param_kinds = (
@@ -347,7 +345,7 @@ def varnames(func: object) -> tuple[tuple[str, ...], tuple[str, ...]]:
# pypy3 uses "obj" instead of "self" for default dunder methods
if not _PYPY:
implicit_names: tuple[str, ...] = ("self",)
- else:
+ else: # pragma: no cover
implicit_names = ("self", "obj")
if args:
qualname: str = getattr(func, "__qualname__", "")
@@ -376,7 +374,7 @@ class HookRelay:
_HookRelay = HookRelay
-_CallHistory = List[Tuple[Mapping[str, object], Optional[Callable[[Any], None]]]]
+_CallHistory = list[tuple[Mapping[str, object], Optional[Callable[[Any], None]]]]
class HookCaller:
@@ -485,12 +483,13 @@ class HookCaller:
notincall = ", ".join(
repr(argname)
for argname in self.spec.argnames
- # Avoid self.spec.argnames - kwargs.keys() - doesn't preserve order.
+ # Avoid self.spec.argnames - kwargs.keys()
+ # it doesn't preserve order.
if argname not in kwargs.keys()
)
warnings.warn(
- "Argument(s) {} which are declared in the hookspec "
- "cannot be found in this hook call".format(notincall),
+ f"Argument(s) {notincall} which are declared in the hookspec "
+ "cannot be found in this hook call",
stacklevel=2,
)
break
@@ -504,9 +503,9 @@ class HookCaller:
Returns the result(s) of calling all registered plugins, see
:ref:`calling`.
"""
- assert (
- not self.is_historic()
- ), "Cannot directly call a historic hook - use call_historic instead."
+ assert not self.is_historic(), (
+ "Cannot directly call a historic hook - use call_historic instead."
+ )
self._verify_all_args_are_provided(kwargs)
firstresult = self.spec.opts.get("firstresult", False) if self.spec else False
# Copy because plugins may register other plugins during iteration (#438).
@@ -545,9 +544,9 @@ class HookCaller:
"""Call the hook with some additional temporarily participating
methods using the specified ``kwargs`` as call parameters, see
:ref:`call_extra`."""
- assert (
- not self.is_historic()
- ), "Cannot directly call a historic hook - use call_historic instead."
+ assert not self.is_historic(), (
+ "Cannot directly call a historic hook - use call_historic instead."
+ )
self._verify_all_args_are_provided(kwargs)
opts: HookimplOpts = {
"wrapper": False,
@@ -608,7 +607,7 @@ class _SubsetHookCaller(HookCaller):
"_remove_plugins",
)
- def __init__(self, orig: HookCaller, remove_plugins: AbstractSet[_Plugin]) -> None:
+ def __init__(self, orig: HookCaller, remove_plugins: Set[_Plugin]) -> None:
self._orig = orig
self._remove_plugins = remove_plugins
self.name = orig.name # type: ignore[misc]
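
Note: the typing changes in _hooks.py (and the sibling pluggy modules below) all follow one pattern: deprecated typing aliases are replaced by collections.abc imports and PEP 585 builtin generics, which requires Python >= 3.9 and matches the new Requires-Python bound in the METADATA hunk. A compressed before/after sketch (the helper name drop is illustrative only):

# Before (Python 3.8-compatible typing aliases):
#   from typing import AbstractSet, List, Mapping, Tuple
#   _CallHistory = List[Tuple[Mapping[str, object], Optional[Callable[[Any], None]]]]

# After (collections.abc + builtin generics, Python >= 3.9):
from collections.abc import Mapping, Set
from typing import Any, Callable, Optional

_CallHistory = list[tuple[Mapping[str, object], Optional[Callable[[Any], None]]]]

def drop(plugins: Set[object]) -> None:  # collections.abc.Set replaces typing.AbstractSet
    ...
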
diff --git a/contrib/python/pluggy/py3/pluggy/_manager.py b/contrib/python/pluggy/py3/pluggy/_manager.py
index 9998dd815b5..ff1e3ce6e30 100644
--- a/contrib/python/pluggy/py3/pluggy/_manager.py
+++ b/contrib/python/pluggy/py3/pluggy/_manager.py
@@ -1,14 +1,14 @@
from __future__ import annotations
+from collections.abc import Iterable
+from collections.abc import Mapping
+from collections.abc import Sequence
import inspect
import types
from typing import Any
from typing import Callable
from typing import cast
from typing import Final
-from typing import Iterable
-from typing import Mapping
-from typing import Sequence
from typing import TYPE_CHECKING
import warnings
@@ -70,7 +70,7 @@ class DistFacade:
name: str = self.metadata["name"]
return name
- def __getattr__(self, attr: str, default=None):
+ def __getattr__(self, attr: str, default: Any | None = None) -> Any:
return getattr(self._dist, attr, default)
def __dir__(self) -> list[str]:
@@ -138,14 +138,14 @@ class PluginManager:
if self._name2plugin.get(plugin_name, -1) is None:
return None # blocked plugin, return None to indicate no registration
raise ValueError(
- "Plugin name already registered: %s=%s\n%s"
- % (plugin_name, plugin, self._name2plugin)
+ "Plugin name already registered: "
+ f"{plugin_name}={plugin}\n{self._name2plugin}"
)
if plugin in self._name2plugin.values():
raise ValueError(
- "Plugin already registered under a different name: %s=%s\n%s"
- % (plugin_name, plugin, self._name2plugin)
+ "Plugin already registered under a different name: "
+ f"{plugin_name}={plugin}\n{self._name2plugin}"
)
# XXX if an error happens we should make sure no state has been
@@ -188,11 +188,11 @@ class PluginManager:
res: HookimplOpts | None = getattr(
method, self.project_name + "_impl", None
)
- except Exception:
- res = {} # type: ignore[assignment]
+ except Exception: # pragma: no cover
+ res = {} # type: ignore[assignment] #pragma: no cover
if res is not None and not isinstance(res, dict):
# false positive
- res = None # type:ignore[unreachable]
+ res = None # type:ignore[unreachable] #pragma: no cover
return res
def unregister(
@@ -329,8 +329,8 @@ class PluginManager:
if hook.is_historic() and (hookimpl.hookwrapper or hookimpl.wrapper):
raise PluginValidationError(
hookimpl.plugin,
- "Plugin %r\nhook %r\nhistoric incompatible with yield/wrapper/hookwrapper"
- % (hookimpl.plugin_name, hook.name),
+ f"Plugin {hookimpl.plugin_name!r}\nhook {hook.name!r}\n"
+ "historic incompatible with yield/wrapper/hookwrapper",
)
assert hook.spec is not None
@@ -342,15 +342,10 @@ class PluginManager:
if notinspec:
raise PluginValidationError(
hookimpl.plugin,
- "Plugin %r for hook %r\nhookimpl definition: %s\n"
- "Argument(s) %s are declared in the hookimpl but "
- "can not be found in the hookspec"
- % (
- hookimpl.plugin_name,
- hook.name,
- _formatdef(hookimpl.function),
- notinspec,
- ),
+ f"Plugin {hookimpl.plugin_name!r} for hook {hook.name!r}\n"
+ f"hookimpl definition: {_formatdef(hookimpl.function)}\n"
+ f"Argument(s) {notinspec} are declared in the hookimpl but "
+ "can not be found in the hookspec",
)
if hook.spec.warn_on_impl_args:
@@ -364,18 +359,18 @@ class PluginManager:
) and not inspect.isgeneratorfunction(hookimpl.function):
raise PluginValidationError(
hookimpl.plugin,
- "Plugin %r for hook %r\nhookimpl definition: %s\n"
+ f"Plugin {hookimpl.plugin_name!r} for hook {hook.name!r}\n"
+ f"hookimpl definition: {_formatdef(hookimpl.function)}\n"
"Declared as wrapper=True or hookwrapper=True "
- "but function is not a generator function"
- % (hookimpl.plugin_name, hook.name, _formatdef(hookimpl.function)),
+ "but function is not a generator function",
)
if hookimpl.wrapper and hookimpl.hookwrapper:
raise PluginValidationError(
hookimpl.plugin,
- "Plugin %r for hook %r\nhookimpl definition: %s\n"
- "The wrapper=True and hookwrapper=True options are mutually exclusive"
- % (hookimpl.plugin_name, hook.name, _formatdef(hookimpl.function)),
+ f"Plugin {hookimpl.plugin_name!r} for hook {hook.name!r}\n"
+ f"hookimpl definition: {_formatdef(hookimpl.function)}\n"
+ "The wrapper=True and hookwrapper=True options are mutually exclusive",
)
def check_pending(self) -> None:
@@ -383,16 +378,16 @@ class PluginManager:
hook specification are optional, otherwise raise
:exc:`PluginValidationError`."""
for name in self.hook.__dict__:
- if name[0] != "_":
- hook: HookCaller = getattr(self.hook, name)
- if not hook.has_spec():
- for hookimpl in hook.get_hookimpls():
- if not hookimpl.optionalhook:
- raise PluginValidationError(
- hookimpl.plugin,
- "unknown hook %r in plugin %r"
- % (name, hookimpl.plugin),
- )
+ if name[0] == "_":
+ continue
+ hook: HookCaller = getattr(self.hook, name)
+ if not hook.has_spec():
+ for hookimpl in hook.get_hookimpls():
+ if not hookimpl.optionalhook:
+ raise PluginValidationError(
+ hookimpl.plugin,
+ f"unknown hook {name!r} in plugin {hookimpl.plugin!r}",
+ )
def load_setuptools_entrypoints(self, group: str, name: str | None = None) -> int:
"""Load modules from querying the specified setuptools ``group``.
diff --git a/contrib/python/pluggy/py3/pluggy/_result.py b/contrib/python/pluggy/py3/pluggy/_result.py
index f9a081c4f68..656a58416ca 100644
--- a/contrib/python/pluggy/py3/pluggy/_result.py
+++ b/contrib/python/pluggy/py3/pluggy/_result.py
@@ -10,12 +10,10 @@ from typing import cast
from typing import final
from typing import Generic
from typing import Optional
-from typing import Tuple
-from typing import Type
from typing import TypeVar
-_ExcInfo = Tuple[Type[BaseException], BaseException, Optional[TracebackType]]
+_ExcInfo = tuple[type[BaseException], BaseException, Optional[TracebackType]]
ResultType = TypeVar("ResultType")
@@ -28,7 +26,7 @@ class Result(Generic[ResultType]):
"""An object used to inspect and set the result in a :ref:`hook wrapper
<hookwrappers>`."""
- __slots__ = ("_result", "_exception")
+ __slots__ = ("_result", "_exception", "_traceback")
def __init__(
self,
@@ -38,6 +36,8 @@ class Result(Generic[ResultType]):
""":meta private:"""
self._result = result
self._exception = exception
+ # Exception __traceback__ is mutable, this keeps the original.
+ self._traceback = exception.__traceback__ if exception is not None else None
@property
def excinfo(self) -> _ExcInfo | None:
@@ -46,7 +46,7 @@ class Result(Generic[ResultType]):
if exc is None:
return None
else:
- return (type(exc), exc, exc.__traceback__)
+ return (type(exc), exc, self._traceback)
@property
def exception(self) -> BaseException | None:
@@ -75,6 +75,7 @@ class Result(Generic[ResultType]):
"""
self._result = result
self._exception = None
+ self._traceback = None
def force_exception(self, exception: BaseException) -> None:
"""Force the result to fail with ``exception``.
@@ -85,6 +86,7 @@ class Result(Generic[ResultType]):
"""
self._result = None
self._exception = exception
+ self._traceback = exception.__traceback__ if exception is not None else None
def get_result(self) -> ResultType:
"""Get the result(s) for this hook call.
@@ -94,10 +96,11 @@ class Result(Generic[ResultType]):
"""
__tracebackhide__ = True
exc = self._exception
+ tb = self._traceback
if exc is None:
return cast(ResultType, self._result)
else:
- raise exc.with_traceback(exc.__traceback__)
+ raise exc.with_traceback(tb)
# Historical name (pluggy<=1.2), kept for backward compatibility.
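
Note: the _result.py change snapshots the traceback at Result construction time because an exception's __traceback__ is mutated whenever the exception is re-raised. A rough standalone illustration of that mutation (not pluggy code):

import traceback

def boom():
    raise ValueError("x")

try:
    boom()
except ValueError as exc:
    saved = exc
    snapshot = exc.__traceback__            # what Result._traceback now keeps

before = len(traceback.extract_tb(saved.__traceback__))
try:
    raise saved                             # e.g. a wrapper re-raising the error
except ValueError:
    pass

assert saved.__traceback__ is not snapshot                      # head was replaced
assert len(traceback.extract_tb(saved.__traceback__)) > before  # chain grew
assert len(traceback.extract_tb(snapshot)) == before            # snapshot intact
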
diff --git a/contrib/python/pluggy/py3/pluggy/_tracing.py b/contrib/python/pluggy/py3/pluggy/_tracing.py
index cd238ad7e54..f0b36db1524 100644
--- a/contrib/python/pluggy/py3/pluggy/_tracing.py
+++ b/contrib/python/pluggy/py3/pluggy/_tracing.py
@@ -4,14 +4,13 @@ Tracing utils
from __future__ import annotations
+from collections.abc import Sequence
from typing import Any
from typing import Callable
-from typing import Sequence
-from typing import Tuple
_Writer = Callable[[str], object]
-_Processor = Callable[[Tuple[str, ...], Tuple[Any, ...]], object]
+_Processor = Callable[[tuple[str, ...], tuple[Any, ...]], object]
class TagTracer:
diff --git a/contrib/python/pluggy/py3/pluggy/_version.py b/contrib/python/pluggy/py3/pluggy/_version.py
index c565007eec7..6b8420c0cf0 100644
--- a/contrib/python/pluggy/py3/pluggy/_version.py
+++ b/contrib/python/pluggy/py3/pluggy/_version.py
@@ -1,8 +1,13 @@
-# file generated by setuptools_scm
+# file generated by setuptools-scm
# don't change, don't track in version control
+
+__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
TYPE_CHECKING = False
if TYPE_CHECKING:
- from typing import Tuple, Union
+ from typing import Tuple
+ from typing import Union
+
VERSION_TUPLE = Tuple[Union[int, str], ...]
else:
VERSION_TUPLE = object
@@ -12,5 +17,5 @@ __version__: str
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE
-__version__ = version = '1.5.0'
-__version_tuple__ = version_tuple = (1, 5, 0)
+__version__ = version = '1.6.0'
+__version_tuple__ = version_tuple = (1, 6, 0)
diff --git a/contrib/python/pluggy/py3/ya.make b/contrib/python/pluggy/py3/ya.make
index ac57e7a2b75..a2eb19c45d1 100644
--- a/contrib/python/pluggy/py3/ya.make
+++ b/contrib/python/pluggy/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(1.5.0)
+VERSION(1.6.0)
LICENSE(MIT)
diff --git a/contrib/python/types-protobuf/.dist-info/METADATA b/contrib/python/types-protobuf/.dist-info/METADATA
index 0fd53bbd2c4..0bf803fbc14 100644
--- a/contrib/python/types-protobuf/.dist-info/METADATA
+++ b/contrib/python/types-protobuf/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.4
Name: types-protobuf
-Version: 6.30.2.20250506
+Version: 6.30.2.20250516
Summary: Typing stubs for protobuf
License-Expression: Apache-2.0
Project-URL: Homepage, https://github.com/python/typeshed
@@ -46,4 +46,4 @@ mypy 1.15.0,
pyright 1.1.400,
and pytype 2024.10.11.
It was generated from typeshed commit
-[`4265ee7c72f476e7156949e55784fd82b40e6953`](https://github.com/python/typeshed/commit/4265ee7c72f476e7156949e55784fd82b40e6953).
+[`126768408a69b7a3a09b7d3992970b289f92937e`](https://github.com/python/typeshed/commit/126768408a69b7a3a09b7d3992970b289f92937e).
diff --git a/contrib/python/types-protobuf/README.md b/contrib/python/types-protobuf/README.md
index c35bf581ad2..5b485981e77 100644
--- a/contrib/python/types-protobuf/README.md
+++ b/contrib/python/types-protobuf/README.md
@@ -29,4 +29,4 @@ mypy 1.15.0,
pyright 1.1.400,
and pytype 2024.10.11.
It was generated from typeshed commit
-[`4265ee7c72f476e7156949e55784fd82b40e6953`](https://github.com/python/typeshed/commit/4265ee7c72f476e7156949e55784fd82b40e6953).
\ No newline at end of file
+[`126768408a69b7a3a09b7d3992970b289f92937e`](https://github.com/python/typeshed/commit/126768408a69b7a3a09b7d3992970b289f92937e).
\ No newline at end of file
diff --git a/contrib/python/types-protobuf/google-stubs/protobuf/descriptor.pyi b/contrib/python/types-protobuf/google-stubs/protobuf/descriptor.pyi
index 45891fda76c..c1c5fc33823 100644
--- a/contrib/python/types-protobuf/google-stubs/protobuf/descriptor.pyi
+++ b/contrib/python/types-protobuf/google-stubs/protobuf/descriptor.pyi
@@ -1,4 +1,3 @@
-from _typeshed import Incomplete
from typing import Any
from .descriptor_pb2 import (
@@ -71,17 +70,17 @@ class Descriptor(_NestedDescriptorBase):
nested_types: list[FieldDescriptor],
enum_types: list[EnumDescriptor],
extensions: list[FieldDescriptor],
- options: Incomplete | None = None,
- serialized_options: Incomplete | None = None,
+ options=None,
+ serialized_options=None,
is_extendable: bool | None = True,
- extension_ranges: Incomplete | None = None,
+ extension_ranges=None,
oneofs: list[OneofDescriptor] | None = None,
file: FileDescriptor | None = None,
- serialized_start: Incomplete | None = None,
- serialized_end: Incomplete | None = None,
+ serialized_start=None,
+ serialized_end=None,
syntax: str | None = None,
is_map_entry=False,
- create_key: Incomplete | None = None,
+ create_key=None,
): ...
def EnumValueName(self, enum, value): ...
def CopyToProto(self, proto): ...
@@ -264,11 +263,11 @@ class ServiceDescriptor(_NestedDescriptorBase):
index: int,
methods: list[MethodDescriptor],
options: ServiceOptions | None = None,
- serialized_options: Incomplete | None = None,
+ serialized_options=None,
file: FileDescriptor | None = None,
- serialized_start: Incomplete | None = None,
- serialized_end: Incomplete | None = None,
- create_key: Incomplete | None = None,
+ serialized_start=None,
+ serialized_end=None,
+ create_key=None,
): ...
def FindMethodByName(self, name): ...
def CopyToProto(self, proto): ...
diff --git a/contrib/python/types-protobuf/google-stubs/protobuf/descriptor_pool.pyi b/contrib/python/types-protobuf/google-stubs/protobuf/descriptor_pool.pyi
index f555c5d73e5..bb1d19004e8 100644
--- a/contrib/python/types-protobuf/google-stubs/protobuf/descriptor_pool.pyi
+++ b/contrib/python/types-protobuf/google-stubs/protobuf/descriptor_pool.pyi
@@ -1,9 +1,9 @@
-from _typeshed import Incomplete, Unused
+from _typeshed import Unused
class DescriptorPool:
- def __new__(cls, descriptor_db: Incomplete | None = None): ...
+ def __new__(cls, descriptor_db=None): ...
def __init__( # pyright: ignore[reportInconsistentConstructor]
- self, descriptor_db: Incomplete | None = None, use_deprecated_legacy_json_field_conflicts: Unused = False
+ self, descriptor_db=None, use_deprecated_legacy_json_field_conflicts: Unused = False
) -> None: ...
def Add(self, file_desc_proto): ...
def AddSerializedFile(self, serialized_file_desc_proto): ...
diff --git a/contrib/python/types-protobuf/google-stubs/protobuf/internal/well_known_types.pyi b/contrib/python/types-protobuf/google-stubs/protobuf/internal/well_known_types.pyi
index 5013ea53fc0..4ed2f895c76 100644
--- a/contrib/python/types-protobuf/google-stubs/protobuf/internal/well_known_types.pyi
+++ b/contrib/python/types-protobuf/google-stubs/protobuf/internal/well_known_types.pyi
@@ -9,7 +9,7 @@ from google.protobuf import struct_pb2
class Any:
type_url: str
value: Incomplete
- def Pack(self, msg, type_url_prefix: str = "type.googleapis.com/", deterministic: Incomplete | None = None) -> None: ...
+ def Pack(self, msg, type_url_prefix: str = "type.googleapis.com/", deterministic=None) -> None: ...
def Unpack(self, msg) -> bool: ...
def TypeName(self) -> str: ...
def Is(self, descriptor) -> bool: ...
diff --git a/contrib/python/types-protobuf/ya.make b/contrib/python/types-protobuf/ya.make
index e93e7b16e84..33f3f6ac13c 100644
--- a/contrib/python/types-protobuf/ya.make
+++ b/contrib/python/types-protobuf/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(6.30.2.20250506)
+VERSION(6.30.2.20250516)
LICENSE(Apache-2.0)
diff --git a/library/cpp/threading/chunk_queue/queue.h b/library/cpp/threading/chunk_queue/queue.h
index fdde719e503..17ebef91a86 100644
--- a/library/cpp/threading/chunk_queue/queue.h
+++ b/library/cpp/threading/chunk_queue/queue.h
@@ -11,8 +11,6 @@
#include <util/system/yassert.h>
#include <atomic>
-#include <type_traits>
-#include <utility>
namespace NThreading {
////////////////////////////////////////////////////////////////////////////////
@@ -287,7 +285,7 @@ namespace NThreading {
ui64 NextTag() {
// TODO: can we avoid synchronization here? it costs 1.5x performance penalty
// return GetCycleCount();
- return WriteTag.fetch_add(1);
+ return WriteTag.fetch_add(1, std::memory_order_relaxed);
}
template <typename TT>
diff --git a/util/datetime/uptime.cpp b/util/datetime/uptime.cpp
index 1d141665d41..17925d43e3c 100644
--- a/util/datetime/uptime.cpp
+++ b/util/datetime/uptime.cpp
@@ -43,13 +43,10 @@ TDuration Uptime() {
#if defined(_win_)
return TDuration::MilliSeconds(GetTickCount64());
#elif defined(_linux_)
- TUnbufferedFileInput in("/proc/uptime");
- TString uptimeStr = in.ReadLine();
- double up, idle;
- if (sscanf(uptimeStr.data(), "%lf %lf", &up, &idle) < 2) {
- ythrow yexception() << "cannot read values from /proc/uptime";
- }
- return TDuration::MilliSeconds(up * 1000.0);
+ struct timespec ts;
+ int ret = clock_gettime(CLOCK_BOOTTIME, &ts);
+ Y_ENSURE_EX(ret != -1, TSystemError() << "Failed to read the CLOCK_BOOTTIME timer");
+ return TDuration::Seconds(ts.tv_sec) + TDuration::MicroSeconds(ts.tv_nsec / 1000);
#elif defined(_darwin_)
return GetDarwinUptime();
#elif defined(_emscripten_)
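
Note: the uptime.cpp change replaces parsing /proc/uptime with a single clock_gettime(CLOCK_BOOTTIME) call, avoiding file I/O and text parsing. The same comparison can be sketched in Python on a Linux host (time.CLOCK_BOOTTIME is Linux-only):

import time

# Old approach: read and parse the text file.
with open("/proc/uptime") as f:
    up_from_procfs = float(f.read().split()[0])

# New approach: one clock read, no parsing; CLOCK_BOOTTIME also counts time spent suspended.
up_from_clock = time.clock_gettime(time.CLOCK_BOOTTIME)

print(up_from_procfs, up_from_clock)  # the two values should agree within a fraction of a second
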
diff --git a/yt/cpp/mapreduce/common/retry_lib.cpp b/yt/cpp/mapreduce/common/retry_lib.cpp
index e352bd144be..a39001091cd 100644
--- a/yt/cpp/mapreduce/common/retry_lib.cpp
+++ b/yt/cpp/mapreduce/common/retry_lib.cpp
@@ -225,7 +225,6 @@ static TMaybe<TDuration> TryGetBackoffDuration(const TErrorResponse& errorRespon
NBus::TransportError,
NRpc::Unavailable,
NApi::RetriableArchiveError,
- NSequoiaClient::SequoiaRetriableError,
NRpc::TransientFailure,
Canceled,
Timeout,
diff --git a/yt/cpp/mapreduce/interface/error_codes.h b/yt/cpp/mapreduce/interface/error_codes.h
index 93c60b6da2f..00911d756da 100644
--- a/yt/cpp/mapreduce/interface/error_codes.h
+++ b/yt/cpp/mapreduce/interface/error_codes.h
@@ -468,18 +468,5 @@ namespace NJobProberClient {
} // namespace NJobProberClient
-
-
-// from ./ytlib/sequoia_client/public.h
-namespace NSequoiaClient {
-
-////////////////////////////////////////////////////////////////////////////////
-
- constexpr int SequoiaRetriableError = 6002;
-
-////////////////////////////////////////////////////////////////////////////////
-
-} // namespace NSequoiaClient
-
} // namespace NClusterErrorCodes
} // namespace NYT
diff --git a/yt/python/yt/common.py b/yt/python/yt/common.py
index a0de88b66ab..71a7976df00 100644
--- a/yt/python/yt/common.py
+++ b/yt/python/yt/common.py
@@ -374,10 +374,6 @@ class YtError(Exception):
"""Cross-cell "copy"/"move" command is explicitly disabled."""
return self.contains_code(1002)
- def is_sequoia_retriable_error(self):
- """Probably lock conflict in Sequoia tables."""
- return self.contains_code(6002)
-
def is_backup_checkpoint_rejected(self):
"""Backup checkpoint rejected."""
return self.contains_code(1733)
diff --git a/yt/yt/client/api/rpc_proxy/client_impl.cpp b/yt/yt/client/api/rpc_proxy/client_impl.cpp
index d0caae4c51e..07a8ffc20e6 100644
--- a/yt/yt/client/api/rpc_proxy/client_impl.cpp
+++ b/yt/yt/client/api/rpc_proxy/client_impl.cpp
@@ -67,7 +67,7 @@ TClient::TClient(
const TClientOptions& clientOptions)
: Connection_(std::move(connection))
, ClientOptions_(clientOptions)
- , RetryingChannel_(CreateSequoiaAwareRetryingChannel(
+ , RetryingChannel_(MaybeCreateRetryingChannel(
WrapNonRetryingChannel(Connection_->CreateChannel(false)),
/*retryProxyBanned*/ true))
, TableMountCache_(BIND(
@@ -99,18 +99,19 @@ void TClient::Terminate()
////////////////////////////////////////////////////////////////////////////////
-IChannelPtr TClient::CreateSequoiaAwareRetryingChannel(IChannelPtr channel, bool retryProxyBanned) const
+IChannelPtr TClient::MaybeCreateRetryingChannel(IChannelPtr channel, bool retryProxyBanned) const
{
const auto& config = Connection_->GetConfig();
- bool retrySequoiaErrorsOnly = !config->EnableRetries;
- // NB: Even if client's retries are disabled Sequoia transient failures are
- // still retriable. See IsRetriableError().
- return CreateRetryingChannel(
- config->RetryingChannel,
- std::move(channel),
- BIND([=] (const TError& error) {
- return IsRetriableError(error, retryProxyBanned, retrySequoiaErrorsOnly);
- }));
+ if (config->EnableRetries) {
+ return NRpc::CreateRetryingChannel(
+ config->RetryingChannel,
+ std::move(channel),
+ BIND([=] (const TError& error) {
+ return IsRetriableError(error, retryProxyBanned);
+ }));
+ } else {
+ return channel;
+ }
}
IChannelPtr TClient::CreateNonRetryingChannelByAddress(const std::string& address) const
@@ -144,7 +145,7 @@ IChannelPtr TClient::CreateNonRetryingStickyChannel() const
IChannelPtr TClient::WrapStickyChannelIntoRetrying(IChannelPtr underlying) const
{
- return CreateSequoiaAwareRetryingChannel(
+ return MaybeCreateRetryingChannel(
std::move(underlying),
/*retryProxyBanned*/ false);
}
diff --git a/yt/yt/client/api/rpc_proxy/client_impl.h b/yt/yt/client/api/rpc_proxy/client_impl.h
index 59cc5516b1a..5676e9e04af 100644
--- a/yt/yt/client/api/rpc_proxy/client_impl.h
+++ b/yt/yt/client/api/rpc_proxy/client_impl.h
@@ -626,10 +626,10 @@ private:
TLazyIntrusivePtr<NTabletClient::ITableMountCache> TableMountCache_;
TLazyIntrusivePtr<NTransactionClient::ITimestampProvider> TimestampProvider_;
-
NTransactionClient::ITimestampProviderPtr CreateTimestampProvider() const;
- NRpc::IChannelPtr CreateSequoiaAwareRetryingChannel(NRpc::IChannelPtr channel, bool retryProxyBanned) const;
+ NRpc::IChannelPtr MaybeCreateRetryingChannel(NRpc::IChannelPtr channel, bool retryProxyBanned) const;
+
// Returns an RPC channel to use for API calls to the particular address (e.g.: AttachTransaction).
// The channel is non-retrying, so should be wrapped into retrying channel on demand.
NRpc::IChannelPtr CreateNonRetryingChannelByAddress(const std::string& address) const;
diff --git a/yt/yt/client/api/rpc_proxy/helpers.cpp b/yt/yt/client/api/rpc_proxy/helpers.cpp
index 757e499d969..7528791163e 100644
--- a/yt/yt/client/api/rpc_proxy/helpers.cpp
+++ b/yt/yt/client/api/rpc_proxy/helpers.cpp
@@ -1841,20 +1841,8 @@ bool IsDynamicTableRetriableError(const TError& error)
error.FindMatching(NTabletClient::EErrorCode::TabletReplicationEraMismatch);
}
-bool IsRetriableError(const TError& error, bool retryProxyBanned, bool retrySequoiaErrorsOnly)
+bool IsRetriableError(const TError& error, bool retryProxyBanned)
{
- // For now transient Sequoia failures are always retriable even if client's
- // retries are disabled.
- // TODO(kvk1920): consider to make a separate flag "EnableSequoiaRetries"
- // for this.
- if (error.FindMatching(NSequoiaClient::EErrorCode::SequoiaRetriableError)) {
- return true;
- }
-
- if (retrySequoiaErrorsOnly) {
- return false;
- }
-
if (error.FindMatching(NRpcProxy::EErrorCode::ProxyBanned) ||
error.FindMatching(NRpc::EErrorCode::PeerBanned))
{
diff --git a/yt/yt/client/api/rpc_proxy/helpers.h b/yt/yt/client/api/rpc_proxy/helpers.h
index 8bfa68b54fb..27c36b7ebf1 100644
--- a/yt/yt/client/api/rpc_proxy/helpers.h
+++ b/yt/yt/client/api/rpc_proxy/helpers.h
@@ -328,10 +328,7 @@ void ParseRequest(
////////////////////////////////////////////////////////////////////////////////
-bool IsRetriableError(
- const TError& error,
- bool retryProxyBanned = true,
- bool retrySequoiaErrorsOnly = false);
+bool IsRetriableError(const TError& error, bool retryProxyBanned = true);
////////////////////////////////////////////////////////////////////////////////
diff --git a/yt/yt/client/ypath/parser_detail.cpp b/yt/yt/client/ypath/parser_detail.cpp
index 30739dde0c8..e68553ee8a0 100644
--- a/yt/yt/client/ypath/parser_detail.cpp
+++ b/yt/yt/client/ypath/parser_detail.cpp
@@ -34,6 +34,8 @@ const NYson::ETokenType RangeSeparatorToken = NYson::ETokenType::Comma;
////////////////////////////////////////////////////////////////////////////////
+namespace {
+
void ThrowUnexpectedToken(const TToken& token)
{
THROW_ERROR_EXCEPTION("Unexpected token %Qv",
@@ -128,37 +130,30 @@ TString ParseCluster(TString str, const IAttributeDictionaryPtr& attributes)
auto clusterSeparatorIndex = str.find_first_of(':');
if (clusterSeparatorIndex == TString::npos) {
THROW_ERROR_EXCEPTION(
- "Path %Qv does not start with a valid root-designator, cluster://path short-form assumed; "
- "no \':\' separator symbol found to parse cluster",
+ "Path %Qv does not start with a valid root-designator",
str);
}
const auto clusterName = str.substr(0, clusterSeparatorIndex);
-
if (clusterName.empty()) {
THROW_ERROR_EXCEPTION(
- "Path %Qv does not start with a valid root-designator, cluster://path short-form assumed; "
- "cluster name cannot be empty",
+ "Path %Qv does not start with a valid root-designator",
str);
}
auto illegalSymbolIt = std::find_if_not(clusterName.begin(), clusterName.end(), &IsValidClusterSymbol);
if (illegalSymbolIt != clusterName.end()) {
THROW_ERROR_EXCEPTION(
- "Path %Qv does not start with a valid root-designator, cluster://path short-form assumed; "
- "cluster name contains illegal symbol %Qv",
+ "Possible cluster name in path %Qv contains illegal symbol %Qv",
str,
*illegalSymbolIt);
}
auto remainingString = str.substr(clusterSeparatorIndex + 1);
-
if (!StartsWithRootDesignator(remainingString)) {
THROW_ERROR_EXCEPTION(
- "Path %Qv does not start with a valid root-designator, cluster://path short-form assumed; "
- "path %Qv after cluster-separator does not start with a valid root-designator",
- str,
- remainingString);
+ "Path %Qv does not start with a valid root-designator",
+ str);
}
attributes->Set("cluster", clusterName);
@@ -405,6 +400,8 @@ void ParseRowRanges(NYson::TTokenizer& tokenizer, IAttributeDictionary* attribut
}
}
+} // namespace
+
////////////////////////////////////////////////////////////////////////////////
TRichYPath ParseRichYPathImpl(const TString& str)
diff --git a/yt/yt/library/formats/protobuf_writer.cpp b/yt/yt/library/formats/protobuf_writer.cpp
index 84e90036fa8..009e8001b98 100644
--- a/yt/yt/library/formats/protobuf_writer.cpp
+++ b/yt/yt/library/formats/protobuf_writer.cpp
@@ -81,9 +81,9 @@ public:
if (size <= RemainingBytes()) {
Advance(size);
} else {
- char Buffer[MaxGapSize];
+ const std::array<char, MaxGapSize> gap{};
YT_VERIFY(size <= MaxGapSize);
- Write(Buffer, size);
+ Write(gap.data(), size);
}
return position;
}