author     thegeorg <thegeorg@yandex-team.ru>  2022-02-10 16:45:08 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:45:08 +0300
commit     4e839db24a3bbc9f1c610c43d6faaaa99824dcca (patch)
tree       506dac10f5df94fab310584ee51b24fc5a081c22 /contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal
parent     2d37894b1b037cf24231090eda8589bbb44fb6fc (diff)
download   ydb-4e839db24a3bbc9f1c610c43d6faaaa99824dcca.tar.gz

Restoring authorship annotation for <thegeorg@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal')
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_cord_internal/ya.make | 16
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal/ya.make | 8
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/charconv_parse.cc | 2
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_internal.cc | 12
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_internal.h | 280
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree.cc | 2256
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree.h | 1878
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_navigator.cc | 370
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_navigator.h | 530
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_reader.cc | 136
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_reader.h | 422
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_consume.cc | 258
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_consume.h | 100
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_flat.h | 14
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring.cc | 110
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring.h | 84
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring_reader.h | 8
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_test_util.h | 440
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions.cc | 192
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions.h | 170
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions/ya.make | 16
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle.cc | 278
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle.h | 262
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle/ya.make | 26
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info.cc | 890
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info.h | 596
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info/ya.make | 34
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token.cc | 128
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token.h | 194
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token/ya.make | 104
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_statistics.h | 174
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_update_scope.h | 142
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_update_tracker.h | 242
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/resize_uninitialized.h | 96
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/arg.h | 16
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/bind.cc | 6
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/bind.h | 2
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/extension.cc | 12
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/extension.h | 72
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/parser.cc | 124
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/parser.h | 50
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/ya.make | 2
-rw-r--r--  contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_split_internal.h | 70
43 files changed, 5411 insertions, 5411 deletions
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_cord_internal/ya.make b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_cord_internal/ya.make
index 42b7b6cd5e..aa888d4ecd 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_cord_internal/ya.make
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_cord_internal/ya.make
@@ -19,7 +19,7 @@ PEERDIR(
contrib/restricted/abseil-cpp-tstring/y_absl/base/log_severity
contrib/restricted/abseil-cpp-tstring/y_absl/numeric
contrib/restricted/abseil-cpp-tstring/y_absl/strings
- contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal
)
ADDINCL(
@@ -28,15 +28,15 @@ ADDINCL(
NO_COMPILER_WARNINGS()
-SRCDIR(contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal)
+SRCDIR(contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal)
SRCS(
- cord_internal.cc
- cord_rep_btree.cc
- cord_rep_btree_navigator.cc
- cord_rep_btree_reader.cc
- cord_rep_consume.cc
- cord_rep_ring.cc
+ cord_internal.cc
+ cord_rep_btree.cc
+ cord_rep_btree_navigator.cc
+ cord_rep_btree_reader.cc
+ cord_rep_consume.cc
+ cord_rep_ring.cc
)
END()
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal/ya.make b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal/ya.make
index 4e57fc75f6..ad0e94f7a6 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal/ya.make
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal/ya.make
@@ -2,8 +2,8 @@
LIBRARY()
-WITHOUT_LICENSE_TEXTS()
-
+WITHOUT_LICENSE_TEXTS()
+
OWNER(
somov
g:cpp-contrib
@@ -24,8 +24,8 @@ ADDINCL(
NO_COMPILER_WARNINGS()
-SRCDIR(contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal)
-
+SRCDIR(contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal)
+
SRCS(
escaping.cc
ostringstream.cc
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/charconv_parse.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/charconv_parse.cc
index f0f78eb68c..80ac216750 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/charconv_parse.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/charconv_parse.cc
@@ -52,7 +52,7 @@ static_assert(std::numeric_limits<double>::digits == 53, "IEEE double fact");
// The lowest valued 19-digit decimal mantissa we can read still contains
// sufficient information to reconstruct a binary mantissa.
-static_assert(1000000000000000000u > (uint64_t{1} << (53 + 3)), "(b) above");
+static_assert(1000000000000000000u > (uint64_t{1} << (53 + 3)), "(b) above");
// ParseFloat<16> will read the first 15 significant digits of the mantissa.
//
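
Aside: a quick compile-time sanity check of the "(b) above" assertion in this hunk, as a standalone C++ sketch (only <cstdint> assumed). The lowest 19-digit decimal mantissa is 10^18, and 2^(53+3) = 72057594037927936 is below it, so 19 decimal digits always carry at least 53 + 3 bits of information.

#include <cstdint>
// 2^56 spelled out, so the magnitudes are visible at a glance.
static_assert((uint64_t{1} << 56) == 72057594037927936u, "2^56");
// The inequality the diff context asserts: 10^18 > 2^(53+3).
static_assert(1000000000000000000u > (uint64_t{1} << 56), "10^18 > 2^56");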
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_internal.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_internal.cc
index 6fc39985d8..2a14b0f610 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_internal.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_internal.cc
@@ -18,7 +18,7 @@
#include <memory>
#include "y_absl/container/inlined_vector.h"
-#include "y_absl/strings/internal/cord_rep_btree.h"
+#include "y_absl/strings/internal/cord_rep_btree.h"
#include "y_absl/strings/internal/cord_rep_flat.h"
#include "y_absl/strings/internal/cord_rep_ring.h"
@@ -26,12 +26,12 @@ namespace y_absl {
ABSL_NAMESPACE_BEGIN
namespace cord_internal {
-ABSL_CONST_INIT std::atomic<bool> cord_btree_enabled(kCordEnableBtreeDefault);
+ABSL_CONST_INIT std::atomic<bool> cord_btree_enabled(kCordEnableBtreeDefault);
ABSL_CONST_INIT std::atomic<bool> cord_ring_buffer_enabled(
kCordEnableRingBufferDefault);
ABSL_CONST_INIT std::atomic<bool> shallow_subcords_enabled(
kCordShallowSubcordsDefault);
-ABSL_CONST_INIT std::atomic<bool> cord_btree_exhaustive_validation(false);
+ABSL_CONST_INIT std::atomic<bool> cord_btree_exhaustive_validation(false);
void CordRep::Destroy(CordRep* rep) {
assert(rep != nullptr);
@@ -52,9 +52,9 @@ void CordRep::Destroy(CordRep* rep) {
rep = left;
continue;
}
- } else if (rep->tag == BTREE) {
- CordRepBtree::Destroy(rep->btree());
- rep = nullptr;
+ } else if (rep->tag == BTREE) {
+ CordRepBtree::Destroy(rep->btree());
+ rep = nullptr;
} else if (rep->tag == RING) {
CordRepRing::Destroy(rep->ring());
rep = nullptr;
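
Aside: the hunk above routes BTREE and RING nodes through their own Destroy() while CONCAT nodes are unwound iteratively. Below is a minimal standalone sketch of that dispatch shape, with simplified stand-in types (Node, Tag, Destroy are illustrative names, not the real Abseil classes):

#include <cassert>

enum Tag { CONCAT, SUBSTRING, BTREE, RING };

struct Node {
  Tag tag;
  Node* left = nullptr;  // used by CONCAT only in this sketch
};

// Walks the left spine in a loop rather than recursing, mirroring the
// `rep = left; continue;` / `rep = nullptr;` pattern in the hunk. The real
// code also manages refcounts and the right child; both are omitted here.
void Destroy(Node* rep) {
  assert(rep != nullptr);
  while (rep != nullptr) {
    if (rep->tag == CONCAT) {
      Node* left = rep->left;
      delete rep;
      rep = left;   // continue down the left spine
    } else {
      delete rep;   // BTREE / RING delegate to their own Destroy()
      rep = nullptr;
    }
  }
}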
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_internal.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_internal.h
index 82f5ac7b81..3806497743 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_internal.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_internal.h
@@ -37,25 +37,25 @@ class CordzInfo;
// Default feature enable states for cord ring buffers
enum CordFeatureDefaults {
- kCordEnableBtreeDefault = true,
+ kCordEnableBtreeDefault = true,
kCordEnableRingBufferDefault = false,
kCordShallowSubcordsDefault = false
};
-extern std::atomic<bool> cord_btree_enabled;
+extern std::atomic<bool> cord_btree_enabled;
extern std::atomic<bool> cord_ring_buffer_enabled;
extern std::atomic<bool> shallow_subcords_enabled;
-// `cord_btree_exhaustive_validation` can be set to force exhaustive validation
-// in debug assertions, and code that calls `IsValid()` explicitly. By default,
-// assertions should be relatively cheap and AssertValid() can easily lead to
-// O(n^2) complexity as recursive / full tree validation is O(n).
-extern std::atomic<bool> cord_btree_exhaustive_validation;
-
-inline void enable_cord_btree(bool enable) {
- cord_btree_enabled.store(enable, std::memory_order_relaxed);
-}
-
+// `cord_btree_exhaustive_validation` can be set to force exhaustive validation
+// in debug assertions, and code that calls `IsValid()` explicitly. By default,
+// assertions should be relatively cheap and AssertValid() can easily lead to
+// O(n^2) complexity as recursive / full tree validation is O(n).
+extern std::atomic<bool> cord_btree_exhaustive_validation;
+
+inline void enable_cord_btree(bool enable) {
+ cord_btree_enabled.store(enable, std::memory_order_relaxed);
+}
+
inline void enable_cord_ring_buffer(bool enable) {
cord_ring_buffer_enabled.store(enable, std::memory_order_relaxed);
}
@@ -80,16 +80,16 @@ enum Constants {
kMaxBytesToCopy = 511
};
-// Compact class for tracking the reference count and state flags for CordRep
-// instances. Data is stored in an atomic int32_t for compactness and speed.
-class RefcountAndFlags {
+// Compact class for tracking the reference count and state flags for CordRep
+// instances. Data is stored in an atomic int32_t for compactness and speed.
+class RefcountAndFlags {
public:
- constexpr RefcountAndFlags() : count_{kRefIncrement} {}
+ constexpr RefcountAndFlags() : count_{kRefIncrement} {}
struct Immortal {};
- explicit constexpr RefcountAndFlags(Immortal) : count_(kImmortalFlag) {}
- struct WithCrc {};
- explicit constexpr RefcountAndFlags(WithCrc)
- : count_(kCrcFlag | kRefIncrement) {}
+ explicit constexpr RefcountAndFlags(Immortal) : count_(kImmortalFlag) {}
+ struct WithCrc {};
+ explicit constexpr RefcountAndFlags(WithCrc)
+ : count_(kCrcFlag | kRefIncrement) {}
// Increments the reference count. Imposes no memory ordering.
inline void Increment() {
@@ -102,82 +102,82 @@ class RefcountAndFlags {
// Returns false if there are no references outstanding; true otherwise.
// Inserts barriers to ensure that state written before this method returns
// false will be visible to a thread that just observed this method returning
- // false. Always returns false when the immortal bit is set.
+ // false. Always returns false when the immortal bit is set.
inline bool Decrement() {
- int32_t refcount = count_.load(std::memory_order_acquire) & kRefcountMask;
- assert(refcount > 0 || refcount & kImmortalFlag);
+ int32_t refcount = count_.load(std::memory_order_acquire) & kRefcountMask;
+ assert(refcount > 0 || refcount & kImmortalFlag);
return refcount != kRefIncrement &&
- (count_.fetch_sub(kRefIncrement, std::memory_order_acq_rel) &
- kRefcountMask) != kRefIncrement;
+ (count_.fetch_sub(kRefIncrement, std::memory_order_acq_rel) &
+ kRefcountMask) != kRefIncrement;
}
// Same as Decrement but expect that refcount is greater than 1.
inline bool DecrementExpectHighRefcount() {
int32_t refcount =
- count_.fetch_sub(kRefIncrement, std::memory_order_acq_rel) &
- kRefcountMask;
- assert(refcount > 0 || refcount & kImmortalFlag);
+ count_.fetch_sub(kRefIncrement, std::memory_order_acq_rel) &
+ kRefcountMask;
+ assert(refcount > 0 || refcount & kImmortalFlag);
return refcount != kRefIncrement;
}
// Returns the current reference count using acquire semantics.
inline int32_t Get() const {
- return count_.load(std::memory_order_acquire) >> kNumFlags;
- }
-
- // Returns true if the referenced object carries a CRC value.
- bool HasCrc() const {
- return (count_.load(std::memory_order_relaxed) & kCrcFlag) != 0;
- }
-
- // Returns true iff the atomic integer is 1 and this node does not store
- // a CRC. When both these conditions are met, the current thread owns
- // the reference and no other thread shares it, so its contents may be
- // safely mutated.
- //
- // If the referenced item is shared, carries a CRC, or is immortal,
- // it should not be modified in-place, and this function returns false.
- //
- // This call performs the memory barrier needed for the owning thread
- // to act on the object, so that if it returns true, it may safely
- // assume exclusive access to the object.
- inline bool IsMutable() {
- return (count_.load(std::memory_order_acquire)) == kRefIncrement;
+ return count_.load(std::memory_order_acquire) >> kNumFlags;
}
- // Returns whether the atomic integer is 1. Similar to IsMutable(),
- // but does not check for a stored CRC. (An unshared node with a CRC is not
- // mutable, because changing its data would invalidate the CRC.)
- //
- // When this returns true, there are no other references, and data sinks
- // may safely adopt the children of the CordRep.
+ // Returns true if the referenced object carries a CRC value.
+ bool HasCrc() const {
+ return (count_.load(std::memory_order_relaxed) & kCrcFlag) != 0;
+ }
+
+ // Returns true iff the atomic integer is 1 and this node does not store
+ // a CRC. When both these conditions are met, the current thread owns
+ // the reference and no other thread shares it, so its contents may be
+ // safely mutated.
+ //
+ // If the referenced item is shared, carries a CRC, or is immortal,
+ // it should not be modified in-place, and this function returns false.
+ //
+ // This call performs the memory barrier needed for the owning thread
+ // to act on the object, so that if it returns true, it may safely
+ // assume exclusive access to the object.
+ inline bool IsMutable() {
+ return (count_.load(std::memory_order_acquire)) == kRefIncrement;
+ }
+
+ // Returns whether the atomic integer is 1. Similar to IsMutable(),
+ // but does not check for a stored CRC. (An unshared node with a CRC is not
+ // mutable, because changing its data would invalidate the CRC.)
+ //
+ // When this returns true, there are no other references, and data sinks
+ // may safely adopt the children of the CordRep.
inline bool IsOne() {
- return (count_.load(std::memory_order_acquire) & kRefcountMask) ==
- kRefIncrement;
+ return (count_.load(std::memory_order_acquire) & kRefcountMask) ==
+ kRefIncrement;
}
bool IsImmortal() const {
- return (count_.load(std::memory_order_relaxed) & kImmortalFlag) != 0;
+ return (count_.load(std::memory_order_relaxed) & kImmortalFlag) != 0;
}
private:
- // We reserve the bottom bits for flags.
- // kImmortalBit indicates that this entity should never be collected; it is
- // used for the StringConstant constructor to avoid collecting immutable
- // constant cords.
- // kReservedFlag is reserved for future use.
+ // We reserve the bottom bits for flags.
+ // kImmortalBit indicates that this entity should never be collected; it is
+ // used for the StringConstant constructor to avoid collecting immutable
+ // constant cords.
+ // kReservedFlag is reserved for future use.
enum {
- kNumFlags = 2,
-
- kImmortalFlag = 0x1,
- kCrcFlag = 0x2,
- kRefIncrement = (1 << kNumFlags),
-
- // Bitmask to use when checking refcount by equality. This masks out
- // all flags except kImmortalFlag, which is part of the refcount for
- // purposes of equality. (A refcount of 0 or 1 does not count as 0 or 1
- // if the immortal bit is set.)
- kRefcountMask = ~kCrcFlag,
+ kNumFlags = 2,
+
+ kImmortalFlag = 0x1,
+ kCrcFlag = 0x2,
+ kRefIncrement = (1 << kNumFlags),
+
+ // Bitmask to use when checking refcount by equality. This masks out
+ // all flags except kImmortalFlag, which is part of the refcount for
+ // purposes of equality. (A refcount of 0 or 1 does not count as 0 or 1
+ // if the immortal bit is set.)
+ kRefcountMask = ~kCrcFlag,
};
std::atomic<int32_t> count_;
@@ -193,68 +193,68 @@ struct CordRepExternal;
struct CordRepFlat;
struct CordRepSubstring;
class CordRepRing;
-class CordRepBtree;
+class CordRepBtree;
// Various representations that we allow
enum CordRepKind {
CONCAT = 0,
- SUBSTRING = 1,
- BTREE = 2,
+ SUBSTRING = 1,
+ BTREE = 2,
RING = 3,
- EXTERNAL = 4,
+ EXTERNAL = 4,
// We have different tags for different sized flat arrays,
- // starting with FLAT, and limited to MAX_FLAT_TAG. The 225 value is based on
+ // starting with FLAT, and limited to MAX_FLAT_TAG. The 225 value is based on
// the current 'size to tag' encoding of 8 / 32 bytes. If a new tag is needed
// in the future, then 'FLAT' and 'MAX_FLAT_TAG' should be adjusted as well
// as the Tag <---> Size logic so that FLAT stil represents the minimum flat
// allocation size. (32 bytes as of now).
- FLAT = 5,
- MAX_FLAT_TAG = 225
+ FLAT = 5,
+ MAX_FLAT_TAG = 225
};
-// There are various locations where we want to check if some rep is a 'plain'
-// data edge, i.e. an external or flat rep. By having FLAT == EXTERNAL + 1, we
-// can perform this check in a single branch as 'tag >= EXTERNAL'
-// Likewise, we have some locations where we check for 'ring or external/flat',
-// so likewise align RING to EXTERNAL.
-// Note that we can leave this optimization to the compiler. The compiler will
-// DTRT when it sees a condition like `tag == EXTERNAL || tag >= FLAT`.
-static_assert(RING == BTREE + 1, "BTREE and RING not consecutive");
-static_assert(EXTERNAL == RING + 1, "BTREE and EXTERNAL not consecutive");
-static_assert(FLAT == EXTERNAL + 1, "EXTERNAL and FLAT not consecutive");
-
+// There are various locations where we want to check if some rep is a 'plain'
+// data edge, i.e. an external or flat rep. By having FLAT == EXTERNAL + 1, we
+// can perform this check in a single branch as 'tag >= EXTERNAL'
+// Likewise, we have some locations where we check for 'ring or external/flat',
+// so likewise align RING to EXTERNAL.
+// Note that we can leave this optimization to the compiler. The compiler will
+// DTRT when it sees a condition like `tag == EXTERNAL || tag >= FLAT`.
+static_assert(RING == BTREE + 1, "BTREE and RING not consecutive");
+static_assert(EXTERNAL == RING + 1, "BTREE and EXTERNAL not consecutive");
+static_assert(FLAT == EXTERNAL + 1, "EXTERNAL and FLAT not consecutive");
+
struct CordRep {
CordRep() = default;
- constexpr CordRep(RefcountAndFlags::Immortal immortal, size_t l)
+ constexpr CordRep(RefcountAndFlags::Immortal immortal, size_t l)
: length(l), refcount(immortal), tag(EXTERNAL), storage{} {}
// The following three fields have to be less than 32 bytes since
// that is the smallest supported flat node size.
size_t length;
- RefcountAndFlags refcount;
+ RefcountAndFlags refcount;
// If tag < FLAT, it represents CordRepKind and indicates the type of node.
// Otherwise, the node type is CordRepFlat and the tag is the encoded size.
uint8_t tag;
- // `storage` provides two main purposes:
- // - the starting point for FlatCordRep.Data() [flexible-array-member]
- // - 3 bytes of additional storage for use by derived classes.
- // The latter is used by CordrepConcat and CordRepBtree. CordRepConcat stores
- // a 'depth' value in storage[0], and the (future) CordRepBtree class stores
- // `height`, `begin` and `end` in the 3 entries. Otherwise we would need to
- // allocate room for these in the derived class, as not all compilers reuse
- // padding space from the base class (clang and gcc do, MSVC does not, etc)
- uint8_t storage[3];
-
- // Returns true if this instance's tag matches the requested type.
- constexpr bool IsRing() const { return tag == RING; }
- constexpr bool IsConcat() const { return tag == CONCAT; }
- constexpr bool IsSubstring() const { return tag == SUBSTRING; }
- constexpr bool IsExternal() const { return tag == EXTERNAL; }
- constexpr bool IsFlat() const { return tag >= FLAT; }
- constexpr bool IsBtree() const { return tag == BTREE; }
-
+ // `storage` provides two main purposes:
+ // - the starting point for FlatCordRep.Data() [flexible-array-member]
+ // - 3 bytes of additional storage for use by derived classes.
+ // The latter is used by CordrepConcat and CordRepBtree. CordRepConcat stores
+ // a 'depth' value in storage[0], and the (future) CordRepBtree class stores
+ // `height`, `begin` and `end` in the 3 entries. Otherwise we would need to
+ // allocate room for these in the derived class, as not all compilers reuse
+ // padding space from the base class (clang and gcc do, MSVC does not, etc)
+ uint8_t storage[3];
+
+ // Returns true if this instance's tag matches the requested type.
+ constexpr bool IsRing() const { return tag == RING; }
+ constexpr bool IsConcat() const { return tag == CONCAT; }
+ constexpr bool IsSubstring() const { return tag == SUBSTRING; }
+ constexpr bool IsExternal() const { return tag == EXTERNAL; }
+ constexpr bool IsFlat() const { return tag >= FLAT; }
+ constexpr bool IsBtree() const { return tag == BTREE; }
+
inline CordRepRing* ring();
inline const CordRepRing* ring() const;
inline CordRepConcat* concat();
@@ -265,8 +265,8 @@ struct CordRep {
inline const CordRepExternal* external() const;
inline CordRepFlat* flat();
inline const CordRepFlat* flat() const;
- inline CordRepBtree* btree();
- inline const CordRepBtree* btree() const;
+ inline CordRepBtree* btree();
+ inline const CordRepBtree* btree() const;
// --------------------------------------------------------------------
// Memory management
@@ -287,8 +287,8 @@ struct CordRepConcat : public CordRep {
CordRep* left;
CordRep* right;
- uint8_t depth() const { return storage[0]; }
- void set_depth(uint8_t depth) { storage[0] = depth; }
+ uint8_t depth() const { return storage[0]; }
+ void set_depth(uint8_t depth) { storage[0] = depth; }
};
struct CordRepSubstring : public CordRep {
@@ -306,7 +306,7 @@ using ExternalReleaserInvoker = void (*)(CordRepExternal*);
struct CordRepExternal : public CordRep {
CordRepExternal() = default;
explicit constexpr CordRepExternal(y_absl::string_view str)
- : CordRep(RefcountAndFlags::Immortal{}, str.size()),
+ : CordRep(RefcountAndFlags::Immortal{}, str.size()),
base(str.data()),
releaser_invoker(nullptr) {}
@@ -315,7 +315,7 @@ struct CordRepExternal : public CordRep {
ExternalReleaserInvoker releaser_invoker;
// Deletes (releases) the external rep.
- // Requires rep != nullptr and rep->IsExternal()
+ // Requires rep != nullptr and rep->IsExternal()
static void Delete(CordRep* rep);
};
@@ -358,7 +358,7 @@ struct CordRepExternalImpl
};
inline void CordRepExternal::Delete(CordRep* rep) {
- assert(rep != nullptr && rep->IsExternal());
+ assert(rep != nullptr && rep->IsExternal());
auto* rep_external = static_cast<CordRepExternal*>(rep);
assert(rep_external->releaser_invoker != nullptr);
rep_external->releaser_invoker(rep_external);
@@ -404,9 +404,9 @@ static constexpr cordz_info_t BigEndianByte(unsigned char value) {
class InlineData {
public:
- // DefaultInitType forces the use of the default initialization constructor.
- enum DefaultInitType { kDefaultInit };
-
+ // DefaultInitType forces the use of the default initialization constructor.
+ enum DefaultInitType { kDefaultInit };
+
// kNullCordzInfo holds the big endian representation of intptr_t(1)
// This is the 'null' / initial value of 'cordz_info'. The null value
// is specifically big endian 1 as with 64-bit pointers, the last
@@ -414,7 +414,7 @@ class InlineData {
static constexpr cordz_info_t kNullCordzInfo = BigEndianByte(1);
constexpr InlineData() : as_chars_{0} {}
- explicit InlineData(DefaultInitType) {}
+ explicit InlineData(DefaultInitType) {}
explicit constexpr InlineData(CordRep* rep) : as_tree_(rep) {}
explicit constexpr InlineData(y_absl::string_view chars)
: as_chars_{
@@ -441,16 +441,16 @@ class InlineData {
return as_tree_.cordz_info != kNullCordzInfo;
}
- // Returns true if either of the provided instances hold a cordz_info value.
- // This method is more efficient than the equivalent `data1.is_profiled() ||
- // data2.is_profiled()`. Requires both arguments to hold a tree.
- static bool is_either_profiled(const InlineData& data1,
- const InlineData& data2) {
- assert(data1.is_tree() && data2.is_tree());
- return (data1.as_tree_.cordz_info | data2.as_tree_.cordz_info) !=
- kNullCordzInfo;
- }
-
+ // Returns true if either of the provided instances hold a cordz_info value.
+ // This method is more efficient than the equivalent `data1.is_profiled() ||
+ // data2.is_profiled()`. Requires both arguments to hold a tree.
+ static bool is_either_profiled(const InlineData& data1,
+ const InlineData& data2) {
+ assert(data1.is_tree() && data2.is_tree());
+ return (data1.as_tree_.cordz_info | data2.as_tree_.cordz_info) !=
+ kNullCordzInfo;
+ }
+
// Returns the cordz_info sampling instance for this instance, or nullptr
// if the current instance is not sampled and does not have CordzInfo data.
// Requires the current instance to hold a tree value.
@@ -560,7 +560,7 @@ class InlineData {
// store the size in the last char of `as_chars_` shifted left + 1.
// Else we store it in a tree and store a pointer to that tree in
// `as_tree_.rep` and store a tag in `tagged_size`.
- union {
+ union {
char as_chars_[kMaxInline + 1];
AsTree as_tree_;
};
@@ -569,32 +569,32 @@ class InlineData {
static_assert(sizeof(InlineData) == kMaxInline + 1, "");
inline CordRepConcat* CordRep::concat() {
- assert(IsConcat());
+ assert(IsConcat());
return static_cast<CordRepConcat*>(this);
}
inline const CordRepConcat* CordRep::concat() const {
- assert(IsConcat());
+ assert(IsConcat());
return static_cast<const CordRepConcat*>(this);
}
inline CordRepSubstring* CordRep::substring() {
- assert(IsSubstring());
+ assert(IsSubstring());
return static_cast<CordRepSubstring*>(this);
}
inline const CordRepSubstring* CordRep::substring() const {
- assert(IsSubstring());
+ assert(IsSubstring());
return static_cast<const CordRepSubstring*>(this);
}
inline CordRepExternal* CordRep::external() {
- assert(IsExternal());
+ assert(IsExternal());
return static_cast<CordRepExternal*>(this);
}
inline const CordRepExternal* CordRep::external() const {
- assert(IsExternal());
+ assert(IsExternal());
return static_cast<const CordRepExternal*>(this);
}
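
Aside: the flag layout this header establishes can be verified with a few compile-time checks; a minimal sketch of the arithmetic, reusing the constants from the hunk (kNumFlags = 2, kImmortalFlag = 0x1, kCrcFlag = 0x2); the assertion messages are illustrative:

#include <cstdint>

enum : int32_t {
  kNumFlags = 2,
  kImmortalFlag = 0x1,
  kCrcFlag = 0x2,
  kRefIncrement = 1 << kNumFlags,  // one logical reference == 4
  kRefcountMask = ~kCrcFlag,       // equality tests ignore only the CRC bit
};

// A fresh RefcountAndFlags holds kRefIncrement, so Get() reads 1.
static_assert((kRefIncrement >> kNumFlags) == 1, "new node has refcount 1");
// With the immortal bit set, the masked count never equals kRefIncrement,
// so Decrement() can never report "last reference" for an immortal node.
static_assert(((kRefIncrement | kImmortalFlag) & kRefcountMask) != kRefIncrement,
              "immortal never reads as one");
// The CRC bit is masked out, so IsOne() still sees an unshared CRC node as
// one reference, while IsMutable() (which compares the raw value) does not.
static_assert(((kRefIncrement | kCrcFlag) & kRefcountMask) == kRefIncrement,
              "IsOne() ignores the CRC flag");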
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree.cc
index 93121c9958..396a40b499 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree.cc
@@ -1,1128 +1,1128 @@
-// Copyright 2021 The Abseil Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "y_absl/strings/internal/cord_rep_btree.h"
-
-#include <cassert>
-#include <cstdint>
-#include <iostream>
-#include <util/generic/string.h>
-
-#include "y_absl/base/attributes.h"
-#include "y_absl/base/config.h"
-#include "y_absl/base/internal/raw_logging.h"
-#include "y_absl/strings/internal/cord_internal.h"
-#include "y_absl/strings/internal/cord_rep_consume.h"
-#include "y_absl/strings/internal/cord_rep_flat.h"
-#include "y_absl/strings/str_cat.h"
-#include "y_absl/strings/string_view.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-constexpr size_t CordRepBtree::kMaxCapacity; // NOLINT: needed for c++ < c++17
-
-namespace {
-
-using NodeStack = CordRepBtree * [CordRepBtree::kMaxDepth];
-using EdgeType = CordRepBtree::EdgeType;
-using OpResult = CordRepBtree::OpResult;
-using CopyResult = CordRepBtree::CopyResult;
-
-constexpr auto kFront = CordRepBtree::kFront;
-constexpr auto kBack = CordRepBtree::kBack;
-
-inline bool exhaustive_validation() {
- return cord_btree_exhaustive_validation.load(std::memory_order_relaxed);
-}
-
-// Implementation of the various 'Dump' functions.
-// Prints the entire tree structure or 'rep'. External callers should
-// not specify 'depth' and leave it to its default (0) value.
-// Rep may be a CordRepBtree tree, or a SUBSTRING / EXTERNAL / FLAT node.
-void DumpAll(const CordRep* rep, bool include_contents, std::ostream& stream,
- int depth = 0) {
- // Allow for full height trees + substring -> flat / external nodes.
- assert(depth <= CordRepBtree::kMaxDepth + 2);
- TString sharing = const_cast<CordRep*>(rep)->refcount.IsOne()
- ? TString("Private")
- : y_absl::StrCat("Shared(", rep->refcount.Get(), ")");
- TString sptr = y_absl::StrCat("0x", y_absl::Hex(rep));
-
- // Dumps the data contents of `rep` if `include_contents` is true.
- // Always emits a new line character.
- auto maybe_dump_data = [&stream, include_contents](const CordRep* r) {
- if (include_contents) {
- // Allow for up to 60 wide display of content data, which with some
- // indentation and prefix / labels keeps us within roughly 80-100 wide.
- constexpr size_t kMaxDataLength = 60;
- stream << ", data = \""
- << CordRepBtree::EdgeData(r).substr(0, kMaxDataLength)
- << (r->length > kMaxDataLength ? "\"..." : "\"");
- }
- stream << '\n';
- };
-
- // For each level, we print the 'shared/private' state and the rep pointer,
- // indented by two spaces per recursive depth.
- stream << TString(depth * 2, ' ') << sharing << " (" << sptr << ") ";
-
- if (rep->IsBtree()) {
- const CordRepBtree* node = rep->btree();
- TString label =
- node->height() ? y_absl::StrCat("Node(", node->height(), ")") : "Leaf";
- stream << label << ", len = " << node->length
- << ", begin = " << node->begin() << ", end = " << node->end()
- << "\n";
- for (CordRep* edge : node->Edges()) {
- DumpAll(edge, include_contents, stream, depth + 1);
- }
- } else if (rep->tag == SUBSTRING) {
- const CordRepSubstring* substring = rep->substring();
- stream << "Substring, len = " << rep->length
- << ", start = " << substring->start;
- maybe_dump_data(rep);
- DumpAll(substring->child, include_contents, stream, depth + 1);
- } else if (rep->tag >= FLAT) {
- stream << "Flat, len = " << rep->length
- << ", cap = " << rep->flat()->Capacity();
- maybe_dump_data(rep);
- } else if (rep->tag == EXTERNAL) {
- stream << "Extn, len = " << rep->length;
- maybe_dump_data(rep);
- }
-}
-
-// TODO(b/192061034): add 'bytes to copy' logic to avoid large slop on substring
-// small data out of large reps, and general efficiency of 'always copy small
-// data'. Consider making this a cord rep internal library function.
-CordRepSubstring* CreateSubstring(CordRep* rep, size_t offset, size_t n) {
- assert(n != 0);
- assert(offset + n <= rep->length);
- assert(offset != 0 || n != rep->length);
-
- if (rep->tag == SUBSTRING) {
- CordRepSubstring* substring = rep->substring();
- offset += substring->start;
- rep = CordRep::Ref(substring->child);
- CordRep::Unref(substring);
- }
- CordRepSubstring* substring = new CordRepSubstring();
- substring->length = n;
- substring->tag = SUBSTRING;
- substring->start = offset;
- substring->child = rep;
- return substring;
-}
-
-// TODO(b/192061034): consider making this a cord rep library function.
-inline CordRep* MakeSubstring(CordRep* rep, size_t offset, size_t n) {
- if (n == rep->length) return rep;
- if (n == 0) return CordRep::Unref(rep), nullptr;
- return CreateSubstring(rep, offset, n);
-}
-
-// TODO(b/192061034): consider making this a cord rep library function.
-inline CordRep* MakeSubstring(CordRep* rep, size_t offset) {
- if (offset == 0) return rep;
- return CreateSubstring(rep, offset, rep->length - offset);
-}
-
-// Resizes `edge` to the provided `length`. Adopts a reference on `edge`.
-// This method directly returns `edge` if `length` equals `edge->length`.
-// If `is_mutable` is set to true, this function may return `edge` with
-// `edge->length` set to the new length depending on the type and size of
-// `edge`. Otherwise, this function returns a new CordRepSubstring value.
-// Requires `length > 0 && length <= edge->length`.
-CordRep* ResizeEdge(CordRep* edge, size_t length, bool is_mutable) {
- assert(length > 0);
- assert(length <= edge->length);
- assert(CordRepBtree::IsDataEdge(edge));
- if (length >= edge->length) return edge;
-
- if (is_mutable && (edge->tag >= FLAT || edge->tag == SUBSTRING)) {
- edge->length = length;
- return edge;
- }
-
- return CreateSubstring(edge, 0, length);
-}
-
-template <EdgeType edge_type>
-inline y_absl::string_view Consume(y_absl::string_view s, size_t n) {
- return edge_type == kBack ? s.substr(n) : s.substr(0, s.size() - n);
-}
-
-template <EdgeType edge_type>
-inline y_absl::string_view Consume(char* dst, y_absl::string_view s, size_t n) {
- if (edge_type == kBack) {
- memcpy(dst, s.data(), n);
- return s.substr(n);
- } else {
- const size_t offset = s.size() - n;
- memcpy(dst, s.data() + offset, n);
- return s.substr(0, offset);
- }
-}
-
-// Known issue / optimization weirdness: the store associated with the
-// decrement introduces traffic between cpus (even if the result of that
-// traffic does nothing), making this faster than a single call to
-// refcount.Decrement() checking the zero refcount condition.
-template <typename R, typename Fn>
-inline void FastUnref(R* r, Fn&& fn) {
- if (r->refcount.IsOne()) {
- fn(r);
- } else if (!r->refcount.DecrementExpectHighRefcount()) {
- fn(r);
- }
-}
-
-// Deletes a leaf node data edge. Requires `rep` to be an EXTERNAL or FLAT
-// node, or a SUBSTRING of an EXTERNAL or FLAT node.
-void DeleteLeafEdge(CordRep* rep) {
- for (;;) {
- if (rep->tag >= FLAT) {
- CordRepFlat::Delete(rep->flat());
- return;
- }
- if (rep->tag == EXTERNAL) {
- CordRepExternal::Delete(rep->external());
- return;
- }
- assert(rep->tag == SUBSTRING);
- CordRepSubstring* substring = rep->substring();
- rep = substring->child;
- assert(rep->tag == EXTERNAL || rep->tag >= FLAT);
- delete substring;
- if (rep->refcount.Decrement()) return;
- }
-}
-
-// StackOperations contains the logic to build a left-most or right-most stack
-// (leg) down to the leaf level of a btree, and 'unwind' / 'Finalize' methods to
-// propagate node changes up the stack.
-template <EdgeType edge_type>
-struct StackOperations {
- // Returns true if the node at 'depth' is mutable, i.e. has a refcount
- // of one, carries no CRC, and all of its parent nodes have a refcount of one.
- inline bool owned(int depth) const { return depth < share_depth; }
-
- // Returns the node at 'depth'.
- inline CordRepBtree* node(int depth) const { return stack[depth]; }
-
- // Builds a `depth` levels deep stack starting at `tree` recording which nodes
- // are private in the form of the 'share depth' where nodes are shared.
- inline CordRepBtree* BuildStack(CordRepBtree* tree, int depth) {
- assert(depth <= tree->height());
- int current_depth = 0;
- while (current_depth < depth && tree->refcount.IsMutable()) {
- stack[current_depth++] = tree;
- tree = tree->Edge(edge_type)->btree();
- }
- share_depth = current_depth + (tree->refcount.IsMutable() ? 1 : 0);
- while (current_depth < depth) {
- stack[current_depth++] = tree;
- tree = tree->Edge(edge_type)->btree();
- }
- return tree;
- }
-
- // Builds a stack with the invariant that all nodes are private owned / not
- // shared and carry no CRC data. This is used in iterative updates where a
- // previous propagation guaranteed all nodes have this property.
- inline void BuildOwnedStack(CordRepBtree* tree, int height) {
- assert(height <= CordRepBtree::kMaxHeight);
- int depth = 0;
- while (depth < height) {
- assert(tree->refcount.IsMutable());
- stack[depth++] = tree;
- tree = tree->Edge(edge_type)->btree();
- }
- assert(tree->refcount.IsMutable());
- share_depth = depth + 1;
- }
-
- // Processes the final 'top level' result action for the tree.
- // See the 'Action' enum for the various action implications.
- static inline CordRepBtree* Finalize(CordRepBtree* tree, OpResult result) {
- switch (result.action) {
- case CordRepBtree::kPopped:
- tree = edge_type == kBack ? CordRepBtree::New(tree, result.tree)
- : CordRepBtree::New(result.tree, tree);
- if (ABSL_PREDICT_FALSE(tree->height() > CordRepBtree::kMaxHeight)) {
- tree = CordRepBtree::Rebuild(tree);
- ABSL_RAW_CHECK(tree->height() <= CordRepBtree::kMaxHeight,
- "Max height exceeded");
- }
- return tree;
- case CordRepBtree::kCopied:
- CordRep::Unref(tree);
- ABSL_FALLTHROUGH_INTENDED;
- case CordRepBtree::kSelf:
- return result.tree;
- }
- ABSL_INTERNAL_UNREACHABLE;
- return result.tree;
- }
-
- // Propagate the action result in 'result' up into all nodes of the stack
- // starting at depth 'depth'. 'length' contains the extra length of data that
- // was added at the lowest level, and is updated into all nodes of the stack.
- // See the 'Action' enum for the various action implications.
- // If 'propagate' is true, then any copied node values are updated into the
- // stack, which is used for iterative processing on the same stack.
- template <bool propagate = false>
- inline CordRepBtree* Unwind(CordRepBtree* tree, int depth, size_t length,
- OpResult result) {
- // TODO(mvels): revisit the below code to check if 3 loops with 3
- // (incremental) conditions is faster than 1 loop with a switch.
- // Benchmarking and perf recordings indicate the loop with switch is
- // fastest, likely because of indirect jumps on the tight case values and
- // dense branches. But it's worth considering 3 loops, as the `action`
- // transitions are mono directional. E.g.:
- // while (action == kPopped) {
- // ...
- // }
- // while (action == kCopied) {
- // ...
- // }
- // ...
- // We also found that an "if () do {}" loop here seems faster, possibly
- // because it allows the branch predictor more granular heuristics on
- // 'single leaf' (`depth` == 0) and 'single depth' (`depth` == 1) cases
- // which appear to be the most common use cases.
- if (depth != 0) {
- do {
- CordRepBtree* node = stack[--depth];
- const bool owned = depth < share_depth;
- switch (result.action) {
- case CordRepBtree::kPopped:
- assert(!propagate);
- result = node->AddEdge<edge_type>(owned, result.tree, length);
- break;
- case CordRepBtree::kCopied:
- result = node->SetEdge<edge_type>(owned, result.tree, length);
- if (propagate) stack[depth] = result.tree;
- break;
- case CordRepBtree::kSelf:
- node->length += length;
- while (depth > 0) {
- node = stack[--depth];
- node->length += length;
- }
- return node;
- }
- } while (depth > 0);
- }
- return Finalize(tree, result);
- }
-
- // Invokes `Unwind` with `propagate=true` to update the stack node values.
- inline CordRepBtree* Propagate(CordRepBtree* tree, int depth, size_t length,
- OpResult result) {
- return Unwind</*propagate=*/true>(tree, depth, length, result);
- }
-
- // `share_depth` contains the depth at which the nodes in the stack cannot
- // be mutated. I.e., if the top most level is shared (i.e.:
- // `!refcount.IsMutable()`), then `share_depth` is 0. If the 2nd node
- // is shared (and implicitly all nodes below that) then `share_depth` is 1,
- // etc. A `share_depth` greater than the depth of the stack indicates that
- // none of the nodes in the stack are shared.
- int share_depth;
-
- NodeStack stack;
-};
-
-} // namespace
-
-void CordRepBtree::Dump(const CordRep* rep, y_absl::string_view label,
- bool include_contents, std::ostream& stream) {
- stream << "===================================\n";
- if (!label.empty()) {
- stream << label << '\n';
- stream << "-----------------------------------\n";
- }
- if (rep) {
- DumpAll(rep, include_contents, stream);
- } else {
- stream << "NULL\n";
- }
-}
-
-void CordRepBtree::Dump(const CordRep* rep, y_absl::string_view label,
- std::ostream& stream) {
- Dump(rep, label, false, stream);
-}
-
-void CordRepBtree::Dump(const CordRep* rep, std::ostream& stream) {
- Dump(rep, y_absl::string_view(), false, stream);
-}
-
-void CordRepBtree::DestroyLeaf(CordRepBtree* tree, size_t begin, size_t end) {
- for (CordRep* edge : tree->Edges(begin, end)) {
- FastUnref(edge, DeleteLeafEdge);
- }
- Delete(tree);
-}
-
-void CordRepBtree::DestroyNonLeaf(CordRepBtree* tree, size_t begin,
- size_t end) {
- for (CordRep* edge : tree->Edges(begin, end)) {
- FastUnref(edge->btree(), Destroy);
- }
- Delete(tree);
-}
-
-bool CordRepBtree::IsValid(const CordRepBtree* tree, bool shallow) {
-#define NODE_CHECK_VALID(x) \
- if (!(x)) { \
- ABSL_RAW_LOG(ERROR, "CordRepBtree::CheckValid() FAILED: %s", #x); \
- return false; \
- }
-#define NODE_CHECK_EQ(x, y) \
- if ((x) != (y)) { \
- ABSL_RAW_LOG(ERROR, \
- "CordRepBtree::CheckValid() FAILED: %s != %s (%s vs %s)", #x, \
- #y, y_absl::StrCat(x).c_str(), y_absl::StrCat(y).c_str()); \
- return false; \
- }
-
- NODE_CHECK_VALID(tree != nullptr);
- NODE_CHECK_VALID(tree->IsBtree());
- NODE_CHECK_VALID(tree->height() <= kMaxHeight);
- NODE_CHECK_VALID(tree->begin() < tree->capacity());
- NODE_CHECK_VALID(tree->end() <= tree->capacity());
- NODE_CHECK_VALID(tree->begin() <= tree->end());
- size_t child_length = 0;
- for (CordRep* edge : tree->Edges()) {
- NODE_CHECK_VALID(edge != nullptr);
- if (tree->height() > 0) {
- NODE_CHECK_VALID(edge->IsBtree());
- NODE_CHECK_VALID(edge->btree()->height() == tree->height() - 1);
- } else {
- NODE_CHECK_VALID(IsDataEdge(edge));
- }
- child_length += edge->length;
- }
- NODE_CHECK_EQ(child_length, tree->length);
- if ((!shallow || exhaustive_validation()) && tree->height() > 0) {
- for (CordRep* edge : tree->Edges()) {
- if (!IsValid(edge->btree(), shallow)) return false;
- }
- }
- return true;
-
-#undef NODE_CHECK_VALID
-#undef NODE_CHECK_EQ
-}
-
-#ifndef NDEBUG
-
-CordRepBtree* CordRepBtree::AssertValid(CordRepBtree* tree, bool shallow) {
- if (!IsValid(tree, shallow)) {
- Dump(tree, "CordRepBtree validation failed:", false, std::cout);
- ABSL_RAW_LOG(FATAL, "CordRepBtree::CheckValid() FAILED");
- }
- return tree;
-}
-
-const CordRepBtree* CordRepBtree::AssertValid(const CordRepBtree* tree,
- bool shallow) {
- if (!IsValid(tree, shallow)) {
- Dump(tree, "CordRepBtree validation failed:", false, std::cout);
- ABSL_RAW_LOG(FATAL, "CordRepBtree::CheckValid() FAILED");
- }
- return tree;
-}
-
-#endif // NDEBUG
-
-template <EdgeType edge_type>
-inline OpResult CordRepBtree::AddEdge(bool owned, CordRep* edge, size_t delta) {
- if (size() >= kMaxCapacity) return {New(edge), kPopped};
- OpResult result = ToOpResult(owned);
- result.tree->Add<edge_type>(edge);
- result.tree->length += delta;
- return result;
-}
-
-template <EdgeType edge_type>
-OpResult CordRepBtree::SetEdge(bool owned, CordRep* edge, size_t delta) {
- OpResult result;
- const size_t idx = index(edge_type);
- if (owned) {
- result = {this, kSelf};
- CordRep::Unref(edges_[idx]);
- } else {
- // Create a copy containing all unchanged edges. Unchanged edges are the
- // open interval [begin, back) or [begin + 1, end) depending on `edge_type`.
- // We conveniently cover both case using a constexpr `shift` being 0 or 1
- // as `end :== back + 1`.
- result = {CopyRaw(), kCopied};
- constexpr int shift = edge_type == kFront ? 1 : 0;
- for (CordRep* r : Edges(begin() + shift, back() + shift)) {
- CordRep::Ref(r);
- }
- }
- result.tree->edges_[idx] = edge;
- result.tree->length += delta;
- return result;
-}
-
-template <EdgeType edge_type>
-CordRepBtree* CordRepBtree::AddCordRep(CordRepBtree* tree, CordRep* rep) {
- const int depth = tree->height();
- const size_t length = rep->length;
- StackOperations<edge_type> ops;
- CordRepBtree* leaf = ops.BuildStack(tree, depth);
- const OpResult result =
- leaf->AddEdge<edge_type>(ops.owned(depth), rep, length);
- return ops.Unwind(tree, depth, length, result);
-}
-
-template <>
-CordRepBtree* CordRepBtree::NewLeaf<kBack>(y_absl::string_view data,
- size_t extra) {
- CordRepBtree* leaf = CordRepBtree::New(0);
- size_t length = 0;
- size_t end = 0;
- const size_t cap = leaf->capacity();
- while (!data.empty() && end != cap) {
- auto* flat = CordRepFlat::New(data.length() + extra);
- flat->length = (std::min)(data.length(), flat->Capacity());
- length += flat->length;
- leaf->edges_[end++] = flat;
- data = Consume<kBack>(flat->Data(), data, flat->length);
- }
- leaf->length = length;
- leaf->set_end(end);
- return leaf;
-}
-
-template <>
-CordRepBtree* CordRepBtree::NewLeaf<kFront>(y_absl::string_view data,
- size_t extra) {
- CordRepBtree* leaf = CordRepBtree::New(0);
- size_t length = 0;
- size_t begin = leaf->capacity();
- leaf->set_end(leaf->capacity());
- while (!data.empty() && begin != 0) {
- auto* flat = CordRepFlat::New(data.length() + extra);
- flat->length = (std::min)(data.length(), flat->Capacity());
- length += flat->length;
- leaf->edges_[--begin] = flat;
- data = Consume<kFront>(flat->Data(), data, flat->length);
- }
- leaf->length = length;
- leaf->set_begin(begin);
- return leaf;
-}
-
-template <>
-y_absl::string_view CordRepBtree::AddData<kBack>(y_absl::string_view data,
- size_t extra) {
- assert(!data.empty());
- assert(size() < capacity());
- AlignBegin();
- const size_t cap = capacity();
- do {
- CordRepFlat* flat = CordRepFlat::New(data.length() + extra);
- const size_t n = (std::min)(data.length(), flat->Capacity());
- flat->length = n;
- edges_[fetch_add_end(1)] = flat;
- data = Consume<kBack>(flat->Data(), data, n);
- } while (!data.empty() && end() != cap);
- return data;
-}
-
-template <>
-y_absl::string_view CordRepBtree::AddData<kFront>(y_absl::string_view data,
- size_t extra) {
- assert(!data.empty());
- assert(size() < capacity());
- AlignEnd();
- do {
- CordRepFlat* flat = CordRepFlat::New(data.length() + extra);
- const size_t n = (std::min)(data.length(), flat->Capacity());
- flat->length = n;
- edges_[sub_fetch_begin(1)] = flat;
- data = Consume<kFront>(flat->Data(), data, n);
- } while (!data.empty() && begin() != 0);
- return data;
-}
-
-template <EdgeType edge_type>
-CordRepBtree* CordRepBtree::AddData(CordRepBtree* tree, y_absl::string_view data,
- size_t extra) {
- if (ABSL_PREDICT_FALSE(data.empty())) return tree;
-
- const size_t original_data_size = data.size();
- int depth = tree->height();
- StackOperations<edge_type> ops;
- CordRepBtree* leaf = ops.BuildStack(tree, depth);
-
- // If there is capacity in the last edge, append as much data
- // as possible into this last edge.
- if (leaf->size() < leaf->capacity()) {
- OpResult result = leaf->ToOpResult(ops.owned(depth));
- data = result.tree->AddData<edge_type>(data, extra);
- if (data.empty()) {
- result.tree->length += original_data_size;
- return ops.Unwind(tree, depth, original_data_size, result);
- }
-
- // We added some data into this leaf, but not all. Propagate the added
- // length to the top most node, and rebuild the stack with any newly copied
- // or updated nodes. From this point on, the path (leg) from the top most
- // node to the right-most node towards the leaf node is privately owned.
- size_t delta = original_data_size - data.size();
- assert(delta > 0);
- result.tree->length += delta;
- tree = ops.Propagate(tree, depth, delta, result);
- ops.share_depth = depth + 1;
- }
-
- // We were unable to append all data into the existing right-most leaf node.
- // This means all remaining data must be put into (a) new leaf node(s) which
- // we append to the tree. To make this efficient, we iteratively build full
- // leaf nodes from `data` until the created leaf contains all remaining data.
- // We utilize the `Unwind` method to merge the created leaf into the first
- // level towards root that has capacity. On each iteration with remaining
- // data, we rebuild the stack in the knowledge that right-most nodes are
- // privately owned after the first `Unwind` completes.
- for (;;) {
- OpResult result = {CordRepBtree::NewLeaf<edge_type>(data, extra), kPopped};
- if (result.tree->length == data.size()) {
- return ops.Unwind(tree, depth, result.tree->length, result);
- }
- data = Consume<edge_type>(data, result.tree->length);
- tree = ops.Unwind(tree, depth, result.tree->length, result);
- depth = tree->height();
- ops.BuildOwnedStack(tree, depth);
- }
-}
-
-template <EdgeType edge_type>
-CordRepBtree* CordRepBtree::Merge(CordRepBtree* dst, CordRepBtree* src) {
- assert(dst->height() >= src->height());
-
- // Capture source length as we may consume / destroy `src`.
- const size_t length = src->length;
-
- // We attempt to merge `src` at its corresponding height in `dst`.
- const int depth = dst->height() - src->height();
- StackOperations<edge_type> ops;
- CordRepBtree* merge_node = ops.BuildStack(dst, depth);
-
- // If there is enough space in `merge_node` for all edges from `src`, add all
- // edges to this node, making a fresh copy as needed if not privately owned.
- // If `merge_node` does not have capacity for `src`, we rely on `Unwind` and
- // `Finalize` to merge `src` into the first level towards `root` where there
- // is capacity for another edge, or create a new top level node.
- OpResult result;
- if (merge_node->size() + src->size() <= kMaxCapacity) {
- result = merge_node->ToOpResult(ops.owned(depth));
- result.tree->Add<edge_type>(src->Edges());
- result.tree->length += src->length;
- if (src->refcount.IsOne()) {
- Delete(src);
- } else {
- for (CordRep* edge : src->Edges()) CordRep::Ref(edge);
- CordRepBtree::Unref(src);
- }
- } else {
- result = {src, kPopped};
- }
-
- // Unless we merged at the top level (i.e.: src and dst are equal height),
- // unwind the result towards the top level, and finalize the result.
- if (depth) {
- return ops.Unwind(dst, depth, length, result);
- }
- return ops.Finalize(dst, result);
-}
-
-CopyResult CordRepBtree::CopySuffix(size_t offset) {
- assert(offset < this->length);
-
- // As long as `offset` starts inside the last edge, we can 'drop' the current
- // depth. For the most extreme example: if offset references the last data
- // edge in the tree, there is only a single edge / path from the top of the
- // tree to that last edge, so we can drop all the nodes except that edge.
- // The fast path check for this is `back->length >= length - offset`.
- int height = this->height();
- CordRepBtree* node = this;
- size_t len = node->length - offset;
- CordRep* back = node->Edge(kBack);
- while (back->length >= len) {
- offset = back->length - len;
- if (--height < 0) {
- return {MakeSubstring(CordRep::Ref(back), offset), height};
- }
- node = back->btree();
- back = node->Edge(kBack);
- }
- if (offset == 0) return {CordRep::Ref(node), height};
-
- // Offset does not point into the last edge, so we span at least two edges.
- // Find the index of offset with `IndexBeyond` which provides us the edge
- // 'beyond' the offset if offset is not a clean starting point of an edge.
- Position pos = node->IndexBeyond(offset);
- CordRepBtree* sub = node->CopyToEndFrom(pos.index, len);
- const CopyResult result = {sub, height};
-
- // `pos.n` contains a non zero value if the offset is not an exact starting
- // point of an edge. In this case, `pos.n` contains the 'trailing' amount of
- // bytes of the edge preceding that in `pos.index`. We need to iteratively
- // adjust the preceding edge with the 'broken' offset until we have a perfect
- // start of the edge.
- while (pos.n != 0) {
- assert(pos.index >= 1);
- const size_t begin = pos.index - 1;
- sub->set_begin(begin);
- CordRep* const edge = node->Edge(begin);
-
- len = pos.n;
- offset = edge->length - len;
-
- if (--height < 0) {
- sub->edges_[begin] = MakeSubstring(CordRep::Ref(edge), offset, len);
- return result;
- }
-
- node = edge->btree();
- pos = node->IndexBeyond(offset);
-
- CordRepBtree* nsub = node->CopyToEndFrom(pos.index, len);
- sub->edges_[begin] = nsub;
- sub = nsub;
- }
- sub->set_begin(pos.index);
- return result;
-}
-
-CopyResult CordRepBtree::CopyPrefix(size_t n, bool allow_folding) {
- assert(n > 0);
- assert(n <= this->length);
-
- // As long as `n` does not exceed the length of the first edge, we can 'drop'
- // the current depth. For the most extreme example: if we'd copy a 1 byte
- // prefix from a tree, there is only a single edge / path from the top of the
- // tree to the single data edge containing this byte, so we can drop all the
- // nodes except the data node.
- int height = this->height();
- CordRepBtree* node = this;
- CordRep* front = node->Edge(kFront);
- if (allow_folding) {
- while (front->length >= n) {
- if (--height < 0) return {MakeSubstring(CordRep::Ref(front), 0, n), -1};
- node = front->btree();
- front = node->Edge(kFront);
- }
- }
- if (node->length == n) return {CordRep::Ref(node), height};
-
- // `n` spans at least two nodes, find the end point of the span.
- Position pos = node->IndexOf(n);
-
- // Create a partial copy of the node up to `pos.index`, with a defined length
- // of `n`. Any 'partial last edge' is added further below as needed.
- CordRepBtree* sub = node->CopyBeginTo(pos.index, n);
- const CopyResult result = {sub, height};
-
- // `pos.n` contains the 'offset inside the edge for IndexOf(n)'. As long as
- // this is not zero, we don't have a 'clean cut', so we need to make a
- // (partial) copy of that last edge, and repeat this until pos.n is zero.
- while (pos.n != 0) {
- size_t end = pos.index;
- n = pos.n;
-
- CordRep* edge = node->Edge(pos.index);
- if (--height < 0) {
- sub->edges_[end++] = MakeSubstring(CordRep::Ref(edge), 0, n);
- sub->set_end(end);
- AssertValid(result.edge->btree());
- return result;
- }
-
- node = edge->btree();
- pos = node->IndexOf(n);
- CordRepBtree* nsub = node->CopyBeginTo(pos.index, n);
- sub->edges_[end++] = nsub;
- sub->set_end(end);
- sub = nsub;
- }
- sub->set_end(pos.index);
- AssertValid(result.edge->btree());
- return result;
-}
-
-CordRep* CordRepBtree::ExtractFront(CordRepBtree* tree) {
- CordRep* front = tree->Edge(tree->begin());
- if (tree->refcount.IsMutable()) {
- Unref(tree->Edges(tree->begin() + 1, tree->end()));
- CordRepBtree::Delete(tree);
- } else {
- CordRep::Ref(front);
- CordRep::Unref(tree);
- }
- return front;
-}
-
-CordRepBtree* CordRepBtree::ConsumeBeginTo(CordRepBtree* tree, size_t end,
- size_t new_length) {
- assert(end <= tree->end());
- if (tree->refcount.IsMutable()) {
- Unref(tree->Edges(end, tree->end()));
- tree->set_end(end);
- tree->length = new_length;
- } else {
- CordRepBtree* old = tree;
- tree = tree->CopyBeginTo(end, new_length);
- CordRep::Unref(old);
- }
- return tree;
-}
-
-CordRep* CordRepBtree::RemoveSuffix(CordRepBtree* tree, size_t n) {
- // Check input and deal with trivial cases 'Remove all/none'
- assert(tree != nullptr);
- assert(n <= tree->length);
- const size_t len = tree->length;
- if (ABSL_PREDICT_FALSE(n == 0)) {
- return tree;
- }
- if (ABSL_PREDICT_FALSE(n >= len)) {
- CordRepBtree::Unref(tree);
- return nullptr;
- }
-
- size_t length = len - n;
- int height = tree->height();
- bool is_mutable = tree->refcount.IsMutable();
-
- // Extract all top nodes that are reduced to a single edge.
- Position pos = tree->IndexOfLength(length);
- while (pos.index == tree->begin()) {
- CordRep* edge = ExtractFront(tree);
- is_mutable &= edge->refcount.IsMutable();
- if (height-- == 0) return ResizeEdge(edge, length, is_mutable);
- tree = edge->btree();
- pos = tree->IndexOfLength(length);
- }
-
- // Repeat the following sequence traversing down the tree:
- // - Crop the top node to the 'last remaining edge' adjusting length.
- // - Set the length for down edges to the partial length in that last edge.
- // - Repeat this until the last edge is 'included in full'.
- // - If we hit the data edge level, resize and return the last data edge.
- CordRepBtree* top = tree = ConsumeBeginTo(tree, pos.index + 1, length);
- CordRep* edge = tree->Edge(pos.index);
- length = pos.n;
- while (length != edge->length) {
- // ConsumeBeginTo guarantees `tree` is a clean, privately owned copy.
- assert(tree->refcount.IsMutable());
- const bool edge_is_mutable = edge->refcount.IsMutable();
-
- if (height-- == 0) {
- tree->edges_[pos.index] = ResizeEdge(edge, length, edge_is_mutable);
- return AssertValid(top);
- }
-
- if (!edge_is_mutable) {
- // We can't 'in place' remove any suffixes down this edge.
- // Replace this edge with a prefix copy instead.
- tree->edges_[pos.index] = edge->btree()->CopyPrefix(length, false).edge;
- CordRep::Unref(edge);
- return AssertValid(top);
- }
-
- // Move down one level, rinse repeat.
- tree = edge->btree();
- pos = tree->IndexOfLength(length);
- tree = ConsumeBeginTo(edge->btree(), pos.index + 1, length);
- edge = tree->Edge(pos.index);
- length = pos.n;
- }
-
- return AssertValid(top);
-}
-
-CordRep* CordRepBtree::SubTree(size_t offset, size_t n) {
- assert(n <= this->length);
- assert(offset <= this->length - n);
- if (ABSL_PREDICT_FALSE(n == 0)) return nullptr;
-
- CordRepBtree* node = this;
- int height = node->height();
- Position front = node->IndexOf(offset);
- CordRep* left = node->edges_[front.index];
- while (front.n + n <= left->length) {
- if (--height < 0) return MakeSubstring(CordRep::Ref(left), front.n, n);
- node = left->btree();
- front = node->IndexOf(front.n);
- left = node->edges_[front.index];
- }
-
- const Position back = node->IndexBefore(front, n);
- CordRep* const right = node->edges_[back.index];
- assert(back.index > front.index);
-
- // Get partial suffix and prefix entries.
- CopyResult prefix;
- CopyResult suffix;
- if (height > 0) {
- // Copy prefix and suffix of the boundary nodes.
- prefix = left->btree()->CopySuffix(front.n);
- suffix = right->btree()->CopyPrefix(back.n);
-
- // If there is an edge between the prefix and suffix edges, then the tree
- // must remain at its previous (full) height. If we have no edges between
- // prefix and suffix edges, then the tree must be as high as either the
- // suffix or prefix edges (which are collapsed to their minimum heights).
- if (front.index + 1 == back.index) {
- height = (std::max)(prefix.height, suffix.height) + 1;
- }
-
- // Raise prefix and suffixes to the new tree height.
- for (int h = prefix.height + 1; h < height; ++h) {
- prefix.edge = CordRepBtree::New(prefix.edge);
- }
- for (int h = suffix.height + 1; h < height; ++h) {
- suffix.edge = CordRepBtree::New(suffix.edge);
- }
- } else {
- // Leaf node, simply take substrings for prefix and suffix.
- prefix = CopyResult{MakeSubstring(CordRep::Ref(left), front.n), -1};
- suffix = CopyResult{MakeSubstring(CordRep::Ref(right), 0, back.n), -1};
- }
-
- // Compose resulting tree.
- CordRepBtree* sub = CordRepBtree::New(height);
- size_t end = 0;
- sub->edges_[end++] = prefix.edge;
- for (CordRep* r : node->Edges(front.index + 1, back.index)) {
- sub->edges_[end++] = CordRep::Ref(r);
- }
- sub->edges_[end++] = suffix.edge;
- sub->set_end(end);
- sub->length = n;
- return AssertValid(sub);
-}
-
-CordRepBtree* CordRepBtree::MergeTrees(CordRepBtree* left,
- CordRepBtree* right) {
- return left->height() >= right->height() ? Merge<kBack>(left, right)
- : Merge<kFront>(right, left);
-}
-
-bool CordRepBtree::IsFlat(y_absl::string_view* fragment) const {
- if (height() == 0 && size() == 1) {
- if (fragment) *fragment = Data(begin());
- return true;
- }
- return false;
-}
-
-bool CordRepBtree::IsFlat(size_t offset, const size_t n,
- y_absl::string_view* fragment) const {
- assert(n <= this->length);
- assert(offset <= this->length - n);
- if (ABSL_PREDICT_FALSE(n == 0)) return false;
- int height = this->height();
- const CordRepBtree* node = this;
- for (;;) {
- const Position front = node->IndexOf(offset);
- const CordRep* edge = node->Edge(front.index);
- if (edge->length < front.n + n) return false;
- if (--height < 0) {
- if (fragment) *fragment = EdgeData(edge).substr(front.n, n);
- return true;
- }
- offset = front.n;
- node = node->Edge(front.index)->btree();
- }
-}
-
-char CordRepBtree::GetCharacter(size_t offset) const {
- assert(offset < length);
- const CordRepBtree* node = this;
- int height = node->height();
- for (;;) {
- Position front = node->IndexOf(offset);
- if (--height < 0) return node->Data(front.index)[front.n];
- offset = front.n;
- node = node->Edge(front.index)->btree();
- }
-}
-
-Span<char> CordRepBtree::GetAppendBufferSlow(size_t size) {
- // The inlined version in `GetAppendBuffer()` deals with all heights <= 3.
- assert(height() >= 4);
- assert(refcount.IsMutable());
-
- // Build a stack of nodes we may potentially need to update if we find a
- // non-shared FLAT with capacity at the leaf level.
- const int depth = height();
- CordRepBtree* node = this;
- CordRepBtree* stack[kMaxDepth];
- for (int i = 0; i < depth; ++i) {
- node = node->Edge(kBack)->btree();
- if (!node->refcount.IsMutable()) return {};
- stack[i] = node;
- }
-
- // Must be a privately owned, mutable flat.
- CordRep* const edge = node->Edge(kBack);
- if (!edge->refcount.IsMutable() || edge->tag < FLAT) return {};
-
- // Must have capacity.
- const size_t avail = edge->flat()->Capacity() - edge->length;
- if (avail == 0) return {};
-
- // Build span on remaining capacity.
- size_t delta = (std::min)(size, avail);
- Span<char> span = {edge->flat()->Data() + edge->length, delta};
- edge->length += delta;
- this->length += delta;
- for (int i = 0; i < depth; ++i) {
- stack[i]->length += delta;
- }
- return span;
-}
-
-CordRepBtree* CordRepBtree::CreateSlow(CordRep* rep) {
- if (rep->IsBtree()) return rep->btree();
-
- CordRepBtree* node = nullptr;
- auto consume = [&node](CordRep* r, size_t offset, size_t length) {
- r = MakeSubstring(r, offset, length);
- if (node == nullptr) {
- node = New(r);
- } else {
- node = CordRepBtree::AddCordRep<kBack>(node, r);
- }
- };
- Consume(rep, consume);
- return node;
-}
-
-CordRepBtree* CordRepBtree::AppendSlow(CordRepBtree* tree, CordRep* rep) {
- if (ABSL_PREDICT_TRUE(rep->IsBtree())) {
- return MergeTrees(tree, rep->btree());
- }
- auto consume = [&tree](CordRep* r, size_t offset, size_t length) {
- r = MakeSubstring(r, offset, length);
- tree = CordRepBtree::AddCordRep<kBack>(tree, r);
- };
- Consume(rep, consume);
- return tree;
-}
-
-CordRepBtree* CordRepBtree::PrependSlow(CordRepBtree* tree, CordRep* rep) {
- if (ABSL_PREDICT_TRUE(rep->IsBtree())) {
- return MergeTrees(rep->btree(), tree);
- }
- auto consume = [&tree](CordRep* r, size_t offset, size_t length) {
- r = MakeSubstring(r, offset, length);
- tree = CordRepBtree::AddCordRep<kFront>(tree, r);
- };
- ReverseConsume(rep, consume);
- return tree;
-}
-
-CordRepBtree* CordRepBtree::Append(CordRepBtree* tree, y_absl::string_view data,
- size_t extra) {
- return CordRepBtree::AddData<kBack>(tree, data, extra);
-}
-
-CordRepBtree* CordRepBtree::Prepend(CordRepBtree* tree, y_absl::string_view data,
- size_t extra) {
- return CordRepBtree::AddData<kFront>(tree, data, extra);
-}
-
-template CordRepBtree* CordRepBtree::AddCordRep<kFront>(CordRepBtree* tree,
- CordRep* rep);
-template CordRepBtree* CordRepBtree::AddCordRep<kBack>(CordRepBtree* tree,
- CordRep* rep);
-template CordRepBtree* CordRepBtree::AddData<kFront>(CordRepBtree* tree,
- y_absl::string_view data,
- size_t extra);
-template CordRepBtree* CordRepBtree::AddData<kBack>(CordRepBtree* tree,
- y_absl::string_view data,
- size_t extra);
-
-void CordRepBtree::Rebuild(CordRepBtree** stack, CordRepBtree* tree,
- bool consume) {
- bool owned = consume && tree->refcount.IsOne();
- if (tree->height() == 0) {
- for (CordRep* edge : tree->Edges()) {
- if (!owned) edge = CordRep::Ref(edge);
- size_t height = 0;
- size_t length = edge->length;
- CordRepBtree* node = stack[0];
- OpResult result = node->AddEdge<kBack>(true, edge, length);
- while (result.action == CordRepBtree::kPopped) {
- stack[height] = result.tree;
- if (stack[++height] == nullptr) {
- result.action = CordRepBtree::kSelf;
- stack[height] = CordRepBtree::New(node, result.tree);
- } else {
- node = stack[height];
- result = node->AddEdge<kBack>(true, result.tree, length);
- }
- }
- while (stack[++height] != nullptr) {
- stack[height]->length += length;
- }
- }
- } else {
- for (CordRep* rep : tree->Edges()) {
- Rebuild(stack, rep->btree(), owned);
- }
- }
- if (consume) {
- if (owned) {
- CordRepBtree::Delete(tree);
- } else {
- CordRepBtree::Unref(tree);
- }
- }
-}
-
-CordRepBtree* CordRepBtree::Rebuild(CordRepBtree* tree) {
- // Set up initial stack with empty leaf node.
- CordRepBtree* node = CordRepBtree::New();
- CordRepBtree* stack[CordRepBtree::kMaxDepth + 1] = {node};
-
- // Recursively build the tree, consuming the input tree.
- Rebuild(stack, tree, /* consume reference */ true);
-
- // Return the topmost node.
- for (CordRepBtree* parent : stack) {
- if (parent == nullptr) return node;
- node = parent;
- }
-
- // Unreachable
- assert(false);
- return nullptr;
-}
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "y_absl/strings/internal/cord_rep_btree.h"
+
+#include <cassert>
+#include <cstdint>
+#include <iostream>
+#include <util/generic/string.h>
+
+#include "y_absl/base/attributes.h"
+#include "y_absl/base/config.h"
+#include "y_absl/base/internal/raw_logging.h"
+#include "y_absl/strings/internal/cord_internal.h"
+#include "y_absl/strings/internal/cord_rep_consume.h"
+#include "y_absl/strings/internal/cord_rep_flat.h"
+#include "y_absl/strings/str_cat.h"
+#include "y_absl/strings/string_view.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+constexpr size_t CordRepBtree::kMaxCapacity; // NOLINT: needed for c++ < c++17
+
+namespace {
+
+using NodeStack = CordRepBtree * [CordRepBtree::kMaxDepth];
+using EdgeType = CordRepBtree::EdgeType;
+using OpResult = CordRepBtree::OpResult;
+using CopyResult = CordRepBtree::CopyResult;
+
+constexpr auto kFront = CordRepBtree::kFront;
+constexpr auto kBack = CordRepBtree::kBack;
+
+inline bool exhaustive_validation() {
+ return cord_btree_exhaustive_validation.load(std::memory_order_relaxed);
+}
+
+// Implementation of the various 'Dump' functions.
+// Prints the entire tree structure of 'rep'. External callers should
+// not specify 'depth' and leave it at its default (0) value.
+// Rep may be a CordRepBtree tree, or a SUBSTRING / EXTERNAL / FLAT node.
+void DumpAll(const CordRep* rep, bool include_contents, std::ostream& stream,
+ int depth = 0) {
+ // Allow for full height trees + substring -> flat / external nodes.
+ assert(depth <= CordRepBtree::kMaxDepth + 2);
+ TString sharing = const_cast<CordRep*>(rep)->refcount.IsOne()
+ ? TString("Private")
+ : y_absl::StrCat("Shared(", rep->refcount.Get(), ")");
+ TString sptr = y_absl::StrCat("0x", y_absl::Hex(rep));
+
+ // Dumps the data contents of `rep` if `include_contents` is true.
+ // Always emits a newline character.
+ auto maybe_dump_data = [&stream, include_contents](const CordRep* r) {
+ if (include_contents) {
+ // Allow for up to 60 wide display of content data, which with some
+ // indentation and prefix / labels keeps us within roughly 80-100 wide.
+ constexpr size_t kMaxDataLength = 60;
+ stream << ", data = \""
+ << CordRepBtree::EdgeData(r).substr(0, kMaxDataLength)
+ << (r->length > kMaxDataLength ? "\"..." : "\"");
+ }
+ stream << '\n';
+ };
+
+ // For each level, we print the 'shared/private' state and the rep pointer,
+ // indented by two spaces per recursive depth.
+ stream << TString(depth * 2, ' ') << sharing << " (" << sptr << ") ";
+
+ if (rep->IsBtree()) {
+ const CordRepBtree* node = rep->btree();
+ TString label =
+ node->height() ? y_absl::StrCat("Node(", node->height(), ")") : "Leaf";
+ stream << label << ", len = " << node->length
+ << ", begin = " << node->begin() << ", end = " << node->end()
+ << "\n";
+ for (CordRep* edge : node->Edges()) {
+ DumpAll(edge, include_contents, stream, depth + 1);
+ }
+ } else if (rep->tag == SUBSTRING) {
+ const CordRepSubstring* substring = rep->substring();
+ stream << "Substring, len = " << rep->length
+ << ", start = " << substring->start;
+ maybe_dump_data(rep);
+ DumpAll(substring->child, include_contents, stream, depth + 1);
+ } else if (rep->tag >= FLAT) {
+ stream << "Flat, len = " << rep->length
+ << ", cap = " << rep->flat()->Capacity();
+ maybe_dump_data(rep);
+ } else if (rep->tag == EXTERNAL) {
+ stream << "Extn, len = " << rep->length;
+ maybe_dump_data(rep);
+ }
+}
+
+// TODO(b/192061034): add 'bytes to copy' logic to avoid the large slop of
+// taking a small substring out of a large rep, and consider the general
+// efficiency of 'always copy small data'. Consider making this a cord rep
+// internal library function.
+CordRepSubstring* CreateSubstring(CordRep* rep, size_t offset, size_t n) {
+ assert(n != 0);
+ assert(offset + n <= rep->length);
+ assert(offset != 0 || n != rep->length);
+
+ if (rep->tag == SUBSTRING) {
+ CordRepSubstring* substring = rep->substring();
+ offset += substring->start;
+ rep = CordRep::Ref(substring->child);
+ CordRep::Unref(substring);
+ }
+ CordRepSubstring* substring = new CordRepSubstring();
+ substring->length = n;
+ substring->tag = SUBSTRING;
+ substring->start = offset;
+ substring->child = rep;
+ return substring;
+}
+
+// TODO(b/192061034): consider making this a cord rep library function.
+inline CordRep* MakeSubstring(CordRep* rep, size_t offset, size_t n) {
+ if (n == rep->length) return rep;
+ if (n == 0) return CordRep::Unref(rep), nullptr;
+ return CreateSubstring(rep, offset, n);
+}
+
+// TODO(b/192061034): consider making this a cord rep library function.
+inline CordRep* MakeSubstring(CordRep* rep, size_t offset) {
+ if (offset == 0) return rep;
+ return CreateSubstring(rep, offset, rep->length - offset);
+}
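+
+// Illustrative sketch, not part of the original source: CreateSubstring
+// collapses nested substrings so that a SUBSTRING node always points
+// directly at a FLAT or EXTERNAL child. Assuming `flat` is a FLAT node of
+// length 100:
+//
+//   CordRep* s1 = CreateSubstring(flat, 10, 50);  // start = 10, length = 50
+//   CordRep* s2 = CreateSubstring(s1, 5, 20);     // adopts the ref on `s1`
+//
+// `s2->substring()->child` is `flat` again, with `start == 15`; a chain of
+// SUBSTRING nodes is never created.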
+
+// Resizes `edge` to the provided `length`. Adopts a reference on `edge`.
+// This method directly returns `edge` if `length` equals `edge->length`.
+// If `is_mutable` is set to true, this function may return `edge` with
+// `edge->length` set to the new length depending on the type and size of
+// `edge`. Otherwise, this function returns a new CordRepSubstring value.
+// Requires `length > 0 && length <= edge->length`.
+CordRep* ResizeEdge(CordRep* edge, size_t length, bool is_mutable) {
+ assert(length > 0);
+ assert(length <= edge->length);
+ assert(CordRepBtree::IsDataEdge(edge));
+ if (length >= edge->length) return edge;
+
+ if (is_mutable && (edge->tag >= FLAT || edge->tag == SUBSTRING)) {
+ edge->length = length;
+ return edge;
+ }
+
+ return CreateSubstring(edge, 0, length);
+}
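+
+// Hedged example, not part of the original source: for a privately owned
+// FLAT `f` with `f->length == 100`, the resize happens in place:
+//
+//   CordRep* r = ResizeEdge(f, 40, /*is_mutable=*/true);  // r == f, length 40
+//
+// whereas the same call with `is_mutable == false` returns a new
+// CordRepSubstring covering bytes [0, 40) of `f`.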
+
+template <EdgeType edge_type>
+inline y_absl::string_view Consume(y_absl::string_view s, size_t n) {
+ return edge_type == kBack ? s.substr(n) : s.substr(0, s.size() - n);
+}
+
+template <EdgeType edge_type>
+inline y_absl::string_view Consume(char* dst, y_absl::string_view s, size_t n) {
+ if (edge_type == kBack) {
+ memcpy(dst, s.data(), n);
+ return s.substr(n);
+ } else {
+ const size_t offset = s.size() - n;
+ memcpy(dst, s.data() + offset, n);
+ return s.substr(0, offset);
+ }
+}
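+
+// Sketch of the `Consume` semantics, not part of the original source:
+// `Consume<kBack>` strips the consumed bytes off the front of `s` (data is
+// appended at the back of the tree in source order), while `Consume<kFront>`
+// strips them off the back (data is prepended in reverse order). E.g., for
+// s = "abcdef" and n = 2:
+//
+//   Consume<kBack>(s, 2);   // -> "cdef" ("ab" was consumed)
+//   Consume<kFront>(s, 2);  // -> "abcd" ("ef" was consumed)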
+
+// Known issue / optimization weirdness: the store associated with the
+// decrement introduces traffic between cpus (even if the result of that
+// traffic does nothing), which makes the explicit IsOne() check below
+// (avoiding that store) faster than a single call to refcount.Decrement()
+// that checks the zero refcount condition.
+template <typename R, typename Fn>
+inline void FastUnref(R* r, Fn&& fn) {
+ if (r->refcount.IsOne()) {
+ fn(r);
+ } else if (!r->refcount.DecrementExpectHighRefcount()) {
+ fn(r);
+ }
+}
+
+// Deletes a leaf node data edge. Requires `rep` to be an EXTERNAL or FLAT
+// node, or a SUBSTRING of an EXTERNAL or FLAT node.
+void DeleteLeafEdge(CordRep* rep) {
+ for (;;) {
+ if (rep->tag >= FLAT) {
+ CordRepFlat::Delete(rep->flat());
+ return;
+ }
+ if (rep->tag == EXTERNAL) {
+ CordRepExternal::Delete(rep->external());
+ return;
+ }
+ assert(rep->tag == SUBSTRING);
+ CordRepSubstring* substring = rep->substring();
+ rep = substring->child;
+ assert(rep->tag == EXTERNAL || rep->tag >= FLAT);
+ delete substring;
+ if (rep->refcount.Decrement()) return;
+ }
+}
+
+// StackOperations contains the logic to build a leftmost or rightmost stack
+// (leg) down to the leaf level of a btree, and 'unwind' / 'Finalize' methods to
+// propagate node changes up the stack.
+template <EdgeType edge_type>
+struct StackOperations {
+ // Returns true if the node at 'depth' is mutable, i.e. has a refcount
+ // of one, carries no CRC, and all of its parent nodes have a refcount of one.
+ inline bool owned(int depth) const { return depth < share_depth; }
+
+ // Returns the node at 'depth'.
+ inline CordRepBtree* node(int depth) const { return stack[depth]; }
+
+ // Builds a stack `depth` levels deep starting at `tree`, recording which
+ // nodes are private in the form of the 'share depth': the depth at which
+ // nodes become shared.
+ inline CordRepBtree* BuildStack(CordRepBtree* tree, int depth) {
+ assert(depth <= tree->height());
+ int current_depth = 0;
+ while (current_depth < depth && tree->refcount.IsMutable()) {
+ stack[current_depth++] = tree;
+ tree = tree->Edge(edge_type)->btree();
+ }
+ share_depth = current_depth + (tree->refcount.IsMutable() ? 1 : 0);
+ while (current_depth < depth) {
+ stack[current_depth++] = tree;
+ tree = tree->Edge(edge_type)->btree();
+ }
+ return tree;
+ }
+
+ // Builds a stack with the invariant that all nodes are privately owned / not
+ // shared and carry no CRC data. This is used in iterative updates where a
+ // previous propagation guaranteed all nodes have this property.
+ inline void BuildOwnedStack(CordRepBtree* tree, int height) {
+ assert(height <= CordRepBtree::kMaxHeight);
+ int depth = 0;
+ while (depth < height) {
+ assert(tree->refcount.IsMutable());
+ stack[depth++] = tree;
+ tree = tree->Edge(edge_type)->btree();
+ }
+ assert(tree->refcount.IsMutable());
+ share_depth = depth + 1;
+ }
+
+ // Processes the final 'top level' result action for the tree.
+ // See the 'Action' enum for the various action implications.
+ static inline CordRepBtree* Finalize(CordRepBtree* tree, OpResult result) {
+ switch (result.action) {
+ case CordRepBtree::kPopped:
+ tree = edge_type == kBack ? CordRepBtree::New(tree, result.tree)
+ : CordRepBtree::New(result.tree, tree);
+ if (ABSL_PREDICT_FALSE(tree->height() > CordRepBtree::kMaxHeight)) {
+ tree = CordRepBtree::Rebuild(tree);
+ ABSL_RAW_CHECK(tree->height() <= CordRepBtree::kMaxHeight,
+ "Max height exceeded");
+ }
+ return tree;
+ case CordRepBtree::kCopied:
+ CordRep::Unref(tree);
+ ABSL_FALLTHROUGH_INTENDED;
+ case CordRepBtree::kSelf:
+ return result.tree;
+ }
+ ABSL_INTERNAL_UNREACHABLE;
+ return result.tree;
+ }
+
+ // Propagates the action result in 'result' up into all nodes of the stack
+ // starting at depth 'depth'. 'length' contains the extra length of data that
+ // was added at the lowest level, and is updated into all nodes of the stack.
+ // See the 'Action' enum for the various action implications.
+ // If 'propagate' is true, then any copied node values are updated into the
+ // stack, which is used for iterative processing on the same stack.
+ template <bool propagate = false>
+ inline CordRepBtree* Unwind(CordRepBtree* tree, int depth, size_t length,
+ OpResult result) {
+ // TODO(mvels): revisit the below code to check if 3 loops with 3
+ // (incremental) conditions is faster than 1 loop with a switch.
+ // Benchmarking and perf recordings indicate the loop with switch is
+ // fastest, likely because of indirect jumps on the tight case values and
+ // dense branches. But it's worth considering 3 loops, as the `action`
+ // transitions are mono-directional. E.g.:
+ // while (action == kPopped) {
+ // ...
+ // }
+ // while (action == kCopied) {
+ // ...
+ // }
+ // ...
+ // We also found that an "if () do {}" loop here seems faster, possibly
+ // because it allows the branch predictor more granular heuristics on
+ // 'single leaf' (`depth` == 0) and 'single depth' (`depth` == 1) cases
+ // which appear to be the most common use cases.
+ if (depth != 0) {
+ do {
+ CordRepBtree* node = stack[--depth];
+ const bool owned = depth < share_depth;
+ switch (result.action) {
+ case CordRepBtree::kPopped:
+ assert(!propagate);
+ result = node->AddEdge<edge_type>(owned, result.tree, length);
+ break;
+ case CordRepBtree::kCopied:
+ result = node->SetEdge<edge_type>(owned, result.tree, length);
+ if (propagate) stack[depth] = result.tree;
+ break;
+ case CordRepBtree::kSelf:
+ node->length += length;
+ while (depth > 0) {
+ node = stack[--depth];
+ node->length += length;
+ }
+ return node;
+ }
+ } while (depth > 0);
+ }
+ return Finalize(tree, result);
+ }
+
+ // Invokes `Unwind` with `propagate=true` to update the stack node values.
+ inline CordRepBtree* Propagate(CordRepBtree* tree, int depth, size_t length,
+ OpResult result) {
+ return Unwind</*propagate=*/true>(tree, depth, length, result);
+ }
+
+ // `share_depth` contains the depth at which the nodes in the stack cannot
+ // be mutated. I.e., if the topmost level is shared (i.e.:
+ // `!refcount.IsMutable()`), then `share_depth` is 0. If the 2nd node
+ // is shared (and implicitly all nodes below that) then `share_depth` is 1,
+ // etc. A `share_depth` greater than the depth of the stack indicates that
+ // none of the nodes in the stack are shared.
+ int share_depth;
+
+ NodeStack stack;
+};
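+
+// Hedged illustration of `share_depth`, not part of the original source:
+// for a stack built with BuildStack(root, 2) where the root and its child
+// are privately owned but the leaf below them is shared:
+//
+//   depth:        0 (root)    1 (child)    2 (leaf)
+//   IsMutable():  true        true         false
+//
+// Here share_depth == 2, so owned(0) and owned(1) are true while owned(2)
+// is false: only the shared leaf needs copy-on-write treatment.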
+
+} // namespace
+
+void CordRepBtree::Dump(const CordRep* rep, y_absl::string_view label,
+ bool include_contents, std::ostream& stream) {
+ stream << "===================================\n";
+ if (!label.empty()) {
+ stream << label << '\n';
+ stream << "-----------------------------------\n";
+ }
+ if (rep) {
+ DumpAll(rep, include_contents, stream);
+ } else {
+ stream << "NULL\n";
+ }
+}
+
+void CordRepBtree::Dump(const CordRep* rep, y_absl::string_view label,
+ std::ostream& stream) {
+ Dump(rep, label, false, stream);
+}
+
+void CordRepBtree::Dump(const CordRep* rep, std::ostream& stream) {
+ Dump(rep, y_absl::string_view(), false, stream);
+}
+
+void CordRepBtree::DestroyLeaf(CordRepBtree* tree, size_t begin, size_t end) {
+ for (CordRep* edge : tree->Edges(begin, end)) {
+ FastUnref(edge, DeleteLeafEdge);
+ }
+ Delete(tree);
+}
+
+void CordRepBtree::DestroyNonLeaf(CordRepBtree* tree, size_t begin,
+ size_t end) {
+ for (CordRep* edge : tree->Edges(begin, end)) {
+ FastUnref(edge->btree(), Destroy);
+ }
+ Delete(tree);
+}
+
+bool CordRepBtree::IsValid(const CordRepBtree* tree, bool shallow) {
+#define NODE_CHECK_VALID(x) \
+ if (!(x)) { \
+ ABSL_RAW_LOG(ERROR, "CordRepBtree::CheckValid() FAILED: %s", #x); \
+ return false; \
+ }
+#define NODE_CHECK_EQ(x, y) \
+ if ((x) != (y)) { \
+ ABSL_RAW_LOG(ERROR, \
+ "CordRepBtree::CheckValid() FAILED: %s != %s (%s vs %s)", #x, \
+ #y, y_absl::StrCat(x).c_str(), y_absl::StrCat(y).c_str()); \
+ return false; \
+ }
+
+ NODE_CHECK_VALID(tree != nullptr);
+ NODE_CHECK_VALID(tree->IsBtree());
+ NODE_CHECK_VALID(tree->height() <= kMaxHeight);
+ NODE_CHECK_VALID(tree->begin() < tree->capacity());
+ NODE_CHECK_VALID(tree->end() <= tree->capacity());
+ NODE_CHECK_VALID(tree->begin() <= tree->end());
+ size_t child_length = 0;
+ for (CordRep* edge : tree->Edges()) {
+ NODE_CHECK_VALID(edge != nullptr);
+ if (tree->height() > 0) {
+ NODE_CHECK_VALID(edge->IsBtree());
+ NODE_CHECK_VALID(edge->btree()->height() == tree->height() - 1);
+ } else {
+ NODE_CHECK_VALID(IsDataEdge(edge));
+ }
+ child_length += edge->length;
+ }
+ NODE_CHECK_EQ(child_length, tree->length);
+ if ((!shallow || exhaustive_validation()) && tree->height() > 0) {
+ for (CordRep* edge : tree->Edges()) {
+ if (!IsValid(edge->btree(), shallow)) return false;
+ }
+ }
+ return true;
+
+#undef NODE_CHECK_VALID
+#undef NODE_CHECK_EQ
+}
+
+#ifndef NDEBUG
+
+CordRepBtree* CordRepBtree::AssertValid(CordRepBtree* tree, bool shallow) {
+ if (!IsValid(tree, shallow)) {
+ Dump(tree, "CordRepBtree validation failed:", false, std::cout);
+ ABSL_RAW_LOG(FATAL, "CordRepBtree::CheckValid() FAILED");
+ }
+ return tree;
+}
+
+const CordRepBtree* CordRepBtree::AssertValid(const CordRepBtree* tree,
+ bool shallow) {
+ if (!IsValid(tree, shallow)) {
+ Dump(tree, "CordRepBtree validation failed:", false, std::cout);
+ ABSL_RAW_LOG(FATAL, "CordRepBtree::CheckValid() FAILED");
+ }
+ return tree;
+}
+
+#endif // NDEBUG
+
+template <EdgeType edge_type>
+inline OpResult CordRepBtree::AddEdge(bool owned, CordRep* edge, size_t delta) {
+ if (size() >= kMaxCapacity) return {New(edge), kPopped};
+ OpResult result = ToOpResult(owned);
+ result.tree->Add<edge_type>(edge);
+ result.tree->length += delta;
+ return result;
+}
+
+template <EdgeType edge_type>
+OpResult CordRepBtree::SetEdge(bool owned, CordRep* edge, size_t delta) {
+ OpResult result;
+ const size_t idx = index(edge_type);
+ if (owned) {
+ result = {this, kSelf};
+ CordRep::Unref(edges_[idx]);
+ } else {
+ // Create a copy containing all unchanged edges. Unchanged edges are the
+ // half-open interval [begin, back) or [begin + 1, end) depending on
+ // `edge_type`. We conveniently cover both cases using a constexpr `shift`
+ // of 0 or 1, as `end == back + 1`.
+ result = {CopyRaw(), kCopied};
+ constexpr int shift = edge_type == kFront ? 1 : 0;
+ for (CordRep* r : Edges(begin() + shift, back() + shift)) {
+ CordRep::Ref(r);
+ }
+ }
+ result.tree->edges_[idx] = edge;
+ result.tree->length += delta;
+ return result;
+}
+
+template <EdgeType edge_type>
+CordRepBtree* CordRepBtree::AddCordRep(CordRepBtree* tree, CordRep* rep) {
+ const int depth = tree->height();
+ const size_t length = rep->length;
+ StackOperations<edge_type> ops;
+ CordRepBtree* leaf = ops.BuildStack(tree, depth);
+ const OpResult result =
+ leaf->AddEdge<edge_type>(ops.owned(depth), rep, length);
+ return ops.Unwind(tree, depth, length, result);
+}
+
+template <>
+CordRepBtree* CordRepBtree::NewLeaf<kBack>(y_absl::string_view data,
+ size_t extra) {
+ CordRepBtree* leaf = CordRepBtree::New(0);
+ size_t length = 0;
+ size_t end = 0;
+ const size_t cap = leaf->capacity();
+ while (!data.empty() && end != cap) {
+ auto* flat = CordRepFlat::New(data.length() + extra);
+ flat->length = (std::min)(data.length(), flat->Capacity());
+ length += flat->length;
+ leaf->edges_[end++] = flat;
+ data = Consume<kBack>(flat->Data(), data, flat->length);
+ }
+ leaf->length = length;
+ leaf->set_end(end);
+ return leaf;
+}
+
+template <>
+CordRepBtree* CordRepBtree::NewLeaf<kFront>(y_absl::string_view data,
+ size_t extra) {
+ CordRepBtree* leaf = CordRepBtree::New(0);
+ size_t length = 0;
+ size_t begin = leaf->capacity();
+ leaf->set_end(leaf->capacity());
+ while (!data.empty() && begin != 0) {
+ auto* flat = CordRepFlat::New(data.length() + extra);
+ flat->length = (std::min)(data.length(), flat->Capacity());
+ length += flat->length;
+ leaf->edges_[--begin] = flat;
+ data = Consume<kFront>(flat->Data(), data, flat->length);
+ }
+ leaf->length = length;
+ leaf->set_begin(begin);
+ return leaf;
+}
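+
+// Sketch, not part of the original source, of the two `NewLeaf` layouts for
+// a capacity-6 leaf holding three chunks D0..D2 of `data`:
+//
+//   NewLeaf<kBack>:   edges_ = [D0, D1, D2, _, _, _]   begin = 0, end = 3
+//   NewLeaf<kFront>:  edges_ = [_, _, _, D0, D1, D2]   begin = 3, end = 6
+//
+// In both cases the chunks read left to right in source order (the exact
+// split points may differ); only the alignment inside the edges array
+// differs, leaving room to grow on the expected side.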
+
+template <>
+y_absl::string_view CordRepBtree::AddData<kBack>(y_absl::string_view data,
+ size_t extra) {
+ assert(!data.empty());
+ assert(size() < capacity());
+ AlignBegin();
+ const size_t cap = capacity();
+ do {
+ CordRepFlat* flat = CordRepFlat::New(data.length() + extra);
+ const size_t n = (std::min)(data.length(), flat->Capacity());
+ flat->length = n;
+ edges_[fetch_add_end(1)] = flat;
+ data = Consume<kBack>(flat->Data(), data, n);
+ } while (!data.empty() && end() != cap);
+ return data;
+}
+
+template <>
+y_absl::string_view CordRepBtree::AddData<kFront>(y_absl::string_view data,
+ size_t extra) {
+ assert(!data.empty());
+ assert(size() < capacity());
+ AlignEnd();
+ do {
+ CordRepFlat* flat = CordRepFlat::New(data.length() + extra);
+ const size_t n = (std::min)(data.length(), flat->Capacity());
+ flat->length = n;
+ edges_[sub_fetch_begin(1)] = flat;
+ data = Consume<kFront>(flat->Data(), data, n);
+ } while (!data.empty() && begin() != 0);
+ return data;
+}
+
+template <EdgeType edge_type>
+CordRepBtree* CordRepBtree::AddData(CordRepBtree* tree, y_absl::string_view data,
+ size_t extra) {
+ if (ABSL_PREDICT_FALSE(data.empty())) return tree;
+
+ const size_t original_data_size = data.size();
+ int depth = tree->height();
+ StackOperations<edge_type> ops;
+ CordRepBtree* leaf = ops.BuildStack(tree, depth);
+
+ // If there is capacity in the last edge, append as much data
+ // as possible into this last edge.
+ if (leaf->size() < leaf->capacity()) {
+ OpResult result = leaf->ToOpResult(ops.owned(depth));
+ data = result.tree->AddData<edge_type>(data, extra);
+ if (data.empty()) {
+ result.tree->length += original_data_size;
+ return ops.Unwind(tree, depth, original_data_size, result);
+ }
+
+ // We added some data into this leaf, but not all. Propagate the added
+ // length to the topmost node, and rebuild the stack with any newly copied
+ // or updated nodes. From this point on, the path (leg) from the topmost
+ // node to the rightmost node towards the leaf node is privately owned.
+ size_t delta = original_data_size - data.size();
+ assert(delta > 0);
+ result.tree->length += delta;
+ tree = ops.Propagate(tree, depth, delta, result);
+ ops.share_depth = depth + 1;
+ }
+
+ // We were unable to append all data into the existing right-most leaf node.
+ // This means all remaining data must be put into (a) new leaf node(s) which
+ // we append to the tree. To make this efficient, we iteratively build full
+ // leaf nodes from `data` until the created leaf contains all remaining data.
+ // We utilize the `Unwind` method to merge the created leaf into the first
+ // level towards the root that has capacity. On each iteration with
+ // remaining data, we rebuild the stack in the knowledge that rightmost
+ // nodes are privately owned after the first `Unwind` completes.
+ for (;;) {
+ OpResult result = {CordRepBtree::NewLeaf<edge_type>(data, extra), kPopped};
+ if (result.tree->length == data.size()) {
+ return ops.Unwind(tree, depth, result.tree->length, result);
+ }
+ data = Consume<edge_type>(data, result.tree->length);
+ tree = ops.Unwind(tree, depth, result.tree->length, result);
+ depth = tree->height();
+ ops.BuildOwnedStack(tree, depth);
+ }
+}
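+
+// Hedged usage sketch, not part of the original source: appending a large
+// buffer through the public `Append` overload further below funnels into
+// this function:
+//
+//   TString big(1 << 20, 'x');
+//   tree = CordRepBtree::Append(tree, y_absl::string_view(big.data(), big.size()));
+//
+// This first tops off the rightmost leaf, then appends freshly built full
+// leaves until all data is consumed.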
+
+template <EdgeType edge_type>
+CordRepBtree* CordRepBtree::Merge(CordRepBtree* dst, CordRepBtree* src) {
+ assert(dst->height() >= src->height());
+
+ // Capture source length as we may consume / destroy `src`.
+ const size_t length = src->length;
+
+ // We attempt to merge `src` at its corresponding height in `dst`.
+ const int depth = dst->height() - src->height();
+ StackOperations<edge_type> ops;
+ CordRepBtree* merge_node = ops.BuildStack(dst, depth);
+
+ // If there is enough space in `merge_node` for all edges from `src`, add all
+ // edges to this node, making a fresh copy as needed if not privately owned.
+ // If `merge_node` does not have capacity for `src`, we rely on `Unwind` and
+ // `Finalize` to merge `src` into the first level towards `root` where there
+ // is capacity for another edge, or create a new top level node.
+ OpResult result;
+ if (merge_node->size() + src->size() <= kMaxCapacity) {
+ result = merge_node->ToOpResult(ops.owned(depth));
+ result.tree->Add<edge_type>(src->Edges());
+ result.tree->length += src->length;
+ if (src->refcount.IsOne()) {
+ Delete(src);
+ } else {
+ for (CordRep* edge : src->Edges()) CordRep::Ref(edge);
+ CordRepBtree::Unref(src);
+ }
+ } else {
+ result = {src, kPopped};
+ }
+
+ // Unless we merged at the top level (i.e.: src and dst are equal height),
+ // unwind the result towards the top level, and finalize the result.
+ if (depth) {
+ return ops.Unwind(dst, depth, length, result);
+ }
+ return ops.Finalize(dst, result);
+}
+
+CopyResult CordRepBtree::CopySuffix(size_t offset) {
+ assert(offset < this->length);
+
+ // As long as `offset` starts inside the last edge, we can 'drop' the current
+ // depth. For the most extreme example: if offset references the last data
+ // edge in the tree, there is only a single edge / path from the top of the
+ // tree to that last edge, so we can drop all the nodes except that edge.
+ // The fast path check for this is `back->length >= length - offset`.
+ int height = this->height();
+ CordRepBtree* node = this;
+ size_t len = node->length - offset;
+ CordRep* back = node->Edge(kBack);
+ while (back->length >= len) {
+ offset = back->length - len;
+ if (--height < 0) {
+ return {MakeSubstring(CordRep::Ref(back), offset), height};
+ }
+ node = back->btree();
+ back = node->Edge(kBack);
+ }
+ if (offset == 0) return {CordRep::Ref(node), height};
+
+ // Offset does not point into the last edge, so we span at least two edges.
+ // Find the index of `offset` with `IndexBeyond`, which provides us with
+ // the edge 'beyond' the offset if offset is not a clean starting point of
+ // an edge.
+ Position pos = node->IndexBeyond(offset);
+ CordRepBtree* sub = node->CopyToEndFrom(pos.index, len);
+ const CopyResult result = {sub, height};
+
+ // `pos.n` contains a non-zero value if the offset is not an exact starting
+ // point of an edge. In this case, `pos.n` contains the 'trailing' number of
+ // bytes of the edge preceding that in `pos.index`. We need to iteratively
+ // adjust the preceding edge with the 'broken' offset until we have a perfect
+ // start of the edge.
+ while (pos.n != 0) {
+ assert(pos.index >= 1);
+ const size_t begin = pos.index - 1;
+ sub->set_begin(begin);
+ CordRep* const edge = node->Edge(begin);
+
+ len = pos.n;
+ offset = edge->length - len;
+
+ if (--height < 0) {
+ sub->edges_[begin] = MakeSubstring(CordRep::Ref(edge), offset, len);
+ return result;
+ }
+
+ node = edge->btree();
+ pos = node->IndexBeyond(offset);
+
+ CordRepBtree* nsub = node->CopyToEndFrom(pos.index, len);
+ sub->edges_[begin] = nsub;
+ sub = nsub;
+ }
+ sub->set_begin(pos.index);
+ return result;
+}
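+
+// Illustrative trace, not part of the original source, assuming a leaf with
+// begin() == 0 and three edges of lengths [10, 20, 30] (total 60):
+// CopySuffix(15) computes len = 45. The last edge (30 bytes) cannot cover
+// 45 bytes, so no folding occurs. IndexBeyond(15) yields Position{2, 15}:
+// edge 2 is copied whole, after which the loop extends begin to edge 1 and
+// substitutes MakeSubstring(edge1, /*offset=*/5, /*n=*/15) for the 'broken'
+// edge.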
+
+CopyResult CordRepBtree::CopyPrefix(size_t n, bool allow_folding) {
+ assert(n > 0);
+ assert(n <= this->length);
+
+ // As long as `n` does not exceed the length of the first edge, we can 'drop'
+ // the current depth. For the most extreme example: if we'd copy a 1 byte
+ // prefix from a tree, there is only a single edge / path from the top of the
+ // tree to the single data edge containing this byte, so we can drop all the
+ // nodes except the data node.
+ int height = this->height();
+ CordRepBtree* node = this;
+ CordRep* front = node->Edge(kFront);
+ if (allow_folding) {
+ while (front->length >= n) {
+ if (--height < 0) return {MakeSubstring(CordRep::Ref(front), 0, n), -1};
+ node = front->btree();
+ front = node->Edge(kFront);
+ }
+ }
+ if (node->length == n) return {CordRep::Ref(node), height};
+
+ // `n` spans at least two nodes; find the end point of the span.
+ Position pos = node->IndexOf(n);
+
+ // Create a partial copy of the node up to `pos.index`, with a defined length
+ // of `n`. Any 'partial last edge' is added further below as needed.
+ CordRepBtree* sub = node->CopyBeginTo(pos.index, n);
+ const CopyResult result = {sub, height};
+
+ // `pos.n` contains the 'offset inside the edge for IndexOf(n)'. As long as
+ // this is not zero, we don't have a 'clean cut', so we need to make a
+ // (partial) copy of that last edge, and repeat this until pos.n is zero.
+ while (pos.n != 0) {
+ size_t end = pos.index;
+ n = pos.n;
+
+ CordRep* edge = node->Edge(pos.index);
+ if (--height < 0) {
+ sub->edges_[end++] = MakeSubstring(CordRep::Ref(edge), 0, n);
+ sub->set_end(end);
+ AssertValid(result.edge->btree());
+ return result;
+ }
+
+ node = edge->btree();
+ pos = node->IndexOf(n);
+ CordRepBtree* nsub = node->CopyBeginTo(pos.index, n);
+ sub->edges_[end++] = nsub;
+ sub->set_end(end);
+ sub = nsub;
+ }
+ sub->set_end(pos.index);
+ AssertValid(result.edge->btree());
+ return result;
+}
+
+CordRep* CordRepBtree::ExtractFront(CordRepBtree* tree) {
+ CordRep* front = tree->Edge(tree->begin());
+ if (tree->refcount.IsMutable()) {
+ Unref(tree->Edges(tree->begin() + 1, tree->end()));
+ CordRepBtree::Delete(tree);
+ } else {
+ CordRep::Ref(front);
+ CordRep::Unref(tree);
+ }
+ return front;
+}
+
+CordRepBtree* CordRepBtree::ConsumeBeginTo(CordRepBtree* tree, size_t end,
+ size_t new_length) {
+ assert(end <= tree->end());
+ if (tree->refcount.IsMutable()) {
+ Unref(tree->Edges(end, tree->end()));
+ tree->set_end(end);
+ tree->length = new_length;
+ } else {
+ CordRepBtree* old = tree;
+ tree = tree->CopyBeginTo(end, new_length);
+ CordRep::Unref(old);
+ }
+ return tree;
+}
+
+CordRep* CordRepBtree::RemoveSuffix(CordRepBtree* tree, size_t n) {
+ // Check the input and deal with the trivial cases 'remove all' and
+ // 'remove none'.
+ assert(tree != nullptr);
+ assert(n <= tree->length);
+ const size_t len = tree->length;
+ if (ABSL_PREDICT_FALSE(n == 0)) {
+ return tree;
+ }
+ if (ABSL_PREDICT_FALSE(n >= len)) {
+ CordRepBtree::Unref(tree);
+ return nullptr;
+ }
+
+ size_t length = len - n;
+ int height = tree->height();
+ bool is_mutable = tree->refcount.IsMutable();
+
+ // Extract all top nodes that are reduced to a single edge.
+ Position pos = tree->IndexOfLength(length);
+ while (pos.index == tree->begin()) {
+ CordRep* edge = ExtractFront(tree);
+ is_mutable &= edge->refcount.IsMutable();
+ if (height-- == 0) return ResizeEdge(edge, length, is_mutable);
+ tree = edge->btree();
+ pos = tree->IndexOfLength(length);
+ }
+
+ // Repeat the following sequence traversing down the tree:
+ // - Crop the top node to the 'last remaining edge' adjusting length.
+ // - Set the length for down edges to the partial length in that last edge.
+ // - Repeat this until the last edge is 'included in full'.
+ // - If we hit the data edge level, resize and return the last data edge.
+ CordRepBtree* top = tree = ConsumeBeginTo(tree, pos.index + 1, length);
+ CordRep* edge = tree->Edge(pos.index);
+ length = pos.n;
+ while (length != edge->length) {
+ // ConsumeBeginTo guarantees `tree` is a clean, privately owned copy.
+ assert(tree->refcount.IsMutable());
+ const bool edge_is_mutable = edge->refcount.IsMutable();
+
+ if (height-- == 0) {
+ tree->edges_[pos.index] = ResizeEdge(edge, length, edge_is_mutable);
+ return AssertValid(top);
+ }
+
+ if (!edge_is_mutable) {
+ // We can't 'in place' remove any suffixes down this edge.
+ // Replace this edge with a prefix copy instead.
+ tree->edges_[pos.index] = edge->btree()->CopyPrefix(length, false).edge;
+ CordRep::Unref(edge);
+ return AssertValid(top);
+ }
+
+ // Move down one level, rinse repeat.
+ tree = edge->btree();
+ pos = tree->IndexOfLength(length);
+ tree = ConsumeBeginTo(edge->btree(), pos.index + 1, length);
+ edge = tree->Edge(pos.index);
+ length = pos.n;
+ }
+
+ return AssertValid(top);
+}
+
+CordRep* CordRepBtree::SubTree(size_t offset, size_t n) {
+ assert(n <= this->length);
+ assert(offset <= this->length - n);
+ if (ABSL_PREDICT_FALSE(n == 0)) return nullptr;
+
+ CordRepBtree* node = this;
+ int height = node->height();
+ Position front = node->IndexOf(offset);
+ CordRep* left = node->edges_[front.index];
+ while (front.n + n <= left->length) {
+ if (--height < 0) return MakeSubstring(CordRep::Ref(left), front.n, n);
+ node = left->btree();
+ front = node->IndexOf(front.n);
+ left = node->edges_[front.index];
+ }
+
+ const Position back = node->IndexBefore(front, n);
+ CordRep* const right = node->edges_[back.index];
+ assert(back.index > front.index);
+
+ // Get partial suffix and prefix entries.
+ CopyResult prefix;
+ CopyResult suffix;
+ if (height > 0) {
+ // Copy prefix and suffix of the boundary nodes.
+ prefix = left->btree()->CopySuffix(front.n);
+ suffix = right->btree()->CopyPrefix(back.n);
+
+ // If there is an edge between the prefix and suffix edges, then the tree
+ // must remain at its previous (full) height. If we have no edges between
+ // prefix and suffix edges, then the tree must be as high as either the
+ // suffix or prefix edges (which are collapsed to their minimum heights).
+ if (front.index + 1 == back.index) {
+ height = (std::max)(prefix.height, suffix.height) + 1;
+ }
+
+ // Raise prefix and suffixes to the new tree height.
+ for (int h = prefix.height + 1; h < height; ++h) {
+ prefix.edge = CordRepBtree::New(prefix.edge);
+ }
+ for (int h = suffix.height + 1; h < height; ++h) {
+ suffix.edge = CordRepBtree::New(suffix.edge);
+ }
+ } else {
+ // Leaf node, simply take substrings for prefix and suffix.
+ prefix = CopyResult{MakeSubstring(CordRep::Ref(left), front.n), -1};
+ suffix = CopyResult{MakeSubstring(CordRep::Ref(right), 0, back.n), -1};
+ }
+
+ // Compose resulting tree.
+ CordRepBtree* sub = CordRepBtree::New(height);
+ size_t end = 0;
+ sub->edges_[end++] = prefix.edge;
+ for (CordRep* r : node->Edges(front.index + 1, back.index)) {
+ sub->edges_[end++] = CordRep::Ref(r);
+ }
+ sub->edges_[end++] = suffix.edge;
+ sub->set_end(end);
+ sub->length = n;
+ return AssertValid(sub);
+}
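+
+// Hedged example, not part of the original source: carving out the middle
+// of a tree shares all interior edges with the original:
+//
+//   CordRep* mid = tree->SubTree(1, tree->length - 2);
+//
+// Typically only the two boundary edges need (substring) copies; every edge
+// lying fully inside the requested range is reference-shared.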
+
+CordRepBtree* CordRepBtree::MergeTrees(CordRepBtree* left,
+ CordRepBtree* right) {
+ return left->height() >= right->height() ? Merge<kBack>(left, right)
+ : Merge<kFront>(right, left);
+}
+
+bool CordRepBtree::IsFlat(y_absl::string_view* fragment) const {
+ if (height() == 0 && size() == 1) {
+ if (fragment) *fragment = Data(begin());
+ return true;
+ }
+ return false;
+}
+
+bool CordRepBtree::IsFlat(size_t offset, const size_t n,
+ y_absl::string_view* fragment) const {
+ assert(n <= this->length);
+ assert(offset <= this->length - n);
+ if (ABSL_PREDICT_FALSE(n == 0)) return false;
+ int height = this->height();
+ const CordRepBtree* node = this;
+ for (;;) {
+ const Position front = node->IndexOf(offset);
+ const CordRep* edge = node->Edge(front.index);
+ if (edge->length < front.n + n) return false;
+ if (--height < 0) {
+ if (fragment) *fragment = EdgeData(edge).substr(front.n, n);
+ return true;
+ }
+ offset = front.n;
+ node = node->Edge(front.index)->btree();
+ }
+}
+
+char CordRepBtree::GetCharacter(size_t offset) const {
+ assert(offset < length);
+ const CordRepBtree* node = this;
+ int height = node->height();
+ for (;;) {
+ Position front = node->IndexOf(offset);
+ if (--height < 0) return node->Data(front.index)[front.n];
+ offset = front.n;
+ node = node->Edge(front.index)->btree();
+ }
+}
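+
+// Minimal usage sketch, not part of the original source:
+//
+//   char c = tree->GetCharacter(5);
+//
+// Each iteration picks the edge containing `offset` via IndexOf() and
+// rebases `offset` to the local offset `front.n` until a data edge is hit.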
+
+Span<char> CordRepBtree::GetAppendBufferSlow(size_t size) {
+ // The inlined version in `GetAppendBuffer()` deals with all heights <= 3.
+ assert(height() >= 4);
+ assert(refcount.IsMutable());
+
+ // Build a stack of nodes we may potentially need to update if we find a
+ // non-shared FLAT with capacity at the leaf level.
+ const int depth = height();
+ CordRepBtree* node = this;
+ CordRepBtree* stack[kMaxDepth];
+ for (int i = 0; i < depth; ++i) {
+ node = node->Edge(kBack)->btree();
+ if (!node->refcount.IsMutable()) return {};
+ stack[i] = node;
+ }
+
+ // Must be a privately owned, mutable flat.
+ CordRep* const edge = node->Edge(kBack);
+ if (!edge->refcount.IsMutable() || edge->tag < FLAT) return {};
+
+ // Must have capacity.
+ const size_t avail = edge->flat()->Capacity() - edge->length;
+ if (avail == 0) return {};
+
+ // Build span on remaining capacity.
+ size_t delta = (std::min)(size, avail);
+ Span<char> span = {edge->flat()->Data() + edge->length, delta};
+ edge->length += delta;
+ this->length += delta;
+ for (int i = 0; i < depth; ++i) {
+ stack[i]->length += delta;
+ }
+ return span;
+}
+
+CordRepBtree* CordRepBtree::CreateSlow(CordRep* rep) {
+ if (rep->IsBtree()) return rep->btree();
+
+ CordRepBtree* node = nullptr;
+ auto consume = [&node](CordRep* r, size_t offset, size_t length) {
+ r = MakeSubstring(r, offset, length);
+ if (node == nullptr) {
+ node = New(r);
+ } else {
+ node = CordRepBtree::AddCordRep<kBack>(node, r);
+ }
+ };
+ Consume(rep, consume);
+ return node;
+}
+
+CordRepBtree* CordRepBtree::AppendSlow(CordRepBtree* tree, CordRep* rep) {
+ if (ABSL_PREDICT_TRUE(rep->IsBtree())) {
+ return MergeTrees(tree, rep->btree());
+ }
+ auto consume = [&tree](CordRep* r, size_t offset, size_t length) {
+ r = MakeSubstring(r, offset, length);
+ tree = CordRepBtree::AddCordRep<kBack>(tree, r);
+ };
+ Consume(rep, consume);
+ return tree;
+}
+
+CordRepBtree* CordRepBtree::PrependSlow(CordRepBtree* tree, CordRep* rep) {
+ if (ABSL_PREDICT_TRUE(rep->IsBtree())) {
+ return MergeTrees(rep->btree(), tree);
+ }
+ auto consume = [&tree](CordRep* r, size_t offset, size_t length) {
+ r = MakeSubstring(r, offset, length);
+ tree = CordRepBtree::AddCordRep<kFront>(tree, r);
+ };
+ ReverseConsume(rep, consume);
+ return tree;
+}
+
+CordRepBtree* CordRepBtree::Append(CordRepBtree* tree, y_absl::string_view data,
+ size_t extra) {
+ return CordRepBtree::AddData<kBack>(tree, data, extra);
+}
+
+CordRepBtree* CordRepBtree::Prepend(CordRepBtree* tree, y_absl::string_view data,
+ size_t extra) {
+ return CordRepBtree::AddData<kFront>(tree, data, extra);
+}
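+
+// Brief usage sketch, not part of the original source. The `extra` hint
+// reserves spare capacity in newly created flats:
+//
+//   tree = CordRepBtree::Append(tree, "abc", /*extra=*/6);
+//
+// The flat created for "abc" then has at least 6 spare bytes, which a later
+// append through the append-buffer path can fill without allocating a new
+// flat.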
+
+template CordRepBtree* CordRepBtree::AddCordRep<kFront>(CordRepBtree* tree,
+ CordRep* rep);
+template CordRepBtree* CordRepBtree::AddCordRep<kBack>(CordRepBtree* tree,
+ CordRep* rep);
+template CordRepBtree* CordRepBtree::AddData<kFront>(CordRepBtree* tree,
+ y_absl::string_view data,
+ size_t extra);
+template CordRepBtree* CordRepBtree::AddData<kBack>(CordRepBtree* tree,
+ y_absl::string_view data,
+ size_t extra);
+
+void CordRepBtree::Rebuild(CordRepBtree** stack, CordRepBtree* tree,
+ bool consume) {
+ bool owned = consume && tree->refcount.IsOne();
+ if (tree->height() == 0) {
+ for (CordRep* edge : tree->Edges()) {
+ if (!owned) edge = CordRep::Ref(edge);
+ size_t height = 0;
+ size_t length = edge->length;
+ CordRepBtree* node = stack[0];
+ OpResult result = node->AddEdge<kBack>(true, edge, length);
+ while (result.action == CordRepBtree::kPopped) {
+ stack[height] = result.tree;
+ if (stack[++height] == nullptr) {
+ result.action = CordRepBtree::kSelf;
+ stack[height] = CordRepBtree::New(node, result.tree);
+ } else {
+ node = stack[height];
+ result = node->AddEdge<kBack>(true, result.tree, length);
+ }
+ }
+ while (stack[++height] != nullptr) {
+ stack[height]->length += length;
+ }
+ }
+ } else {
+ for (CordRep* rep : tree->Edges()) {
+ Rebuild(stack, rep->btree(), owned);
+ }
+ }
+ if (consume) {
+ if (owned) {
+ CordRepBtree::Delete(tree);
+ } else {
+ CordRepBtree::Unref(tree);
+ }
+ }
+}
+
+CordRepBtree* CordRepBtree::Rebuild(CordRepBtree* tree) {
+ // Set up initial stack with empty leaf node.
+ CordRepBtree* node = CordRepBtree::New();
+ CordRepBtree* stack[CordRepBtree::kMaxDepth + 1] = {node};
+
+ // Recursively build the tree, consuming the input tree.
+ Rebuild(stack, tree, /* consume reference */ true);
+
+ // Return the topmost node.
+ for (CordRepBtree* parent : stack) {
+ if (parent == nullptr) return node;
+ node = parent;
+ }
+
+ // Unreachable
+ assert(false);
+ return nullptr;
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree.h
index 3ad8097cc8..b60358ab9e 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree.h
@@ -1,939 +1,939 @@
-// Copyright 2021 The Abseil Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#ifndef ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_H_
-#define ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_H_
-
-#include <cassert>
-#include <cstdint>
-#include <iosfwd>
-
-#include "y_absl/base/config.h"
-#include "y_absl/base/internal/raw_logging.h"
-#include "y_absl/base/optimization.h"
-#include "y_absl/strings/internal/cord_internal.h"
-#include "y_absl/strings/internal/cord_rep_flat.h"
-#include "y_absl/strings/string_view.h"
-#include "y_absl/types/span.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-class CordRepBtreeNavigator;
-
-// CordRepBtree is, as the name implies, a btree implementation of a CordRep
-// tree. Data is stored at the leaf level only; non-leaf nodes contain down
-// pointers only. Allowed types of data edges are FLAT, EXTERNAL and
-// SUBSTRINGs of FLAT or EXTERNAL nodes. The implementation allows data to be
-// added to either end of the tree only; it does not provide any 'insert'
-// logic. This has the benefit that we can expect good fill ratios: all nodes
-// except the outer 'legs' will have 100% fill ratios for trees built using
-// the Append/Prepend methods. Merged trees will typically have a fill ratio
-// well above 50%: in a similar fashion, one side of the merged tree will
-// typically have a 100% fill ratio, and the 'open' end will average 50%. All
-// operations are O(log(n)) or better, and the tree never needs balancing.
-//
-// All methods accepting a CordRep* or CordRepBtree* adopt a reference on that
-// input unless explicitly stated otherwise. All functions returning a CordRep*
-// or CordRepBtree* instance transfer a reference back to the caller.
-// Simplified, callers both 'donate' and 'consume' a reference count on each
-// call, simplifying the API. An example of building a tree:
-//
-// CordRepBtree* tree = CordRepBtree::Create(MakeFlat("Hello"));
-// tree = CordRepBtree::Append(tree, MakeFlat("world"));
-//
-// In the above example, all inputs are consumed, making each call on `tree`
-// reference-count neutral. The returned `tree` value can be different
-// from the input if the input is shared with other threads, or if the tree
-// grows in height, but callers typically never have to concern themselves with
-// that and trust that all methods DTRT at all times.
-class CordRepBtree : public CordRep {
- public:
- // EdgeType identifies `front` and `back` enum values.
- // Various implementations in CordRepBtree such as `Add` and `Edge` are
- // generic and templated on operating on either of the boundary edges.
- // For more information on the possible edges contained in a CordRepBtree
- // instance see the documentation for `edges_`.
- enum class EdgeType { kFront, kBack };
-
- // Convenience constants for `EdgeType`.
- static constexpr EdgeType kFront = EdgeType::kFront;
- static constexpr EdgeType kBack = EdgeType::kBack;
-
- // Maximum number of edges: based on experiments and performance data, we can
- // pick suitable values resulting in optimum cacheline aligned values. The
- // preferred values are based on 64-bit systems where we aim to align this
- // class onto 64 bytes, i.e.: 6 = 64 bytes, 14 = 128 bytes, etc.
- // TODO(b/192061034): experiment with alternative sizes.
- static constexpr size_t kMaxCapacity = 6;
-
- // Reasonable maximum height of the btree. We can expect a fill ratio of at
- // least 50%: trees are always expanded at the front or back. Concatenating
- // trees will then typically fold at the top most node, where the lower nodes
- // are at least at capacity on one side of joined inputs. At a lower fill
- // rate of 4 edges per node, we have capacity for ~16 million leaf nodes.
- // We will fail / abort if an application ever exceeds this height, which
- // should be extremely rare (near impossible) and be an indication of an
- // application error: we do not assume it reasonable for any application to
- // operate correctly with such monster trees.
- // Another compelling reason for the number `12` is that any contextual stack
- // required for navigation or insertion requires 12 words and 12 bytes, which
- // fits inside 2 cache lines with some room to spare, and is reasonable as a
- // local stack variable compared to Cord's current near 400 bytes stack use.
- // The maximum `height` value of a node is then `kMaxDepth - 1` as node height
- // values start with a value of 0 for leaf nodes.
- static constexpr int kMaxDepth = 12;
- static constexpr int kMaxHeight = kMaxDepth - 1;
-
- // `Action` defines the action for unwinding changes done at the btree's leaf
- // level that need to be propagated up to the parent node(s). Each operation
- // on a node has an effect / action defined as follows:
- // - kSelf
- // The operation (add / update, etc) was performed directly on the node as
- // the node is private to the current thread (i.e.: not shared directly or
- // indirectly through a refcount > 1). Changes can be propagated directly to
- // all parent nodes as all parent nodes are also then private to the current
- // thread.
- // - kCopied
- // The operation (add / update, etc) was performed on a copy of the original
- // node, as the node is (potentially) directly or indirectly shared with
- // other threads. Changes need to be propagated into the parent nodes where
- // the old down pointer must be unreffed and replaced with this new copy.
- // Such changes to parent nodes may themselves require a copy if the parent
- // node is also shared. A kCopied action can propagate all the way to the
- // top node where we then must unref the `tree` input provided by the
- // caller, and return the new copy.
- // - kPopped
- // The operation (typically add) could not be satisfied due to insufficient
- // capacity in the targeted node, and a new 'leg' was created that needs to
- // be added into the parent node. For example, adding a FLAT inside a leaf
- // node that is at capacity will create a new leaf node containing that
- // FLAT, that needs to be 'popped' up the btree. Such 'pop' actions can
- // cascade up the tree if parent nodes are also at capacity. A 'Popped'
- // action propagating all the way to the top of the tree will result in
- // the tree becoming one level higher than the current tree through a final
- // `CordRepBtree::New(tree, popped)` call, resulting in a new top node
- // referencing the old tree and the new (fully popped upwards) 'leg'.
- enum Action { kSelf, kCopied, kPopped };
-
- // Result of an operation on a node. See the `Action` enum for details.
- struct OpResult {
- CordRepBtree* tree;
- Action action;
- };
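-
- // Hedged illustration, not part of the original source: adding an edge to
- // a leaf that is already at kMaxCapacity yields a kPopped result whose
- // tree is a new single-edge leaf that the caller must link into the
- // parent:
- //
- //   OpResult r = leaf->AddEdge<kBack>(/*owned=*/true, edge, edge->length);
- //   // r.action == kPopped and r.tree == New(edge) when the leaf is full.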
-
- // Return value of the CopyPrefix and CopySuffix methods which can
- // return a node or data edge at any height inside the tree.
- // A height of 0 defines the lowest (leaf) node, a height of -1 identifies
- // `edge` as being a plain data node: EXTERNAL / FLAT or SUBSTRING thereof.
- struct CopyResult {
- CordRep* edge;
- int height;
- };
-
- // Logical position inside a node:
- // - index: index of the edge.
- // - n: size or offset value depending on context.
- struct Position {
- size_t index;
- size_t n;
- };
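-
- // Hedged example, not part of the original source, assuming begin() == 0:
- // for a node with edge lengths [10, 20, 30], IndexOf(25) returns
- // Position{1, 15}, i.e. offset 25 falls in edge 1 at local offset 15.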
-
- // Creates a btree from the given input. Adopts a ref of `rep`.
- // If the input `rep` is itself a btree, i.e., `IsBtree()`, then this
- // function immediately returns `rep->btree()`. If the input is a valid data
- // edge (see IsDataEdge()), then a new leaf node is returned containing `rep`
- // as the sole data edge. Else, the input is assumed to be a (legacy) concat
- // tree, and the input is consumed and transformed into a btree().
- static CordRepBtree* Create(CordRep* rep);
-
- // Destroys the provided tree. Should only be called by cord internal APIs,
- // typically after a ref_count.Decrement() on the last reference count.
- static void Destroy(CordRepBtree* tree);
-
- // Use CordRep::Unref() as we overload for y_absl::Span<CordRep* const>.
- using CordRep::Unref;
-
- // Unrefs all edges in `edges` which are assumed to be 'likely one'.
- static void Unref(y_absl::Span<CordRep* const> edges);
-
- // Appends / Prepends an existing CordRep instance to this tree.
- // The below methods accept three types of input:
- // 1) `rep` is a data node (see `IsDataEdge` for valid data edges).
- // `rep` is appended or prepended to this tree 'as is'.
- // 2) `rep` is a BTREE.
- // `rep` is merged into `tree` respecting the Append/Prepend order.
- // 3) `rep` is some other (legacy) type.
- //    `rep` is converted in place and added to `tree`.
- // Requires `tree` and `rep` to be not null.
- static CordRepBtree* Append(CordRepBtree* tree, CordRep* rep);
- static CordRepBtree* Prepend(CordRepBtree* tree, CordRep* rep);
-
- // Append/Prepend the data in `data` to this tree.
- // The `extra` parameter defines how much extra capacity should be allocated
- // for any additional FLAT being allocated. This is an optimization hint from
- // the caller. For example, a caller may need to add 2 string_views of data
- // "abc" and "defghi" which are not consecutive. The caller can in this case
- // invoke `AddData(tree, "abc", 6)`, and any newly added flat is allocated
- // where possible with at least 6 bytes of extra capacity beyond `length`.
- // This helps avoid fragmenting data over multiple flats.
- // There is no limit on the size of `data`. If `data` cannot be stored inside
- // a single flat, then the function will iteratively add flats until all data
- // has been consumed and appended or prepended to the tree.
- static CordRepBtree* Append(CordRepBtree* tree, string_view data,
- size_t extra = 0);
- static CordRepBtree* Prepend(CordRepBtree* tree, string_view data,
- size_t extra = 0);
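-
- // Illustrative example (hypothetical caller code): adding the two
- // non-consecutive fragments from the comment above, hinting the first call
- // with `extra` so both fragments can land in a single flat where possible:
- //
- //   tree = CordRepBtree::Append(tree, "abc", /*extra=*/6);
- //   tree = CordRepBtree::Append(tree, "defghi");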
-
- // Returns a new tree, containing `n` bytes of data from this instance
- // starting at offset `offset`. Where possible, the returned tree shares
- // (re-uses) data edges and nodes with this instance to minimize the
- // combined memory footprint of both trees.
- // Requires `offset + n <= length`. Returns `nullptr` if `n` is zero.
- CordRep* SubTree(size_t offset, size_t n);
-
- // Removes `n` trailing bytes from `tree`, and returns the resulting tree
- // or data edge. Returns `tree` if n is zero, and nullptr if n == length.
- // This function is logically identical to:
- // result = tree->SubTree(0, tree->length - n);
- // Unref(tree);
- // return result;
- // However, the actual implementation will as much as possible perform 'in
- // place' modifications on the tree on all nodes and edges that are mutable.
- // For example, in a fully privately owned tree with the last edge being a
- // flat of length 12, RemoveSuffix(1) will simply set the length of that data
- // edge to 11, and reduce the length of all nodes on the edge path by 1.
- static CordRep* RemoveSuffix(CordRepBtree* tree, size_t n);
-
- // Returns the character at the given offset.
- char GetCharacter(size_t offset) const;
-
- // Returns true if this node holds a single data edge, and if so, sets
- // `fragment` to reference the contained data. `fragment` is an optional
- // output parameter and allowed to be null.
- bool IsFlat(y_absl::string_view* fragment) const;
-
- // Returns true if the data of `n` bytes starting at offset `offset`
- // is contained in a single data edge, and if so, sets fragment to reference
- // the contained data. `fragment` is an optional output parameter and allowed
- // to be null.
- bool IsFlat(size_t offset, size_t n, y_absl::string_view* fragment) const;
-
- // Returns a span (mutable range of bytes) of up to `size` bytes into the
- // last FLAT data edge inside this tree under the following conditions:
- // - none of the nodes down into the FLAT node are shared.
- // - the last data edge in this tree is a non-shared FLAT.
- // - the referenced FLAT has additional capacity available.
- // If all these conditions are met, a non-empty span is returned, and the
- // length of the flat node and involved tree nodes have been increased by
- // `span.length()`. The caller is responsible for immediately assigning values
- // to all uninitialized data referenced by the returned span.
- // Requires `this->refcount.IsMutable()`: this function forces the
- // caller to do this fast path check on the top level node, as this is the
- // most commonly shared node of a cord tree.
- Span<char> GetAppendBuffer(size_t size);
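-
- // Illustrative sketch (hypothetical caller code), assuming `src` holds the
- // bytes to be appended:
- //
- //   if (tree->refcount.IsMutable()) {
- //     y_absl::Span<char> span = tree->GetAppendBuffer(src.size());
- //     memcpy(span.data(), src.data(), span.size());
- //     src.remove_prefix(span.size());  // any leftover bytes need Append()
- //   }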
-
- // Returns the `height` of the tree. The height of a tree is limited to
- // kMaxHeight. `height` is implemented as an `int` as in some places we
- // use negative (-1) values for 'data edges'.
- int height() const { return static_cast<int>(storage[0]); }
-
- // Properties: begin, back, end, front/back boundary indexes.
- size_t begin() const { return static_cast<size_t>(storage[1]); }
- size_t back() const { return static_cast<size_t>(storage[2]) - 1; }
- size_t end() const { return static_cast<size_t>(storage[2]); }
- size_t index(EdgeType edge) const {
- return edge == kFront ? begin() : back();
- }
-
- // Properties: size and capacity.
- // `capacity` contains the current capacity of this instance, where
- // `kMaxCapacity` contains the maximum capacity of a btree node.
- // For now, `capacity` and `kMaxCapacity` return the same value, but this may
- // change in the future if we see benefit in dynamically sizing 'small' nodes
- // to 'large' nodes for large data trees.
- size_t size() const { return end() - begin(); }
- size_t capacity() const { return kMaxCapacity; }
-
- // Edge access
- inline CordRep* Edge(size_t index) const;
- inline CordRep* Edge(EdgeType edge_type) const;
- inline y_absl::Span<CordRep* const> Edges() const;
- inline y_absl::Span<CordRep* const> Edges(size_t begin, size_t end) const;
-
- // Returns reference to the data edge at `index`.
- // Requires this instance to be a leaf node, and `index` to be a valid index.
- inline y_absl::string_view Data(size_t index) const;
-
- static const char* EdgeDataPtr(const CordRep* r);
- static y_absl::string_view EdgeData(const CordRep* r);
-
- // Returns true if the provided rep is a FLAT, EXTERNAL or a SUBSTRING node
- // holding a FLAT or EXTERNAL child rep.
- static bool IsDataEdge(const CordRep* rep);
-
- // Diagnostics: returns true if `tree` is valid and internally consistent.
- // If `shallow` is false, then the provided top level node and all child nodes
- // below it are recursively checked. If `shallow` is true, only the provided
- // node in `tree` and the cumulative length, type and height of the direct
- // child nodes of `tree` are checked. The value of `shallow` is ignored if the
- // internal `cord_btree_exhaustive_validation` diagnostics variable is true,
- // in which case the performed validation works as if `shallow` were false.
- // This function is intended for debugging and testing purposes only.
- static bool IsValid(const CordRepBtree* tree, bool shallow = false);
-
- // Diagnostics: asserts that the provided tree is valid.
- // `AssertValid()` performs a shallow validation by default. `shallow` can be
- // set to false in which case an exhaustive validation is performed. This
- // function is implemented in terms of calling `IsValid()` and asserting the
- // return value to be true. See `IsValid()` for more information.
- // This function is intended for debugging and testing purposes only.
- static CordRepBtree* AssertValid(CordRepBtree* tree, bool shallow = true);
- static const CordRepBtree* AssertValid(const CordRepBtree* tree,
- bool shallow = true);
-
- // Diagnostics: dump the contents of this tree to `stream`.
- // This function is intended for debugging and testing purposes only.
- static void Dump(const CordRep* rep, std::ostream& stream);
- static void Dump(const CordRep* rep, y_absl::string_view label,
- std::ostream& stream);
- static void Dump(const CordRep* rep, y_absl::string_view label,
- bool include_contents, std::ostream& stream);
-
- // Adds the edge `edge` to this node if possible. `owned` indicates if the
- // current node is potentially shared or not with other threads. Returns:
- // - {kSelf, <this>}
- // The edge was directly added to this node.
- // - {kCopied, <node>}
- // The edge was added to a copy of this node.
- // - {kPopped, New(edge, height())}
- // A new leg with the edge was created as this node has no extra capacity.
- template <EdgeType edge_type>
- inline OpResult AddEdge(bool owned, CordRep* edge, size_t delta);
-
- // Replaces the front or back edge with the provided new edge. Returns:
- // - {kSelf, <this>}
- // The edge was directly set in this node. The old edge is unreffed.
- // - {kCopied, <node>}
- // A copy of this node was created with the new edge value.
- // In both cases, the function adopts a reference on `edge`.
- template <EdgeType edge_type>
- OpResult SetEdge(bool owned, CordRep* edge, size_t delta);
-
- // Creates a new empty node at the specified height.
- static CordRepBtree* New(int height = 0);
-
- // Creates a new node containing `rep`, with the height being computed
- // automatically based on the type of `rep`.
- static CordRepBtree* New(CordRep* rep);
-
- // Creates a new node containing both `front` and `back` at height
- // `front.height() + 1`. Requires `back.height() == front.height()`.
- static CordRepBtree* New(CordRepBtree* front, CordRepBtree* back);
-
- // Creates a fully balanced tree from the provided tree by rebuilding a new
- // tree from all data edges in the input. This function is automatically
- // invoked internally when the tree exceeds the maximum height.
- static CordRepBtree* Rebuild(CordRepBtree* tree);
-
- private:
- CordRepBtree() = default;
- ~CordRepBtree() = default;
-
- // Initializes the main properties `tag`, `begin`, `end`, `height`.
- inline void InitInstance(int height, size_t begin = 0, size_t end = 0);
-
- // Direct property access begin / end
- void set_begin(size_t begin) { storage[1] = static_cast<uint8_t>(begin); }
- void set_end(size_t end) { storage[2] = static_cast<uint8_t>(end); }
-
- // Decreases the value of `begin` by `n`, and returns the new value. Notice
- // how this returns the new value unlike atomic::fetch_add which returns the
- // old value. This is because it is used to prepend edges at 'begin - 1'.
- size_t sub_fetch_begin(size_t n) {
- storage[1] -= static_cast<uint8_t>(n);
- return storage[1];
- }
-
- // Increases the value of `end` by `n`, and returns the previous value. This
- // function is typically used to append edges at 'end'.
- size_t fetch_add_end(size_t n) {
- const uint8_t current = storage[2];
- storage[2] = static_cast<uint8_t>(current + n);
- return current;
- }
-
- // Returns the index of the last edge starting on, or before `offset`, with
- // `n` containing the relative offset of `offset` inside that edge.
- // Requires `offset` < length.
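- // For example (illustrative): for a node (begin() == 0) with edges of
- // length 10 and 20, IndexOf(15) returns {1, 5}, i.e., offset 15 falls
- // 5 bytes into edge 1.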
- Position IndexOf(size_t offset) const;
-
- // Returns the index of the last edge starting before `offset`, with `n`
- // containing the relative offset of `offset` inside that edge.
- // This function is useful to find the edges for some span of bytes ending at
- // `offset` (i.e., `n` bytes). For example:
- //
- // Position pos = IndexBefore(n)
- // edges = Edges(begin(), pos.index) // All full edges (may be empty)
- // last = Sub(Edge(pos.index), 0, pos.n) // Last partial edge (may be empty)
- //
- // Requires 0 < `offset` <= length.
- Position IndexBefore(size_t offset) const;
-
- // Returns the index of the edge ending at (or on) length `n`, and the
- // number of bytes inside that edge up to `n`. For example, if we have a
- // Node with 2 edges, one of 10 and one of 20 long, then IndexOfLength(27)
- // will return {1, 17}, and IndexOfLength(10) will return {0, 10}.
- Position IndexOfLength(size_t n) const;
-
- // Identical to the above function except starting from the position `front`.
- // This function is equivalent to `IndexBefore(front.n + offset)`, with
- // the difference that this function is optimized to start at `front.index`.
- Position IndexBefore(Position front, size_t offset) const;
-
- // Returns the index of the edge directly beyond the edge containing offset
- // `offset`, with `n` containing the distance of that edge from `offset`.
- // This function is useful for iteratively finding suffix nodes and remaining
- // partial bytes in left-most suffix nodes as for example in CopySuffix.
- // Requires `offset` < length.
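- // For example (illustrative): for a node (begin() == 0) with edges of
- // length 10 and 20, IndexBeyond(5) returns {1, 5}: edge 1 starts 5 bytes
- // beyond offset 5.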
- Position IndexBeyond(size_t offset) const;
-
- // Destruction
- static void DestroyLeaf(CordRepBtree* tree, size_t begin, size_t end);
- static void DestroyNonLeaf(CordRepBtree* tree, size_t begin, size_t end);
- static void DestroyTree(CordRepBtree* tree, size_t begin, size_t end);
- static void Delete(CordRepBtree* tree) { delete tree; }
-
- // Creates a new leaf node containing as much data as possible from `data`.
- // The data is added either forwards or reversed depending on `edge_type`.
- // Callers must check the length of the returned node to determine if all data
- // was copied or not.
- // See the `Append/Prepend` function for the meaning and purpose of `extra`.
- template <EdgeType edge_type>
- static CordRepBtree* NewLeaf(y_absl::string_view data, size_t extra);
-
- // Creates a raw copy of this Btree node, copying all properties, but
- // without adding any references to existing edges.
- CordRepBtree* CopyRaw() const;
-
- // Creates a full copy of this Btree node, adding a reference on all edges.
- CordRepBtree* Copy() const;
-
- // Creates a partial copy of this Btree node, copying all edges up to `end`,
- // adding a reference on each copied edge, and sets the length of the newly
- // created copy to `new_length`.
- CordRepBtree* CopyBeginTo(size_t end, size_t new_length) const;
-
- // Returns a tree containing the edges [tree->begin(), end) and length
- // of `new_length`. This method consumes a reference on the provided
- // tree, and logically performs the following operation:
- // result = tree->CopyBeginTo(end, new_length);
- // CordRep::Unref(tree);
- // return result;
- static CordRepBtree* ConsumeBeginTo(CordRepBtree* tree, size_t end,
- size_t new_length);
-
- // Creates a partial copy of this Btree node, copying all edges starting at
- // `begin`, adding a reference on each copied edge, and sets the length of
- // the newly created copy to `new_length`.
- CordRepBtree* CopyToEndFrom(size_t begin, size_t new_length) const;
-
- // Extracts and returns the front edge from the provided tree.
- // This method consumes a reference on the provided tree, and logically
- // performs the following operation:
- // edge = CordRep::Ref(tree->Edge(kFront));
- // CordRep::Unref(tree);
- // return edge;
- static CordRep* ExtractFront(CordRepBtree* tree);
-
- // Returns a tree containing the result of appending `right` to `left`.
- static CordRepBtree* MergeTrees(CordRepBtree* left, CordRepBtree* right);
-
- // Fallback functions for `Create()`, `Append()` and `Prepend()` which
- // deal with legacy / non-conforming input, i.e.: CONCAT trees.
- static CordRepBtree* CreateSlow(CordRep* rep);
- static CordRepBtree* AppendSlow(CordRepBtree*, CordRep* rep);
- static CordRepBtree* PrependSlow(CordRepBtree*, CordRep* rep);
-
- // Recursively rebuilds `tree` into `stack`. If `consume` is set to true, the
- // function will consume a reference on `tree`. `stack` is a null terminated
- // array containing the new tree's state, with the current leaf node at
- // stack[0], and parent nodes above that, or null for 'top of tree'.
- static void Rebuild(CordRepBtree** stack, CordRepBtree* tree, bool consume);
-
- // Aligns existing edges to start at index 0, to allow for a new edge to be
- // added to the back of the current edges.
- inline void AlignBegin();
-
- // Aligns existing edges to end at `capacity`, to allow for a new edge to be
- // added in front of the current edges.
- inline void AlignEnd();
-
- // Adds the provided edge to this node.
- // Requires this node to have capacity for the edge. Realigns / moves
- // existing edges as needed to prepend or append the new edge.
- template <EdgeType edge_type>
- inline void Add(CordRep* rep);
-
- // Adds the provided edges to this node.
- // Requires this node to have capacity for the edges. Realigns / moves
- // existing edges as needed to prepend or append the new edges.
- template <EdgeType edge_type>
- inline void Add(y_absl::Span<CordRep* const>);
-
- // Adds data from `data` to this node until either all data has been consumed,
- // or there is no more capacity for additional flat nodes inside this node.
- // Requires the current node to be a leaf node, data to be non empty, and the
- // current node to have capacity for at least one more data edge.
- // Returns any remaining data from `data` that was not added, which is,
- // depending on the edge type (front / back), either the remaining prefix or
- // suffix of the input.
- // See the `Append/Prepend` function for the meaning and purpose of `extra`.
- template <EdgeType edge_type>
- y_absl::string_view AddData(y_absl::string_view data, size_t extra);
-
- // Replace the front or back edge with the provided value.
- // Adopts a reference on `edge` and unrefs the old edge.
- template <EdgeType edge_type>
- inline void SetEdge(CordRep* edge);
-
- // Returns a partial copy of the current tree containing the first `n` bytes
- // of data. `CopyResult` contains both the resulting edge and its height. The
- // resulting tree may be less high than the current tree, or even be a single
- // matching data edge if `allow_folding` is set to true.
- // For example, if `n == 1`, then the result will be the single data edge, and
- // height will be set to -1 (one below the owning leaf node). If n == 0, this
- // function returns null. Requires `n <= length`.
- CopyResult CopyPrefix(size_t n, bool allow_folding = true);
-
- // Returns a partial copy of the current tree containing all data starting
- // after `offset`. `CopyResult` contains both the resulting edge and its
- // height. The resulting tree may be less high than the current tree, or even
- // be a single matching data edge. For example, if `n == length - 1`, then the
- // result will be a single data edge, and height will be set to -1 (one below
- // the owning leaf node).
- // Requires `offset < length`.
- CopyResult CopySuffix(size_t offset);
-
- // Returns a OpResult value of {this, kSelf} or {Copy(), kCopied}
- // depending on the value of `owned`.
- inline OpResult ToOpResult(bool owned);
-
- // Adds `rep` to the specified tree, returning the modified tree.
- template <EdgeType edge_type>
- static CordRepBtree* AddCordRep(CordRepBtree* tree, CordRep* rep);
-
- // Adds `data` to the specified tree, returning the modified tree.
- // See the `Append/Prepend` function for the meaning and purpose of `extra`.
- template <EdgeType edge_type>
- static CordRepBtree* AddData(CordRepBtree* tree, y_absl::string_view data,
- size_t extra = 0);
-
- // Merges `src` into `dst` with `src` being added either before (kFront) or
- // after (kBack) `dst`. Requires the height of `dst` to be greater than or
- // equal to the height of `src`.
- template <EdgeType edge_type>
- static CordRepBtree* Merge(CordRepBtree* dst, CordRepBtree* src);
-
- // Fallback version of GetAppendBuffer for large trees: GetAppendBuffer()
- // implements an inlined version for trees of limited height (3 levels),
- // GetAppendBufferSlow implements the logic for large trees.
- Span<char> GetAppendBufferSlow(size_t size);
-
- // `edges_` contains all edges starting from this instance.
- // These are explicitly `child` edges only; a cord btree (or any cord tree for
- // that matter) does not store `parent` pointers anywhere: multiple trees /
- // parents can reference the same shared child edge. The type of these edges
- // depends on the height of the node. `Leaf nodes` (height == 0) contain `data
- // edges` (external or flat nodes, or sub-strings thereof). All other nodes
- // (height > 0) contain pointers to BTREE nodes with a height of `height - 1`.
- CordRep* edges_[kMaxCapacity];
-
- friend class CordRepBtreeTestPeer;
- friend class CordRepBtreeNavigator;
-};
-
-inline CordRepBtree* CordRep::btree() {
- assert(IsBtree());
- return static_cast<CordRepBtree*>(this);
-}
-
-inline const CordRepBtree* CordRep::btree() const {
- assert(IsBtree());
- return static_cast<const CordRepBtree*>(this);
-}
-
-inline void CordRepBtree::InitInstance(int height, size_t begin, size_t end) {
- tag = BTREE;
- storage[0] = static_cast<uint8_t>(height);
- storage[1] = static_cast<uint8_t>(begin);
- storage[2] = static_cast<uint8_t>(end);
-}
-
-inline CordRep* CordRepBtree::Edge(size_t index) const {
- assert(index >= begin());
- assert(index < end());
- return edges_[index];
-}
-
-inline CordRep* CordRepBtree::Edge(EdgeType edge_type) const {
- return edges_[edge_type == kFront ? begin() : back()];
-}
-
-inline y_absl::Span<CordRep* const> CordRepBtree::Edges() const {
- return {edges_ + begin(), size()};
-}
-
-inline y_absl::Span<CordRep* const> CordRepBtree::Edges(size_t begin,
- size_t end) const {
- assert(begin <= end);
- assert(begin >= this->begin());
- assert(end <= this->end());
- return {edges_ + begin, static_cast<size_t>(end - begin)};
-}
-
-inline const char* CordRepBtree::EdgeDataPtr(const CordRep* r) {
- assert(IsDataEdge(r));
- size_t offset = 0;
- if (r->tag == SUBSTRING) {
- offset = r->substring()->start;
- r = r->substring()->child;
- }
- return (r->tag >= FLAT ? r->flat()->Data() : r->external()->base) + offset;
-}
-
-inline y_absl::string_view CordRepBtree::EdgeData(const CordRep* r) {
- return y_absl::string_view(EdgeDataPtr(r), r->length);
-}
-
-inline y_absl::string_view CordRepBtree::Data(size_t index) const {
- assert(height() == 0);
- return EdgeData(Edge(index));
-}
-
-inline bool CordRepBtree::IsDataEdge(const CordRep* rep) {
- // The fast path is that `rep` is an EXTERNAL or FLAT node, making the below
- // if a single, well predicted branch. We then repeat the FLAT or EXTERNAL
- // check in the slow path, after the SUBSTRING check, to optimize for the
- // hot path.
- if (rep->tag == EXTERNAL || rep->tag >= FLAT) return true;
- if (rep->tag == SUBSTRING) rep = rep->substring()->child;
- return rep->tag == EXTERNAL || rep->tag >= FLAT;
-}
-
-inline CordRepBtree* CordRepBtree::New(int height) {
- CordRepBtree* tree = new CordRepBtree;
- tree->length = 0;
- tree->InitInstance(height);
- return tree;
-}
-
-inline CordRepBtree* CordRepBtree::New(CordRep* rep) {
- CordRepBtree* tree = new CordRepBtree;
- int height = rep->IsBtree() ? rep->btree()->height() + 1 : 0;
- tree->length = rep->length;
- tree->InitInstance(height, /*begin=*/0, /*end=*/1);
- tree->edges_[0] = rep;
- return tree;
-}
-
-inline CordRepBtree* CordRepBtree::New(CordRepBtree* front,
- CordRepBtree* back) {
- assert(front->height() == back->height());
- CordRepBtree* tree = new CordRepBtree;
- tree->length = front->length + back->length;
- tree->InitInstance(front->height() + 1, /*begin=*/0, /*end=*/2);
- tree->edges_[0] = front;
- tree->edges_[1] = back;
- return tree;
-}
-
-inline void CordRepBtree::DestroyTree(CordRepBtree* tree, size_t begin,
- size_t end) {
- if (tree->height() == 0) {
- DestroyLeaf(tree, begin, end);
- } else {
- DestroyNonLeaf(tree, begin, end);
- }
-}
-
-inline void CordRepBtree::Destroy(CordRepBtree* tree) {
- DestroyTree(tree, tree->begin(), tree->end());
-}
-
-inline void CordRepBtree::Unref(y_absl::Span<CordRep* const> edges) {
- for (CordRep* edge : edges) {
- if (ABSL_PREDICT_FALSE(!edge->refcount.Decrement())) {
- CordRep::Destroy(edge);
- }
- }
-}
-
-inline CordRepBtree* CordRepBtree::CopyRaw() const {
- auto* tree = static_cast<CordRepBtree*>(::operator new(sizeof(CordRepBtree)));
- memcpy(static_cast<void*>(tree), this, sizeof(CordRepBtree));
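-  // The memcpy above also copied the source's (possibly shared) refcount
-  // state, so reset the copy to a fresh refcount of 1 via placement new.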
- new (&tree->refcount) RefcountAndFlags;
- return tree;
-}
-
-inline CordRepBtree* CordRepBtree::Copy() const {
- CordRepBtree* tree = CopyRaw();
- for (CordRep* rep : Edges()) CordRep::Ref(rep);
- return tree;
-}
-
-inline CordRepBtree* CordRepBtree::CopyToEndFrom(size_t begin,
- size_t new_length) const {
- assert(begin >= this->begin());
- assert(begin <= this->end());
- CordRepBtree* tree = CopyRaw();
- tree->length = new_length;
- tree->set_begin(begin);
- for (CordRep* edge : tree->Edges()) CordRep::Ref(edge);
- return tree;
-}
-
-inline CordRepBtree* CordRepBtree::CopyBeginTo(size_t end,
- size_t new_length) const {
- assert(end <= capacity());
- assert(end >= this->begin());
- CordRepBtree* tree = CopyRaw();
- tree->length = new_length;
- tree->set_end(end);
- for (CordRep* edge : tree->Edges()) CordRep::Ref(edge);
- return tree;
-}
-
-inline void CordRepBtree::AlignBegin() {
- // The below code itself does not need to be fast as typically we have
- // mono-directional append/prepend calls, and `begin` / `end` are typically
- // adjusted no more than once. But we want to avoid potential register clobber
- // effects, making the compiler emit register save/store/spills, and minimize
- // the size of code.
- const size_t delta = begin();
- if (ABSL_PREDICT_FALSE(delta != 0)) {
- const size_t new_end = end() - delta;
- set_begin(0);
- set_end(new_end);
- // TODO(mvels): we can write this using 2 loads / 2 stores depending on
- // total size for the kMaxCapacity = 6 case. I.e., we can branch (switch) on
- // size, and then do overlapping load/store of up to 4 pointers (inlined as
- // XMM, YMM or ZMM load/store) and up to 2 pointers (XMM / YMM), which is a)
- // compact and b) not clobbering any registers.
- ABSL_INTERNAL_ASSUME(new_end <= kMaxCapacity);
-#ifdef __clang__
-#pragma unroll 1
-#endif
- for (size_t i = 0; i < new_end; ++i) {
- edges_[i] = edges_[i + delta];
- }
- }
-}
-
-inline void CordRepBtree::AlignEnd() {
- // See comments in `AlignBegin` for motivation on the hand-rolled for loops.
- const size_t delta = capacity() - end();
- if (delta != 0) {
- const size_t new_begin = begin() + delta;
- const size_t new_end = end() + delta;
- set_begin(new_begin);
- set_end(new_end);
- ABSL_INTERNAL_ASSUME(new_end <= kMaxCapacity);
-#ifdef __clang__
-#pragma unroll 1
-#endif
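-    // Note: `delta != 0` implies `new_begin >= 1`, so this backwards loop on
-    // the unsigned `i` stops at `new_begin - 1` without wrapping around.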
- for (size_t i = new_end - 1; i >= new_begin; --i) {
- edges_[i] = edges_[i - delta];
- }
- }
-}
-
-template <>
-inline void CordRepBtree::Add<CordRepBtree::kBack>(CordRep* rep) {
- AlignBegin();
- edges_[fetch_add_end(1)] = rep;
-}
-
-template <>
-inline void CordRepBtree::Add<CordRepBtree::kBack>(
- y_absl::Span<CordRep* const> edges) {
- AlignBegin();
- size_t new_end = end();
- for (CordRep* edge : edges) edges_[new_end++] = edge;
- set_end(new_end);
-}
-
-template <>
-inline void CordRepBtree::Add<CordRepBtree::kFront>(CordRep* rep) {
- AlignEnd();
- edges_[sub_fetch_begin(1)] = rep;
-}
-
-template <>
-inline void CordRepBtree::Add<CordRepBtree::kFront>(
- y_absl::Span<CordRep* const> edges) {
- AlignEnd();
- size_t new_begin = begin() - edges.size();
- set_begin(new_begin);
- for (CordRep* edge : edges) edges_[new_begin++] = edge;
-}
-
-template <CordRepBtree::EdgeType edge_type>
-inline void CordRepBtree::SetEdge(CordRep* edge) {
- const int idx = edge_type == kFront ? begin() : back();
- CordRep::Unref(edges_[idx]);
- edges_[idx] = edge;
-}
-
-inline CordRepBtree::OpResult CordRepBtree::ToOpResult(bool owned) {
- return owned ? OpResult{this, kSelf} : OpResult{Copy(), kCopied};
-}
-
-inline CordRepBtree::Position CordRepBtree::IndexOf(size_t offset) const {
- assert(offset < length);
- size_t index = begin();
- while (offset >= edges_[index]->length) offset -= edges_[index++]->length;
- return {index, offset};
-}
-
-inline CordRepBtree::Position CordRepBtree::IndexBefore(size_t offset) const {
- assert(offset > 0);
- assert(offset <= length);
- size_t index = begin();
- while (offset > edges_[index]->length) offset -= edges_[index++]->length;
- return {index, offset};
-}
-
-inline CordRepBtree::Position CordRepBtree::IndexBefore(Position front,
- size_t offset) const {
- size_t index = front.index;
- offset = offset + front.n;
- while (offset > edges_[index]->length) offset -= edges_[index++]->length;
- return {index, offset};
-}
-
-inline CordRepBtree::Position CordRepBtree::IndexOfLength(size_t n) const {
- assert(n <= length);
- size_t index = back();
- size_t strip = length - n;
- while (strip >= edges_[index]->length) strip -= edges_[index--]->length;
- return {index, edges_[index]->length - strip};
-}
-
-inline CordRepBtree::Position CordRepBtree::IndexBeyond(
- const size_t offset) const {
- // We need to find the edge whose starting offset is at or beyond (>=) `offset`.
- // For this we can't use the `offset -= length` logic of IndexOf. Instead, we
- // track the offset of the `current edge` in `off`, which we increase as we
- // iterate over the edges until we find the matching edge.
- size_t off = 0;
- size_t index = begin();
- while (offset > off) off += edges_[index++]->length;
- return {index, off - offset};
-}
-
-inline CordRepBtree* CordRepBtree::Create(CordRep* rep) {
- if (IsDataEdge(rep)) return New(rep);
- return CreateSlow(rep);
-}
-
-inline Span<char> CordRepBtree::GetAppendBuffer(size_t size) {
- assert(refcount.IsMutable());
- CordRepBtree* tree = this;
- const int height = this->height();
- CordRepBtree* n1 = tree;
- CordRepBtree* n2 = tree;
- CordRepBtree* n3 = tree;
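- // Walk down the right-most edges of the tree, remembering the visited nodes
- // in `n1`..`n3` so their lengths can be increased if the leaf's last edge
- // turns out to have spare capacity.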
- switch (height) {
- case 3:
- tree = tree->Edge(kBack)->btree();
- if (!tree->refcount.IsMutable()) return {};
- n2 = tree;
- ABSL_FALLTHROUGH_INTENDED;
- case 2:
- tree = tree->Edge(kBack)->btree();
- if (!tree->refcount.IsMutable()) return {};
- n1 = tree;
- ABSL_FALLTHROUGH_INTENDED;
- case 1:
- tree = tree->Edge(kBack)->btree();
- if (!tree->refcount.IsMutable()) return {};
- ABSL_FALLTHROUGH_INTENDED;
- case 0:
- CordRep* edge = tree->Edge(kBack);
- if (!edge->refcount.IsMutable()) return {};
- if (edge->tag < FLAT) return {};
- size_t avail = edge->flat()->Capacity() - edge->length;
- if (avail == 0) return {};
- size_t delta = (std::min)(size, avail);
- Span<char> span = {edge->flat()->Data() + edge->length, delta};
- edge->length += delta;
- switch (height) {
- case 3:
- n3->length += delta;
- ABSL_FALLTHROUGH_INTENDED;
- case 2:
- n2->length += delta;
- ABSL_FALLTHROUGH_INTENDED;
- case 1:
- n1->length += delta;
- ABSL_FALLTHROUGH_INTENDED;
- case 0:
- tree->length += delta;
- return span;
- }
- break;
- }
- return GetAppendBufferSlow(size);
-}
-
-extern template CordRepBtree* CordRepBtree::AddCordRep<CordRepBtree::kBack>(
- CordRepBtree* tree, CordRep* rep);
-
-extern template CordRepBtree* CordRepBtree::AddCordRep<CordRepBtree::kFront>(
- CordRepBtree* tree, CordRep* rep);
-
-inline CordRepBtree* CordRepBtree::Append(CordRepBtree* tree, CordRep* rep) {
- if (ABSL_PREDICT_TRUE(IsDataEdge(rep))) {
- return CordRepBtree::AddCordRep<kBack>(tree, rep);
- }
- return AppendSlow(tree, rep);
-}
-
-inline CordRepBtree* CordRepBtree::Prepend(CordRepBtree* tree, CordRep* rep) {
- if (ABSL_PREDICT_TRUE(IsDataEdge(rep))) {
- return CordRepBtree::AddCordRep<kFront>(tree, rep);
- }
- return PrependSlow(tree, rep);
-}
-
-#ifdef NDEBUG
-
-inline CordRepBtree* CordRepBtree::AssertValid(CordRepBtree* tree,
- bool /* shallow */) {
- return tree;
-}
-
-inline const CordRepBtree* CordRepBtree::AssertValid(const CordRepBtree* tree,
- bool /* shallow */) {
- return tree;
-}
-
-#endif
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
-
-#endif // ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_H_
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_H_
+#define ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_H_
+
+#include <cassert>
+#include <cstdint>
+#include <iosfwd>
+
+#include "y_absl/base/config.h"
+#include "y_absl/base/internal/raw_logging.h"
+#include "y_absl/base/optimization.h"
+#include "y_absl/strings/internal/cord_internal.h"
+#include "y_absl/strings/internal/cord_rep_flat.h"
+#include "y_absl/strings/string_view.h"
+#include "y_absl/types/span.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+class CordRepBtreeNavigator;
+
+// CordRepBtree is, as the name implies, a btree implementation of a CordRep tree.
+// Data is stored at the leaf level only, non leaf nodes contain down pointers
+// only. Allowed types of data edges are FLAT, EXTERNAL and SUBSTRINGs of FLAT
+// or EXTERNAL nodes. The implementation allows for data to be added to either
+// end of the tree only; it does not provide any 'insert' logic. This has the
+// benefit that we can expect good fill ratios: all nodes except the outer
+// 'legs' will have 100% fill ratios for trees built using Append/Prepend
+// methods. Merged trees will typically have a fill ratio well above 50% as in a
+// similar fashion, one side of the merged tree will typically have a 100% fill
+// ratio, and the 'open' end will average 50%. All operations are O(log(n)) or
+// better, and the tree never needs balancing.
+//
+// All methods accepting a CordRep* or CordRepBtree* adopt a reference on that
+// input unless explicitly stated otherwise. All functions returning a CordRep*
+// or CordRepBtree* instance transfer a reference back to the caller.
+// Simplified, callers both 'donate' and 'consume' a reference count on each
+// call, simplifying the API. An example of building a tree:
+//
+// CordRepBtree* tree = CordRepBtree::Create(MakeFlat("Hello"));
+// tree = CordRepBtree::Append(tree, MakeFlat("world"));
+//
+// In the above example, all inputs are consumed, making each call reference-
+// count neutral with respect to `tree`. The returned `tree` value can be different
+// from the input if the input is shared with other threads, or if the tree
+// grows in height, but callers typically never have to concern themselves with
+// that and trust that all methods DTRT at all times.
+class CordRepBtree : public CordRep {
+ public:
+ // EdgeType identifies `front` and `back` enum values.
+ // Various implementations in CordRepBtree such as `Add` and `Edge` are
+ // generic and templated on operating on either of the boundary edges.
+ // For more information on the possible edges contained in a CordRepBtree
+ // instance see the documentation for `edges_`.
+ enum class EdgeType { kFront, kBack };
+
+ // Convenience constants for `EdgeType`.
+ static constexpr EdgeType kFront = EdgeType::kFront;
+ static constexpr EdgeType kBack = EdgeType::kBack;
+
+ // Maximum number of edges: based on experiments and performance data, we can
+ // pick suitable values resulting in optimum cacheline aligned values. The
+ // preferred values are based on 64-bit systems where we aim to align this
+ // class onto 64 bytes, i.e.: 6 = 64 bytes, 14 = 128 bytes, etc.
+ // TODO(b/192061034): experiment with alternative sizes.
+ static constexpr size_t kMaxCapacity = 6;
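+
+ // Illustrative arithmetic (assuming a typical 64-bit build): the CordRep
+ // base fields (length, refcount, tag, storage) occupy 16 bytes, and the 6
+ // edge pointers occupy 6 * 8 = 48 bytes, so a node spans 64 bytes: exactly
+ // one cache line.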
+
+ // Reasonable maximum height of the btree. We can expect a fill ratio of at
+ // least 50%: trees are always expanded at the front or back. Concatenating
+ // trees will then typically fold at the top most node, where the lower nodes
+ // are at least at capacity on one side of joined inputs. At a lower fill
+ // rate of 4 edges per node, we have capacity for ~16 million leaf nodes.
+ // We will fail / abort if an application ever exceeds this height, which
+ // should be extremely rare (near impossible) and be an indication of an
+ // application error: we do not assume it reasonable for any application to
+ // operate correctly with such monster trees.
+ // Another compelling reason for the number `12` is that any contextual stack
+ // required for navigation or insertion requires 12 words and 12 bytes, which
+ // fits inside 2 cache lines with some room to spare, and is reasonable as a
+ // local stack variable compared to Cord's current near 400 bytes stack use.
+ // The maximum `height` value of a node is then `kMaxDepth - 1` as node height
+ // values start with a value of 0 for leaf nodes.
+ static constexpr int kMaxDepth = 12;
+ static constexpr int kMaxHeight = kMaxDepth - 1;
+
+ // `Action` defines the action for unwinding changes done at the btree's leaf
+ // level that need to be propagated up to the parent node(s). Each operation
+ // on a node has an effect / action defined as follows:
+ // - kSelf
+ // The operation (add / update, etc) was performed directly on the node as
+ // the node is private to the current thread (i.e.: not shared directly or
+ // indirectly through a refcount > 1). Changes can be propagated directly to
+ // all parent nodes as all parent nodes are also then private to the current
+ // thread.
+ // - kCopied
+ // The operation (add / update, etc) was performed on a copy of the original
+ // node, as the node is (potentially) directly or indirectly shared with
+ // other threads. Changes need to be propagated into the parent nodes where
+ // the old down pointer must be unreffed and replaced with this new copy.
+ // Such changes to parent nodes may themselves require a copy if the parent
+ // node is also shared. A kCopied action can propagate all the way to the
+ // top node where we then must unref the `tree` input provided by the
+ // caller, and return the new copy.
+ // - kPopped
+ // The operation (typically add) could not be satisfied due to insufficient
+ // capacity in the targeted node, and a new 'leg' was created that needs to
+ // be added into the parent node. For example, adding a FLAT inside a leaf
+ // node that is at capacity will create a new leaf node containing that
+ // FLAT, that needs to be 'popped' up the btree. Such 'pop' actions can
+ // cascade up the tree if parent nodes are also at capacity. A 'Popped'
+ // action propagating all the way to the top of the tree will result in
+ // the tree becoming one level higher than the current tree through a final
+ // `CordRepBtree::New(tree, popped)` call, resulting in a new top node
+ // referencing the old tree and the new (fully popped upwards) 'leg'.
+ enum Action { kSelf, kCopied, kPopped };
+
+ // Result of an operation on a node. See the `Action` enum for details.
+ struct OpResult {
+ CordRepBtree* tree;
+ Action action;
+ };
+
+ // Return value of the CopyPrefix and CopySuffix methods which can
+ // return a node or data edge at any height inside the tree.
+ // A height of 0 defines the lowest (leaf) node, a height of -1 identifies
+ // `edge` as being a plain data node: EXTERNAL / FLAT or SUBSTRING thereof.
+ struct CopyResult {
+ CordRep* edge;
+ int height;
+ };
+
+ // Logical position inside a node:
+ // - index: index of the edge.
+ // - n: size or offset value depending on context.
+ struct Position {
+ size_t index;
+ size_t n;
+ };
+
+ // Creates a btree from the given input. Adopts a ref of `rep`.
+ // If the input `rep` is itself a btree, i.e., `IsBtree()`, then this
+ // function immediately returns `rep->btree()`. If the input is a valid data
+ // edge (see IsDataEdge()), then a new leaf node is returned containing `rep`
+ // as the sole data edge. Else, the input is assumed to be a (legacy) concat
+ // tree, and the input is consumed and transformed into a btree().
+ static CordRepBtree* Create(CordRep* rep);
+
+ // Destroys the provided tree. Should only be called by cord internal APIs,
+ // typically after a ref_count.Decrement() on the last reference count.
+ static void Destroy(CordRepBtree* tree);
+
+ // Use CordRep::Unref() as we overload for y_absl::Span<CordRep* const>.
+ using CordRep::Unref;
+
+ // Unrefs all edges in `edges` which are assumed to be 'likely one'.
+ static void Unref(y_absl::Span<CordRep* const> edges);
+
+ // Appends / Prepends an existing CordRep instance to this tree.
+ // The below methods accept three types of input:
+ // 1) `rep` is a data node (see `IsDataEdge` for valid data edges).
+ // `rep` is appended or prepended to this tree 'as is'.
+ // 2) `rep` is a BTREE.
+ // `rep` is merged into `tree` respecting the Append/Prepend order.
+ // 3) `rep` is some other (legacy) type.
+ //    `rep` is converted in place and added to `tree`.
+ // Requires `tree` and `rep` to be not null.
+ static CordRepBtree* Append(CordRepBtree* tree, CordRep* rep);
+ static CordRepBtree* Prepend(CordRepBtree* tree, CordRep* rep);
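+
+ // Illustrative example (hypothetical code) of case 2 above: merging two
+ // btrees, where the call consumes `other` and returns the combined tree:
+ //
+ //   CordRepBtree* tree = CordRepBtree::Create(MakeFlat("Hello "));
+ //   CordRepBtree* other = CordRepBtree::Create(MakeFlat("world"));
+ //   tree = CordRepBtree::Append(tree, other);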
+
+ // Append/Prepend the data in `data` to this tree.
+ // The `extra` parameter defines how much extra capacity should be allocated
+ // for any additional FLAT being allocated. This is an optimization hint from
+ // the caller. For example, a caller may need to add 2 string_views of data
+ // "abc" and "defghi" which are not consecutive. The caller can in this case
+ // invoke `AddData(tree, "abc", 6)`, and any newly added flat is allocated
+ // where possible with at least 6 bytes of extra capacity beyond `length`.
+ // This helps avoid fragmenting data over multiple flats.
+ // There is no limit on the size of `data`. If `data` cannot be stored inside
+ // a single flat, then the function will iteratively add flats until all data
+ // has been consumed and appended or prepended to the tree.
+ static CordRepBtree* Append(CordRepBtree* tree, string_view data,
+ size_t extra = 0);
+ static CordRepBtree* Prepend(CordRepBtree* tree, string_view data,
+ size_t extra = 0);
+
+ // Returns a new tree, containing `n` bytes of data from this instance
+ // starting at offset `offset`. Where possible, the returned tree shares
+ // (re-uses) data edges and nodes with this instance to minimize the
+ // combined memory footprint of both trees.
+ // Requires `offset + n <= length`. Returns `nullptr` if `n` is zero.
+ CordRep* SubTree(size_t offset, size_t n);
+
+ // Removes `n` trailing bytes from `tree`, and returns the resulting tree
+ // or data edge. Returns `tree` if n is zero, and nullptr if n == length.
+ // This function is logically identical to:
+ // result = tree->SubTree(0, tree->length - n);
+ // Unref(tree);
+ // return result;
+ // However, the actual implementation will as much as possible perform 'in
+ // place' modifications on the tree on all nodes and edges that are mutable.
+ // For example, in a fully privately owned tree with the last edge being a
+ // flat of length 12, RemoveSuffix(1) will simply set the length of that data
+ // edge to 11, and reduce the length of all nodes on the edge path by 1.
+ static CordRep* RemoveSuffix(CordRepBtree* tree, size_t n);
+
+ // Returns the character at the given offset.
+ char GetCharacter(size_t offset) const;
+
+ // Returns true if this node holds a single data edge, and if so, sets
+ // `fragment` to reference the contained data. `fragment` is an optional
+ // output parameter and allowed to be null.
+ bool IsFlat(y_absl::string_view* fragment) const;
+
+ // Returns true if the data of `n` bytes starting at offset `offset`
+ // is contained in a single data edge, and if so, sets fragment to reference
+ // the contained data. `fragment` is an optional output parameter and allowed
+ // to be null.
+ bool IsFlat(size_t offset, size_t n, y_absl::string_view* fragment) const;
+
+ // Returns a span (mutable range of bytes) of up to `size` bytes into the
+ // last FLAT data edge inside this tree under the following conditions:
+ // - none of the nodes down into the FLAT node are shared.
+ // - the last data edge in this tree is a non-shared FLAT.
+ // - the referenced FLAT has additional capacity available.
+ // If all these conditions are met, a non-empty span is returned, and the
+ // length of the flat node and involved tree nodes have been increased by
+ // `span.length()`. The caller is responsible for immediately assigning values
+ // to all uninitialized data referenced by the returned span.
+ // Requires `this->refcount.IsMutable()`: this function forces the
+ // caller to do this fast path check on the top level node, as this is the
+ // most commonly shared node of a cord tree.
+ Span<char> GetAppendBuffer(size_t size);
+
+ // Returns the `height` of the tree. The height of a tree is limited to
+ // kMaxHeight. `height` is implemented as an `int` as in some places we
+ // use negative (-1) values for 'data edges'.
+ int height() const { return static_cast<int>(storage[0]); }
+
+ // Properties: begin, back, end, front/back boundary indexes.
+ size_t begin() const { return static_cast<size_t>(storage[1]); }
+ size_t back() const { return static_cast<size_t>(storage[2]) - 1; }
+ size_t end() const { return static_cast<size_t>(storage[2]); }
+ size_t index(EdgeType edge) const {
+ return edge == kFront ? begin() : back();
+ }
+
+ // Properties: size and capacity.
+ // `capacity` contains the current capacity of this instance, where
+ // `kMaxCapacity` contains the maximum capacity of a btree node.
+ // For now, `capacity` and `kMaxCapacity` return the same value, but this may
+ // change in the future if we see benefit in dynamically sizing 'small' nodes
+ // to 'large' nodes for large data trees.
+ size_t size() const { return end() - begin(); }
+ size_t capacity() const { return kMaxCapacity; }
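+
+ // Illustrative example: a node with storage[1] == 1 and storage[2] == 4
+ // holds three edges at indices 1, 2 and 3, so begin() == 1, back() == 3,
+ // end() == 4 and size() == 3.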
+
+ // Edge access
+ inline CordRep* Edge(size_t index) const;
+ inline CordRep* Edge(EdgeType edge_type) const;
+ inline y_absl::Span<CordRep* const> Edges() const;
+ inline y_absl::Span<CordRep* const> Edges(size_t begin, size_t end) const;
+
+ // Returns reference to the data edge at `index`.
+ // Requires this instance to be a leaf node, and `index` to be a valid index.
+ inline y_absl::string_view Data(size_t index) const;
+
+ static const char* EdgeDataPtr(const CordRep* r);
+ static y_absl::string_view EdgeData(const CordRep* r);
+
+ // Returns true if the provided rep is a FLAT, EXTERNAL or a SUBSTRING node
+ // holding a FLAT or EXTERNAL child rep.
+ static bool IsDataEdge(const CordRep* rep);
+
+ // Diagnostics: returns true if `tree` is valid and internally consistent.
+ // If `shallow` is false, then the provided top level node and all child nodes
+ // below it are recursively checked. If `shallow` is true, only the provided
+ // node in `tree` and the cumulative length, type and height of the direct
+ // child nodes of `tree` are checked. The value of `shallow` is ignored if the
+ // internal `cord_btree_exhaustive_validation` diagnostics variable is true,
+ // in which case the performed validation works as if `shallow` were false.
+ // This function is intended for debugging and testing purposes only.
+ static bool IsValid(const CordRepBtree* tree, bool shallow = false);
+
+ // Diagnostics: asserts that the provided tree is valid.
+ // `AssertValid()` performs a shallow validation by default. `shallow` can be
+ // set to false in which case an exhaustive validation is performed. This
+ // function is implemented in terms of calling `IsValid()` and asserting the
+ // return value to be true. See `IsValid()` for more information.
+ // This function is intended for debugging and testing purposes only.
+ static CordRepBtree* AssertValid(CordRepBtree* tree, bool shallow = true);
+ static const CordRepBtree* AssertValid(const CordRepBtree* tree,
+ bool shallow = true);
+
+ // Diagnostics: dump the contents of this tree to `stream`.
+ // This function is intended for debugging and testing purposes only.
+ static void Dump(const CordRep* rep, std::ostream& stream);
+ static void Dump(const CordRep* rep, y_absl::string_view label,
+ std::ostream& stream);
+ static void Dump(const CordRep* rep, y_absl::string_view label,
+ bool include_contents, std::ostream& stream);
+
+ // Adds the edge `edge` to this node if possible. `owned` indicates if the
+ // current node is potentially shared or not with other threads. Returns:
+ // - {kSelf, <this>}
+ // The edge was directly added to this node.
+ // - {kCopied, <node>}
+ // The edge was added to a copy of this node.
+ // - {kPopped, New(edge, height())}
+ // A new leg with the edge was created as this node has no extra capacity.
+ template <EdgeType edge_type>
+ inline OpResult AddEdge(bool owned, CordRep* edge, size_t delta);
+
+ // Replaces the front or back edge with the provided new edge. Returns:
+ // - {kSelf, <this>}
+ // The edge was directly set in this node. The old edge is unreffed.
+ // - {kCopied, <node>}
+ // A copy of this node was created with the new edge value.
+ // In both cases, the function adopts a reference on `edge`.
+ template <EdgeType edge_type>
+ OpResult SetEdge(bool owned, CordRep* edge, size_t delta);
+
+ // Creates a new empty node at the specified height.
+ static CordRepBtree* New(int height = 0);
+
+ // Creates a new node containing `rep`, with the height being computed
+ // automatically based on the type of `rep`.
+ static CordRepBtree* New(CordRep* rep);
+
+ // Creates a new node containing both `front` and `back` at height
+ // `front.height() + 1`. Requires `back.height() == front.height()`.
+ static CordRepBtree* New(CordRepBtree* front, CordRepBtree* back);
+
+ // Creates a fully balanced tree from the provided tree by rebuilding a new
+ // tree from all data edges in the input. This function is automatically
+ // invoked internally when the tree exceeds the maximum height.
+ static CordRepBtree* Rebuild(CordRepBtree* tree);
+
+ private:
+ CordRepBtree() = default;
+ ~CordRepBtree() = default;
+
+ // Initializes the main properties `tag`, `begin`, `end`, `height`.
+ inline void InitInstance(int height, size_t begin = 0, size_t end = 0);
+
+ // Direct property access begin / end
+ void set_begin(size_t begin) { storage[1] = static_cast<uint8_t>(begin); }
+ void set_end(size_t end) { storage[2] = static_cast<uint8_t>(end); }
+
+ // Decreases the value of `begin` by `n`, and returns the new value. Notice
+ // how this returns the new value unlike atomic::fetch_add which returns the
+ // old value. This is because it is used to prepend edges at 'begin - 1'.
+ size_t sub_fetch_begin(size_t n) {
+ storage[1] -= static_cast<uint8_t>(n);
+ return storage[1];
+ }
+
+ // Increases the value of `end` by `n`, and returns the previous value. This
+ // function is typically used to append edges at 'end'.
+ size_t fetch_add_end(size_t n) {
+ const uint8_t current = storage[2];
+ storage[2] = static_cast<uint8_t>(current + n);
+ return current;
+ }
+
+ // Returns the index of the last edge starting on, or before `offset`, with
+ // `n` containing the relative offset of `offset` inside that edge.
+ // Requires `offset` < length.
+ Position IndexOf(size_t offset) const;
+
+ // Returns the index of the last edge starting before `offset`, with `n`
+ // containing the relative offset of `offset` inside that edge.
+ // This function is useful to find the edges for some span of bytes ending at
+ // `offset` (i.e., `n` bytes). For example:
+ //
+ // Position pos = IndexBefore(n)
+ // edges = Edges(begin(), pos.index) // All full edges (may be empty)
+ // last = Sub(Edge(pos.index), 0, pos.n) // Last partial edge (may be empty)
+ //
+ // Requires 0 < `offset` <= length.
+ Position IndexBefore(size_t offset) const;
+
+ // Returns the index of the edge ending at (or on) length `n`, and the
+ // number of bytes inside that edge up to `n`. For example, if we have a
+ // Node with 2 edges, one of 10 and one of 20 long, then IndexOfLength(27)
+ // will return {1, 17}, and IndexOfLength(10) will return {0, 10}.
+ Position IndexOfLength(size_t n) const;
+
+ // Identical to the above function except starting from the position `front`.
+ // This function is equivalent to `IndexBefore(front.n + offset)`, with
+ // the difference that this function is optimized to start at `front.index`.
+ Position IndexBefore(Position front, size_t offset) const;
+
+ // Returns the index of the edge directly beyond the edge containing offset
+ // `offset`, with `n` containing the distance of that edge from `offset`.
+ // This function is useful for iteratively finding suffix nodes and remaining
+ // partial bytes in left-most suffix nodes as for example in CopySuffix.
+ // Requires `offset` < length.
+ Position IndexBeyond(size_t offset) const;
+
+ // Destruction
+ static void DestroyLeaf(CordRepBtree* tree, size_t begin, size_t end);
+ static void DestroyNonLeaf(CordRepBtree* tree, size_t begin, size_t end);
+ static void DestroyTree(CordRepBtree* tree, size_t begin, size_t end);
+ static void Delete(CordRepBtree* tree) { delete tree; }
+
+ // Creates a new leaf node containing as much data as possible from `data`.
+ // The data is added either forwards or reversed depending on `edge_type`.
+ // Callers must check the length of the returned node to determine if all data
+ // was copied or not.
+ // See the `Append/Prepend` function for the meaning and purpose of `extra`.
+ template <EdgeType edge_type>
+ static CordRepBtree* NewLeaf(y_absl::string_view data, size_t extra);
+
+ // Creates a raw copy of this Btree node, copying all properties, but
+ // without adding any references to existing edges.
+ CordRepBtree* CopyRaw() const;
+
+ // Creates a full copy of this Btree node, adding a reference on all edges.
+ CordRepBtree* Copy() const;
+
+ // Creates a partial copy of this Btree node, copying all edges up to `end`,
+ // adding a reference on each copied edge, and sets the length of the newly
+ // created copy to `new_length`.
+ CordRepBtree* CopyBeginTo(size_t end, size_t new_length) const;
+
+ // Returns a tree containing the edges [tree->begin(), end) and length
+ // of `new_length`. This method consumes a reference on the provided
+ // tree, and logically performs the following operation:
+ // result = tree->CopyBeginTo(end, new_length);
+ // CordRep::Unref(tree);
+ // return result;
+ static CordRepBtree* ConsumeBeginTo(CordRepBtree* tree, size_t end,
+ size_t new_length);
+
+ // Creates a partial copy of this Btree node, copying all edges starting at
+ // `begin`, adding a reference on each copied edge, and sets the length of
+ // the newly created copy to `new_length`.
+ CordRepBtree* CopyToEndFrom(size_t begin, size_t new_length) const;
+
+ // Extracts and returns the front edge from the provided tree.
+ // This method consumes a reference on the provided tree, and logically
+ // performs the following operation:
+ // edge = CordRep::Ref(tree->Edge(kFront));
+ // CordRep::Unref(tree);
+ // return edge;
+ static CordRep* ExtractFront(CordRepBtree* tree);
+
+ // Returns a tree containing the result of appending `right` to `left`.
+ static CordRepBtree* MergeTrees(CordRepBtree* left, CordRepBtree* right);
+
+ // Fallback functions for `Create()`, `Append()` and `Prepend()` which
+ // deal with legacy / non-conforming input, i.e.: CONCAT trees.
+ static CordRepBtree* CreateSlow(CordRep* rep);
+ static CordRepBtree* AppendSlow(CordRepBtree*, CordRep* rep);
+ static CordRepBtree* PrependSlow(CordRepBtree*, CordRep* rep);
+
+ // Recursively rebuilds `tree` into `stack`. If `consume` is set to true, the
+ // function will consume a reference on `tree`. `stack` is a null terminated
+ // array containing the new tree's state, with the current leaf node at
+ // stack[0], and parent nodes above that, or null for 'top of tree'.
+ static void Rebuild(CordRepBtree** stack, CordRepBtree* tree, bool consume);
+
+ // Aligns existing edges to start at index 0, to allow for a new edge to be
+ // added to the back of the current edges.
+ inline void AlignBegin();
+
+ // Aligns existing edges to end at `capacity`, to allow for a new edge to be
+ // added in front of the current edges.
+ inline void AlignEnd();
+
+ // Adds the provided edge to this node.
+ // Requires this node to have capacity for the edge. Realigns / moves
+ // existing edges as needed to prepend or append the new edge.
+ template <EdgeType edge_type>
+ inline void Add(CordRep* rep);
+
+ // Adds the provided edges to this node.
+ // Requires this node to have capacity for the edges. Realigns / moves
+ // existing edges as needed to prepend or append the new edges.
+ template <EdgeType edge_type>
+ inline void Add(y_absl::Span<CordRep* const>);
+
+ // Adds data from `data` to this node until either all data has been consumed,
+ // or there is no more capacity for additional flat nodes inside this node.
+ // Requires the current node to be a leaf node, data to be non empty, and the
+ // current node to have capacity for at least one more data edge.
+ // Returns any remaining data from `data` that was not added, which is,
+ // depending on the edge type (front / back), either the remaining prefix or
+ // suffix of the input.
+ // See the `Append/Prepend` function for the meaning and purpose of `extra`.
+ template <EdgeType edge_type>
+ y_absl::string_view AddData(y_absl::string_view data, size_t extra);
+
+ // Replaces the front or back edge with the provided value.
+ // Adopts a reference on `edge` and unrefs the old edge.
+ template <EdgeType edge_type>
+ inline void SetEdge(CordRep* edge);
+
+ // Returns a partial copy of the current tree containing the first `n` bytes
+ // of data. `CopyResult` contains both the resulting edge and its height. The
+ // resulting tree may have a smaller height than the current tree, or may even
+ // be a single matching data edge if `allow_folding` is set to true.
+ // For example, if `n == 1`, then the result will be the single data edge, and
+ // height will be set to -1 (one below the owning leaf node). If n == 0, this
+ // function returns null. Requires `n <= length`.
+ CopyResult CopyPrefix(size_t n, bool allow_folding = true);
+
+ // Returns a partial copy of the current tree containing all data starting
+ // after `offset`. `CopyResult` contains both the resulting edge and its
+ // height. The resulting tree may have a smaller height than the current tree,
+ // or may even be a single matching data edge. For example, if
+ // `offset == length - 1`, then the result will be a single data edge, and
+ // height will be set to -1 (one below the owning leaf node).
+ // Requires `offset < length`.
+ CopyResult CopySuffix(size_t offset);
+
+ // Returns an OpResult value of {this, kSelf} or {Copy(), kCopied}
+ // depending on the value of `owned`.
+ inline OpResult ToOpResult(bool owned);
+
+ // Adds `rep` to the specified tree, returning the modified tree.
+ template <EdgeType edge_type>
+ static CordRepBtree* AddCordRep(CordRepBtree* tree, CordRep* rep);
+
+ // Adds `data` to the specified tree, returning the modified tree.
+ // See the `Append/Prepend` functions for the meaning and purpose of `extra`.
+ template <EdgeType edge_type>
+ static CordRepBtree* AddData(CordRepBtree* tree, y_absl::string_view data,
+ size_t extra = 0);
+
+ // Merges `src` into `dst` with `src` being added either before (kFront) or
+ // after (kBack) `dst`. Requires the height of `dst` to be greater than or
+ // equal to the height of `src`.
+ template <EdgeType edge_type>
+ static CordRepBtree* Merge(CordRepBtree* dst, CordRepBtree* src);
+
+ // Fallback version of GetAppendBuffer for large trees: GetAppendBuffer()
+ // implements an inlined version for trees of limited height (3 levels),
+ // GetAppendBufferSlow implements the logic for large trees.
+ Span<char> GetAppendBufferSlow(size_t size);
+
+ // `edges_` contains all edges starting from this instance.
+ // These are explicitly `child` edges only; a cord btree (or any cord tree,
+ // for that matter) does not store `parent` pointers anywhere: multiple trees /
+ // parents can reference the same shared child edge. The type of these edges
+ // depends on the height of the node. `Leaf nodes` (height == 0) contain `data
+ // edges` (external or flat nodes, or sub-strings thereof). All other nodes
+ // (height > 0) contain pointers to BTREE nodes with a height of `height - 1`.
+ CordRep* edges_[kMaxCapacity];
+
+ friend class CordRepBtreeTestPeer;
+ friend class CordRepBtreeNavigator;
+};
+
+inline CordRepBtree* CordRep::btree() {
+ assert(IsBtree());
+ return static_cast<CordRepBtree*>(this);
+}
+
+inline const CordRepBtree* CordRep::btree() const {
+ assert(IsBtree());
+ return static_cast<const CordRepBtree*>(this);
+}
+
+inline void CordRepBtree::InitInstance(int height, size_t begin, size_t end) {
+ tag = BTREE;
+ storage[0] = static_cast<uint8_t>(height);
+ storage[1] = static_cast<uint8_t>(begin);
+ storage[2] = static_cast<uint8_t>(end);
+}
+
+inline CordRep* CordRepBtree::Edge(size_t index) const {
+ assert(index >= begin());
+ assert(index < end());
+ return edges_[index];
+}
+
+inline CordRep* CordRepBtree::Edge(EdgeType edge_type) const {
+ return edges_[edge_type == kFront ? begin() : back()];
+}
+
+inline y_absl::Span<CordRep* const> CordRepBtree::Edges() const {
+ return {edges_ + begin(), size()};
+}
+
+inline y_absl::Span<CordRep* const> CordRepBtree::Edges(size_t begin,
+ size_t end) const {
+ assert(begin <= end);
+ assert(begin >= this->begin());
+ assert(end <= this->end());
+ return {edges_ + begin, static_cast<size_t>(end - begin)};
+}
+
+inline const char* CordRepBtree::EdgeDataPtr(const CordRep* r) {
+ assert(IsDataEdge(r));
+ size_t offset = 0;
+ if (r->tag == SUBSTRING) {
+ offset = r->substring()->start;
+ r = r->substring()->child;
+ }
+ return (r->tag >= FLAT ? r->flat()->Data() : r->external()->base) + offset;
+}
+
+inline y_absl::string_view CordRepBtree::EdgeData(const CordRep* r) {
+ return y_absl::string_view(EdgeDataPtr(r), r->length);
+}
+
+inline y_absl::string_view CordRepBtree::Data(size_t index) const {
+ assert(height() == 0);
+ return EdgeData(Edge(index));
+}
+
+inline bool CordRepBtree::IsDataEdge(const CordRep* rep) {
+ // The fast path is that `rep` is an EXTERNAL or FLAT node, making the below
+ // if a single, well predicted branch. We then repeat the FLAT or EXTERNAL
+ // check in the slow path, after the SUBSTRING check, to optimize for the hot
+ // path.
+ if (rep->tag == EXTERNAL || rep->tag >= FLAT) return true;
+ if (rep->tag == SUBSTRING) rep = rep->substring()->child;
+ return rep->tag == EXTERNAL || rep->tag >= FLAT;
+}
+
+inline CordRepBtree* CordRepBtree::New(int height) {
+ CordRepBtree* tree = new CordRepBtree;
+ tree->length = 0;
+ tree->InitInstance(height);
+ return tree;
+}
+
+inline CordRepBtree* CordRepBtree::New(CordRep* rep) {
+ CordRepBtree* tree = new CordRepBtree;
+ int height = rep->IsBtree() ? rep->btree()->height() + 1 : 0;
+ tree->length = rep->length;
+ tree->InitInstance(height, /*begin=*/0, /*end=*/1);
+ tree->edges_[0] = rep;
+ return tree;
+}
+
+inline CordRepBtree* CordRepBtree::New(CordRepBtree* front,
+ CordRepBtree* back) {
+ assert(front->height() == back->height());
+ CordRepBtree* tree = new CordRepBtree;
+ tree->length = front->length + back->length;
+ tree->InitInstance(front->height() + 1, /*begin=*/0, /*end=*/2);
+ tree->edges_[0] = front;
+ tree->edges_[1] = back;
+ return tree;
+}
+
+inline void CordRepBtree::DestroyTree(CordRepBtree* tree, size_t begin,
+ size_t end) {
+ if (tree->height() == 0) {
+ DestroyLeaf(tree, begin, end);
+ } else {
+ DestroyNonLeaf(tree, begin, end);
+ }
+}
+
+inline void CordRepBtree::Destroy(CordRepBtree* tree) {
+ DestroyTree(tree, tree->begin(), tree->end());
+}
+
+inline void CordRepBtree::Unref(y_absl::Span<CordRep* const> edges) {
+ for (CordRep* edge : edges) {
+ if (ABSL_PREDICT_FALSE(!edge->refcount.Decrement())) {
+ CordRep::Destroy(edge);
+ }
+ }
+}
+
+inline CordRepBtree* CordRepBtree::CopyRaw() const {
+ auto* tree = static_cast<CordRepBtree*>(::operator new(sizeof(CordRepBtree)));
+ memcpy(static_cast<void*>(tree), this, sizeof(CordRepBtree));
+ new (&tree->refcount) RefcountAndFlags;
+ return tree;
+}
+
+inline CordRepBtree* CordRepBtree::Copy() const {
+ CordRepBtree* tree = CopyRaw();
+ for (CordRep* rep : Edges()) CordRep::Ref(rep);
+ return tree;
+}
+
+inline CordRepBtree* CordRepBtree::CopyToEndFrom(size_t begin,
+ size_t new_length) const {
+ assert(begin >= this->begin());
+ assert(begin <= this->end());
+ CordRepBtree* tree = CopyRaw();
+ tree->length = new_length;
+ tree->set_begin(begin);
+ for (CordRep* edge : tree->Edges()) CordRep::Ref(edge);
+ return tree;
+}
+
+inline CordRepBtree* CordRepBtree::CopyBeginTo(size_t end,
+ size_t new_length) const {
+ assert(end <= capacity());
+ assert(end >= this->begin());
+ CordRepBtree* tree = CopyRaw();
+ tree->length = new_length;
+ tree->set_end(end);
+ for (CordRep* edge : tree->Edges()) CordRep::Ref(edge);
+ return tree;
+}
+
+inline void CordRepBtree::AlignBegin() {
+ // The below code itself does not need to be fast, as typically we have
+ // mono-directional append/prepend calls, and `begin` / `end` are typically
+ // adjusted no more than once. But we want to avoid potential register clobber
+ // effects that make the compiler emit register save/store/spill code, and we
+ // want to minimize the size of the generated code.
+ const size_t delta = begin();
+ if (ABSL_PREDICT_FALSE(delta != 0)) {
+ const size_t new_end = end() - delta;
+ set_begin(0);
+ set_end(new_end);
+ // TODO(mvels): we can write this using 2 loads / 2 stores depending on
+ // total size for the kMaxCapacity = 6 case. I.e., we can branch (switch) on
+ // size, and then do overlapping load/store of up to 4 pointers (inlined as
+ // XMM, YMM or ZMM load/store) and up to 2 pointers (XMM / YMM), which is a)
+ // compact and b) not clobbering any registers.
+ ABSL_INTERNAL_ASSUME(new_end <= kMaxCapacity);
+#ifdef __clang__
+#pragma unroll 1
+#endif
+ for (size_t i = 0; i < new_end; ++i) {
+ edges_[i] = edges_[i + delta];
+ }
+ }
+}
+
+inline void CordRepBtree::AlignEnd() {
+ // See comments in `AlignBegin` for motivation on the hand-rolled for loops.
+ const size_t delta = capacity() - end();
+ if (delta != 0) {
+ const size_t new_begin = begin() + delta;
+ const size_t new_end = end() + delta;
+ set_begin(new_begin);
+ set_end(new_end);
+ ABSL_INTERNAL_ASSUME(new_end <= kMaxCapacity);
+#ifdef __clang__
+#pragma unroll 1
+#endif
+ for (size_t i = new_end - 1; i >= new_begin; --i) {
+ edges_[i] = edges_[i - delta];
+ }
+ }
+}
+
+template <>
+inline void CordRepBtree::Add<CordRepBtree::kBack>(CordRep* rep) {
+ AlignBegin();
+ edges_[fetch_add_end(1)] = rep;
+}
+
+template <>
+inline void CordRepBtree::Add<CordRepBtree::kBack>(
+ y_absl::Span<CordRep* const> edges) {
+ AlignBegin();
+ size_t new_end = end();
+ for (CordRep* edge : edges) edges_[new_end++] = edge;
+ set_end(new_end);
+}
+
+template <>
+inline void CordRepBtree::Add<CordRepBtree::kFront>(CordRep* rep) {
+ AlignEnd();
+ edges_[sub_fetch_begin(1)] = rep;
+}
+
+template <>
+inline void CordRepBtree::Add<CordRepBtree::kFront>(
+ y_absl::Span<CordRep* const> edges) {
+ AlignEnd();
+ size_t new_begin = begin() - edges.size();
+ set_begin(new_begin);
+ for (CordRep* edge : edges) edges_[new_begin++] = edge;
+}
+
+template <CordRepBtree::EdgeType edge_type>
+inline void CordRepBtree::SetEdge(CordRep* edge) {
+ const int idx = edge_type == kFront ? begin() : back();
+ CordRep::Unref(edges_[idx]);
+ edges_[idx] = edge;
+}
+
+inline CordRepBtree::OpResult CordRepBtree::ToOpResult(bool owned) {
+ return owned ? OpResult{this, kSelf} : OpResult{Copy(), kCopied};
+}
+
+inline CordRepBtree::Position CordRepBtree::IndexOf(size_t offset) const {
+ assert(offset < length);
+ size_t index = begin();
+ while (offset >= edges_[index]->length) offset -= edges_[index++]->length;
+ return {index, offset};
+}
+
+inline CordRepBtree::Position CordRepBtree::IndexBefore(size_t offset) const {
+ assert(offset > 0);
+ assert(offset <= length);
+ size_t index = begin();
+ while (offset > edges_[index]->length) offset -= edges_[index++]->length;
+ return {index, offset};
+}
+
+inline CordRepBtree::Position CordRepBtree::IndexBefore(Position front,
+ size_t offset) const {
+ size_t index = front.index;
+ offset = offset + front.n;
+ while (offset > edges_[index]->length) offset -= edges_[index++]->length;
+ return {index, offset};
+}
+
+inline CordRepBtree::Position CordRepBtree::IndexOfLength(size_t n) const {
+ assert(n <= length);
+ size_t index = back();
+ size_t strip = length - n;
+ while (strip >= edges_[index]->length) strip -= edges_[index--]->length;
+ return {index, edges_[index]->length - strip};
+}
+
+inline CordRepBtree::Position CordRepBtree::IndexBeyond(
+ const size_t offset) const {
+ // We need to find the edge whose starting offset is beyond (>=) `offset`.
+ // For this we can't use the `offset -= length` logic of IndexOf. Instead, we
+ // track the offset of the `current edge` in `off`, which we increase as we
+ // iterate over the edges until we find the matching edge.
+ size_t off = 0;
+ size_t index = begin();
+ while (offset > off) off += edges_[index++]->length;
+ return {index, off - offset};
+}
+
+inline CordRepBtree* CordRepBtree::Create(CordRep* rep) {
+ if (IsDataEdge(rep)) return New(rep);
+ return CreateSlow(rep);
+}
+
+inline Span<char> CordRepBtree::GetAppendBuffer(size_t size) {
+ assert(refcount.IsMutable());
+ CordRepBtree* tree = this;
+ const int height = this->height();
+ CordRepBtree* n1 = tree;
+ CordRepBtree* n2 = tree;
+ CordRepBtree* n3 = tree;
+ switch (height) {
+ case 3:
+ tree = tree->Edge(kBack)->btree();
+ if (!tree->refcount.IsMutable()) return {};
+ n2 = tree;
+ ABSL_FALLTHROUGH_INTENDED;
+ case 2:
+ tree = tree->Edge(kBack)->btree();
+ if (!tree->refcount.IsMutable()) return {};
+ n1 = tree;
+ ABSL_FALLTHROUGH_INTENDED;
+ case 1:
+ tree = tree->Edge(kBack)->btree();
+ if (!tree->refcount.IsMutable()) return {};
+ ABSL_FALLTHROUGH_INTENDED;
+ case 0:
+ CordRep* edge = tree->Edge(kBack);
+ if (!edge->refcount.IsMutable()) return {};
+ if (edge->tag < FLAT) return {};
+ size_t avail = edge->flat()->Capacity() - edge->length;
+ if (avail == 0) return {};
+ size_t delta = (std::min)(size, avail);
+ Span<char> span = {edge->flat()->Data() + edge->length, delta};
+ edge->length += delta;
+ switch (height) {
+ case 3:
+ n3->length += delta;
+ ABSL_FALLTHROUGH_INTENDED;
+ case 2:
+ n2->length += delta;
+ ABSL_FALLTHROUGH_INTENDED;
+ case 1:
+ n1->length += delta;
+ ABSL_FALLTHROUGH_INTENDED;
+ case 0:
+ tree->length += delta;
+ return span;
+ }
+ break;
+ }
+ return GetAppendBufferSlow(size);
+}
+
+extern template CordRepBtree* CordRepBtree::AddCordRep<CordRepBtree::kBack>(
+ CordRepBtree* tree, CordRep* rep);
+
+extern template CordRepBtree* CordRepBtree::AddCordRep<CordRepBtree::kFront>(
+ CordRepBtree* tree, CordRep* rep);
+
+inline CordRepBtree* CordRepBtree::Append(CordRepBtree* tree, CordRep* rep) {
+ if (ABSL_PREDICT_TRUE(IsDataEdge(rep))) {
+ return CordRepBtree::AddCordRep<kBack>(tree, rep);
+ }
+ return AppendSlow(tree, rep);
+}
+
+inline CordRepBtree* CordRepBtree::Prepend(CordRepBtree* tree, CordRep* rep) {
+ if (ABSL_PREDICT_TRUE(IsDataEdge(rep))) {
+ return CordRepBtree::AddCordRep<kFront>(tree, rep);
+ }
+ return PrependSlow(tree, rep);
+}
+
+#ifdef NDEBUG
+
+inline CordRepBtree* CordRepBtree::AssertValid(CordRepBtree* tree,
+ bool /* shallow */) {
+ return tree;
+}
+
+inline const CordRepBtree* CordRepBtree::AssertValid(const CordRepBtree* tree,
+ bool /* shallow */) {
+ return tree;
+}
+
+#endif
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_H_
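
The class surface above composes as follows; a minimal sketch, assuming only the
declarations in this header plus `CordRepFlat::New` from cord_rep_flat.h. The
helpers `MakeFlat` and `Demo` are illustrative names, not part of the API:

  #include <cstring>

  #include "y_absl/strings/internal/cord_rep_btree.h"
  #include "y_absl/strings/internal/cord_rep_flat.h"

  namespace cord = y_absl::cord_internal;

  // Wraps a copy of `s` in a FLAT rep, i.e., a data edge as defined above.
  static cord::CordRep* MakeFlat(y_absl::string_view s) {
    cord::CordRepFlat* flat = cord::CordRepFlat::New(s.size());
    memcpy(flat->Data(), s.data(), s.size());
    flat->length = s.size();
    return flat;
  }

  static void Demo() {
    // Create() wraps a single data edge in a leaf node. Append() consumes the
    // tree reference and may return a new (taller) root, so re-assign.
    cord::CordRepBtree* tree = cord::CordRepBtree::Create(MakeFlat("abc"));
    tree = cord::CordRepBtree::Append(tree, MakeFlat("def"));

    // Both edges fit one leaf node, so Edges() yields data edges which
    // EdgeData() can view directly.
    for (cord::CordRep* edge : tree->Edges()) {
      y_absl::string_view sv = cord::CordRepBtree::EdgeData(edge);
      (void)sv;  // DoSomethingWithDataIn(sv);
    }
    cord::CordRep::Unref(tree);
  }
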
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_navigator.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_navigator.cc
index 6dae7bcd3e..9ed428681a 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_navigator.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_navigator.cc
@@ -1,185 +1,185 @@
-// Copyright 2021 The Abseil Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "y_absl/strings/internal/cord_rep_btree_navigator.h"
-
-#include <cassert>
-
-#include "y_absl/strings/internal/cord_internal.h"
-#include "y_absl/strings/internal/cord_rep_btree.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-using ReadResult = CordRepBtreeNavigator::ReadResult;
-
-namespace {
-
-// Returns a `CordRepSubstring` from `rep` starting at `offset` of size `n`.
-// If `rep` is already a `CordRepSubstring` instance, an adjusted instance is
-// created based on the old offset and new offset.
-// Adopts a reference on `rep`. Rep must be a valid data edge. Returns
-// nullptr if `n == 0`, `rep` if `n == rep->length`.
-// Requires `offset < rep->length` and `offset + n <= rep->length`.
-// TODO(192061034): move to utility library in internal and optimize for small
-// substrings of larger reps.
-inline CordRep* Substring(CordRep* rep, size_t offset, size_t n) {
- assert(n <= rep->length);
- assert(offset < rep->length);
- assert(offset <= rep->length - n);
- assert(CordRepBtree::IsDataEdge(rep));
-
- if (n == 0) return nullptr;
- if (n == rep->length) return CordRep::Ref(rep);
-
- if (rep->tag == SUBSTRING) {
- offset += rep->substring()->start;
- rep = rep->substring()->child;
- }
-
- CordRepSubstring* substring = new CordRepSubstring();
- substring->length = n;
- substring->tag = SUBSTRING;
- substring->start = offset;
- substring->child = CordRep::Ref(rep);
- return substring;
-}
-
-inline CordRep* Substring(CordRep* rep, size_t offset) {
- return Substring(rep, offset, rep->length - offset);
-}
-
-} // namespace
-
-CordRepBtreeNavigator::Position CordRepBtreeNavigator::Skip(size_t n) {
- int height = 0;
- size_t index = index_[0];
- CordRepBtree* node = node_[0];
- CordRep* edge = node->Edge(index);
-
- // Overall logic: Find an edge of at least the length we need to skip.
- // We consume all edges which are smaller (i.e., must be 100% skipped).
- // If we exhaust all edges on the current level, we move one level
- // up the tree, and repeat until we either find the edge, or until we hit
- // the top of the tree, meaning the skip exceeds tree->length.
- while (n >= edge->length) {
- n -= edge->length;
- while (++index == node->end()) {
- if (++height > height_) return {nullptr, n};
- node = node_[height];
- index = index_[height];
- }
- edge = node->Edge(index);
- }
-
- // If we moved up the tree, descend down to the leaf level, consuming all
- // edges that must be skipped.
- while (height > 0) {
- node = edge->btree();
- index_[height] = index;
- node_[--height] = node;
- index = node->begin();
- edge = node->Edge(index);
- while (n >= edge->length) {
- n -= edge->length;
- ++index;
- assert(index != node->end());
- edge = node->Edge(index);
- }
- }
- index_[0] = index;
- return {edge, n};
-}
-
-ReadResult CordRepBtreeNavigator::Read(size_t edge_offset, size_t n) {
- int height = 0;
- size_t length = edge_offset + n;
- size_t index = index_[0];
- CordRepBtree* node = node_[0];
- CordRep* edge = node->Edge(index);
- assert(edge_offset < edge->length);
-
- if (length < edge->length) {
- return {Substring(edge, edge_offset, n), length};
- }
-
- // Similar to 'Skip', we consume all edges that are inside the 'length' of
- // data that needs to be read. If we exhaust the current level, we move one
- // level up the tree and repeat until we hit the final edge that must be
- // (partially) read. We consume all edges into `subtree`.
- CordRepBtree* subtree = CordRepBtree::New(Substring(edge, edge_offset));
- size_t subtree_end = 1;
- do {
- length -= edge->length;
- while (++index == node->end()) {
- index_[height] = index;
- if (++height > height_) {
- subtree->set_end(subtree_end);
- if (length == 0) return {subtree, 0};
- CordRep::Unref(subtree);
- return {nullptr, length};
- }
- if (length != 0) {
- subtree->set_end(subtree_end);
- subtree = CordRepBtree::New(subtree);
- subtree_end = 1;
- }
- node = node_[height];
- index = index_[height];
- }
- edge = node->Edge(index);
- if (length >= edge->length) {
- subtree->length += edge->length;
- subtree->edges_[subtree_end++] = CordRep::Ref(edge);
- }
- } while (length >= edge->length);
- CordRepBtree* tree = subtree;
- subtree->length += length;
-
- // If we moved up the tree, descend down to the leaf level, consuming all
- // edges that must be read, adding 'down' nodes to `subtree`.
- while (height > 0) {
- node = edge->btree();
- index_[height] = index;
- node_[--height] = node;
- index = node->begin();
- edge = node->Edge(index);
-
- if (length != 0) {
- CordRepBtree* right = CordRepBtree::New(height);
- right->length = length;
- subtree->edges_[subtree_end++] = right;
- subtree->set_end(subtree_end);
- subtree = right;
- subtree_end = 0;
- while (length >= edge->length) {
- subtree->edges_[subtree_end++] = CordRep::Ref(edge);
- length -= edge->length;
- edge = node->Edge(++index);
- }
- }
- }
- // Add any (partial) edge still remaining at the leaf level.
- if (length != 0) {
- subtree->edges_[subtree_end++] = Substring(edge, 0, length);
- }
- subtree->set_end(subtree_end);
- index_[0] = index;
- return {tree, length};
-}
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "y_absl/strings/internal/cord_rep_btree_navigator.h"
+
+#include <cassert>
+
+#include "y_absl/strings/internal/cord_internal.h"
+#include "y_absl/strings/internal/cord_rep_btree.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+using ReadResult = CordRepBtreeNavigator::ReadResult;
+
+namespace {
+
+// Returns a `CordRepSubstring` from `rep` starting at `offset` of size `n`.
+// If `rep` is already a `CordRepSubstring` instance, an adjusted instance is
+// created based on the old offset and new offset.
+// Adopts a reference on `rep`. Rep must be a valid data edge. Returns
+// nullptr if `n == 0`, `rep` if `n == rep->length`.
+// Requires `offset < rep->length` and `offset + n <= rep->length`.
+// TODO(192061034): move to utility library in internal and optimize for small
+// substrings of larger reps.
+inline CordRep* Substring(CordRep* rep, size_t offset, size_t n) {
+ assert(n <= rep->length);
+ assert(offset < rep->length);
+ assert(offset <= rep->length - n);
+ assert(CordRepBtree::IsDataEdge(rep));
+
+ if (n == 0) return nullptr;
+ if (n == rep->length) return CordRep::Ref(rep);
+
+ if (rep->tag == SUBSTRING) {
+ offset += rep->substring()->start;
+ rep = rep->substring()->child;
+ }
+
+ CordRepSubstring* substring = new CordRepSubstring();
+ substring->length = n;
+ substring->tag = SUBSTRING;
+ substring->start = offset;
+ substring->child = CordRep::Ref(rep);
+ return substring;
+}
+
+inline CordRep* Substring(CordRep* rep, size_t offset) {
+ return Substring(rep, offset, rep->length - offset);
+}
+
+} // namespace
+
+CordRepBtreeNavigator::Position CordRepBtreeNavigator::Skip(size_t n) {
+ int height = 0;
+ size_t index = index_[0];
+ CordRepBtree* node = node_[0];
+ CordRep* edge = node->Edge(index);
+
+ // Overall logic: Find an edge of at least the length we need to skip.
+ // We consume all edges which are smaller (i.e., must be 100% skipped).
+ // If we exhaust all edges on the current level, we move one level
+ // up the tree, and repeat until we either find the edge, or until we hit
+ // the top of the tree, meaning the skip exceeds tree->length.
+ while (n >= edge->length) {
+ n -= edge->length;
+ while (++index == node->end()) {
+ if (++height > height_) return {nullptr, n};
+ node = node_[height];
+ index = index_[height];
+ }
+ edge = node->Edge(index);
+ }
+
+ // If we moved up the tree, descend down to the leaf level, consuming all
+ // edges that must be skipped.
+ while (height > 0) {
+ node = edge->btree();
+ index_[height] = index;
+ node_[--height] = node;
+ index = node->begin();
+ edge = node->Edge(index);
+ while (n >= edge->length) {
+ n -= edge->length;
+ ++index;
+ assert(index != node->end());
+ edge = node->Edge(index);
+ }
+ }
+ index_[0] = index;
+ return {edge, n};
+}
+
+ReadResult CordRepBtreeNavigator::Read(size_t edge_offset, size_t n) {
+ int height = 0;
+ size_t length = edge_offset + n;
+ size_t index = index_[0];
+ CordRepBtree* node = node_[0];
+ CordRep* edge = node->Edge(index);
+ assert(edge_offset < edge->length);
+
+ if (length < edge->length) {
+ return {Substring(edge, edge_offset, n), length};
+ }
+
+ // Similar to 'Skip', we consume all edges that are inside the 'length' of
+ // data that needs to be read. If we exhaust the current level, we move one
+ // level up the tree and repeat until we hit the final edge that must be
+ // (partially) read. We consume all edges into `subtree`.
+ CordRepBtree* subtree = CordRepBtree::New(Substring(edge, edge_offset));
+ size_t subtree_end = 1;
+ do {
+ length -= edge->length;
+ while (++index == node->end()) {
+ index_[height] = index;
+ if (++height > height_) {
+ subtree->set_end(subtree_end);
+ if (length == 0) return {subtree, 0};
+ CordRep::Unref(subtree);
+ return {nullptr, length};
+ }
+ if (length != 0) {
+ subtree->set_end(subtree_end);
+ subtree = CordRepBtree::New(subtree);
+ subtree_end = 1;
+ }
+ node = node_[height];
+ index = index_[height];
+ }
+ edge = node->Edge(index);
+ if (length >= edge->length) {
+ subtree->length += edge->length;
+ subtree->edges_[subtree_end++] = CordRep::Ref(edge);
+ }
+ } while (length >= edge->length);
+ CordRepBtree* tree = subtree;
+ subtree->length += length;
+
+ // If we moved up the tree, descend down to the leaf level, consuming all
+ // edges that must be read, adding 'down' nodes to `subtree`.
+ while (height > 0) {
+ node = edge->btree();
+ index_[height] = index;
+ node_[--height] = node;
+ index = node->begin();
+ edge = node->Edge(index);
+
+ if (length != 0) {
+ CordRepBtree* right = CordRepBtree::New(height);
+ right->length = length;
+ subtree->edges_[subtree_end++] = right;
+ subtree->set_end(subtree_end);
+ subtree = right;
+ subtree_end = 0;
+ while (length >= edge->length) {
+ subtree->edges_[subtree_end++] = CordRep::Ref(edge);
+ length -= edge->length;
+ edge = node->Edge(++index);
+ }
+ }
+ }
+ // Add any (partial) edge still remaining at the leaf level.
+ if (length != 0) {
+ subtree->edges_[subtree_end++] = Substring(edge, 0, length);
+ }
+ subtree->set_end(subtree_end);
+ index_[0] = index;
+ return {tree, length};
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
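
The `Skip()` implementation above moves the navigator without materializing any
data. A short calling sketch, assuming a valid non-empty `tree`; the name
`ViewAfterSkip` is illustrative:

  #include "y_absl/strings/internal/cord_rep_btree.h"
  #include "y_absl/strings/internal/cord_rep_btree_navigator.h"

  namespace cord = y_absl::cord_internal;

  // Returns a view of the data at absolute offset `n` in `tree`, or an empty
  // view if `n` is at or beyond the end. Skip() counts from the start of the
  // current edge, which InitFirst() sets to the first data edge.
  static y_absl::string_view ViewAfterSkip(cord::CordRepBtree* tree, size_t n) {
    cord::CordRepBtreeNavigator nav;
    nav.InitFirst(tree);
    cord::CordRepBtreeNavigator::Position pos = nav.Skip(n);
    if (pos.edge == nullptr) return {};
    return cord::CordRepBtree::EdgeData(pos.edge).substr(pos.offset);
  }
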
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_navigator.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_navigator.h
index 40c58e3b3c..8ea166f16e 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_navigator.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_navigator.h
@@ -1,265 +1,265 @@
-// Copyright 2021 The Abseil Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#ifndef ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_NAVIGATOR_H_
-#define ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_NAVIGATOR_H_
-
-#include <cassert>
-#include <iostream>
-
-#include "y_absl/strings/internal/cord_internal.h"
-#include "y_absl/strings/internal/cord_rep_btree.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-// CordRepBtreeNavigator is a bi-directional navigator allowing callers to
-// navigate all the (leaf) data edges in a CordRepBtree instance.
-//
-// A CordRepBtreeNavigator instance is by default empty. Callers initialize a
-// navigator instance by calling one of `InitFirst()`, `InitLast()` or
-// `InitOffset()`, which establishes a current position. Callers can then
-// navigate using the `Next`, `Previous`, `Skip` and `Seek` methods.
-//
-// The navigator instance does not take or adopt a reference on the provided
-// `tree` on any of the initialization calls. Callers are responsible for
-// guaranteeing the lifecycle of the provided tree. A navigator instance can
-// be reset to the empty state by calling `Reset`.
-//
-// A navigator only keeps positional state on the 'current data edge'; it
-// explicitly does not keep any 'offset' state. The class does accept and
-// return offsets in the `Read()`, `Skip()` and `Seek()` methods as these
-// would otherwise put a big burden on callers. Callers are expected to
-// maintain (returned) offset info if they require such granular state.
-class CordRepBtreeNavigator {
- public:
- // The logical position as returned by the Seek() and Skip() functions.
- // It contains the current leaf edge for the desired seek or skip position and
- // the offset of that position inside that edge.
- struct Position {
- CordRep* edge;
- size_t offset;
- };
-
- // The read result as returned by the Read() function.
- // `tree` contains the resulting tree which is identical to the result
- // of calling CordRepBtree::SubTree(...) on the tree being navigated.
- // `n` contains the number of bytes used from the last navigated-to
- // edge of the tree.
- struct ReadResult {
- CordRep* tree;
- size_t n;
- };
-
- // Returns true if this instance is not empty.
- explicit operator bool() const;
-
- // Returns the tree for this instance or nullptr if empty.
- CordRepBtree* btree() const;
-
- // Returns the data edge of the current position.
- // Requires this instance to not be empty.
- CordRep* Current() const;
-
- // Resets this navigator to `tree`, returning the first data edge in the tree.
- CordRep* InitFirst(CordRepBtree* tree);
-
- // Resets this navigator to `tree`, returning the last data edge in the tree.
- CordRep* InitLast(CordRepBtree* tree);
-
- // Resets this navigator to `tree`, returning the data edge at position
- // `offset` and the relative offset of `offset` into that data edge.
- // Returns `Position.edge = nullptr` if the provided offset is greater
- // than or equal to the length of the tree, in which case the state of
- // the navigator instance remains unchanged.
- Position InitOffset(CordRepBtree* tree, size_t offset);
-
- // Navigates to the next data edge.
- // Returns the next data edge or nullptr if there is no next data edge, in
- // which case the current position remains unchanged.
- CordRep* Next();
-
- // Navigates to the previous data edge.
- // Returns the previous data edge or nullptr if there is no previous data
- // edge, in which case the current position remains unchanged.
- CordRep* Previous();
-
- // Navigates to the data edge at position `offset`. Returns the navigated-to
- // data edge in `Position.edge` and the relative offset of `offset` into that
- // data edge in `Position.offset`. Returns `Position.edge = nullptr` if the
- // provided offset is greater than or equal to the tree's length.
- Position Seek(size_t offset);
-
- // Reads `n` bytes of data starting at offset `edge_offset` of the current
- // data edge, and returns the result in `ReadResult.tree`. `ReadResult.n`
- // contains the `bytes used` from the last / current data edge in the tree.
- // This allows users that mix regular navigation (using string views) and
- // 'read into cord' navigation to keep track of the current state, and which
- // bytes have been consumed from a navigator.
- // This function returns `ReadResult.tree = nullptr` if the requested length
- // exceeds the length of the tree starting at the current data edge.
- ReadResult Read(size_t edge_offset, size_t n);
-
- // Skips `n` bytes forward from the current data edge, returning the navigated
- // to data edge in `Position.edge` and `Position.offset` containing the offset
- // inside that data edge. Note that the state of the navigator is left
- // unchanged if `n` is smaller than the length of the current data edge.
- Position Skip(size_t n);
-
- // Resets this instance to the default / empty state.
- void Reset();
-
- private:
- // Slow path for Next() if Next() reached the end of a leaf node. Backtracks
- // up the stack until it finds a node that has a 'next' position available,
- // and then does a 'front dive' towards the next leaf node.
- CordRep* NextUp();
-
- // Slow path for Previous() if Previous() reached the beginning of a leaf
- // node. Backtracks up the stack until it finds a node that has a 'previous'
- // position available, and then does a 'back dive' towards the previous leaf
- // node.
- CordRep* PreviousUp();
-
- // Generic implementation of InitFirst() and InitLast().
- template <CordRepBtree::EdgeType edge_type>
- CordRep* Init(CordRepBtree* tree);
-
- // `height_` contains the height of the current tree, or -1 if empty.
- int height_ = -1;
-
- // `index_` and `node_` contain the navigation state as the 'path' to the
- // current data edge which is at `node_[0]->Edge(index_[0])`. The contents
- // of these are undefined until the instance is initialized (`height_ >= 0`).
- uint8_t index_[CordRepBtree::kMaxHeight];
- CordRepBtree* node_[CordRepBtree::kMaxHeight];
-};
-
-// Returns true if this instance is not empty.
-inline CordRepBtreeNavigator::operator bool() const { return height_ >= 0; }
-
-inline CordRepBtree* CordRepBtreeNavigator::btree() const {
- return height_ >= 0 ? node_[height_] : nullptr;
-}
-
-inline CordRep* CordRepBtreeNavigator::Current() const {
- assert(height_ >= 0);
- return node_[0]->Edge(index_[0]);
-}
-
-inline void CordRepBtreeNavigator::Reset() { height_ = -1; }
-
-inline CordRep* CordRepBtreeNavigator::InitFirst(CordRepBtree* tree) {
- return Init<CordRepBtree::kFront>(tree);
-}
-
-inline CordRep* CordRepBtreeNavigator::InitLast(CordRepBtree* tree) {
- return Init<CordRepBtree::kBack>(tree);
-}
-
-template <CordRepBtree::EdgeType edge_type>
-inline CordRep* CordRepBtreeNavigator::Init(CordRepBtree* tree) {
- assert(tree != nullptr);
- assert(tree->size() > 0);
- int height = height_ = tree->height();
- size_t index = tree->index(edge_type);
- node_[height] = tree;
- index_[height] = static_cast<uint8_t>(index);
- while (--height >= 0) {
- tree = tree->Edge(index)->btree();
- node_[height] = tree;
- index = tree->index(edge_type);
- index_[height] = static_cast<uint8_t>(index);
- }
- return node_[0]->Edge(index);
-}
-
-inline CordRepBtreeNavigator::Position CordRepBtreeNavigator::Seek(
- size_t offset) {
- assert(btree() != nullptr);
- int height = height_;
- CordRepBtree* edge = node_[height];
- if (ABSL_PREDICT_FALSE(offset >= edge->length)) return {nullptr, 0};
- CordRepBtree::Position index = edge->IndexOf(offset);
- index_[height] = static_cast<uint8_t>(index.index);
- while (--height >= 0) {
- edge = edge->Edge(index.index)->btree();
- node_[height] = edge;
- index = edge->IndexOf(index.n);
- index_[height] = static_cast<uint8_t>(index.index);
- }
- return {edge->Edge(index.index), index.n};
-}
-
-inline CordRepBtreeNavigator::Position CordRepBtreeNavigator::InitOffset(
- CordRepBtree* tree, size_t offset) {
- assert(tree != nullptr);
- if (ABSL_PREDICT_FALSE(offset >= tree->length)) return {nullptr, 0};
- height_ = tree->height();
- node_[height_] = tree;
- return Seek(offset);
-}
-
-inline CordRep* CordRepBtreeNavigator::Next() {
- CordRepBtree* edge = node_[0];
- return index_[0] == edge->back() ? NextUp() : edge->Edge(++index_[0]);
-}
-
-inline CordRep* CordRepBtreeNavigator::Previous() {
- CordRepBtree* edge = node_[0];
- return index_[0] == edge->begin() ? PreviousUp() : edge->Edge(--index_[0]);
-}
-
-inline CordRep* CordRepBtreeNavigator::NextUp() {
- assert(index_[0] == node_[0]->back());
- CordRepBtree* edge;
- size_t index;
- int height = 0;
- do {
- if (++height > height_) return nullptr;
- edge = node_[height];
- index = index_[height] + 1;
- } while (index == edge->end());
- index_[height] = static_cast<uint8_t>(index);
- do {
- node_[--height] = edge = edge->Edge(index)->btree();
- index_[height] = static_cast<uint8_t>(index = edge->begin());
- } while (height > 0);
- return edge->Edge(index);
-}
-
-inline CordRep* CordRepBtreeNavigator::PreviousUp() {
- assert(index_[0] == node_[0]->begin());
- CordRepBtree* edge;
- size_t index;
- int height = 0;
- do {
- if (++height > height_) return nullptr;
- edge = node_[height];
- index = index_[height];
- } while (index == edge->begin());
- index_[height] = static_cast<uint8_t>(--index);
- do {
- node_[--height] = edge = edge->Edge(index)->btree();
- index_[height] = static_cast<uint8_t>(index = edge->back());
- } while (height > 0);
- return edge->Edge(index);
-}
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
-
-#endif // ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_NAVIGATOR_H_
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_NAVIGATOR_H_
+#define ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_NAVIGATOR_H_
+
+#include <cassert>
+#include <iostream>
+
+#include "y_absl/strings/internal/cord_internal.h"
+#include "y_absl/strings/internal/cord_rep_btree.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordRepBtreeNavigator is a bi-directional navigator allowing callers to
+// navigate all the (leaf) data edges in a CordRepBtree instance.
+//
+// A CordRepBtreeNavigator instance is by default empty. Callers initialize a
+// navigator instance by calling one of `InitFirst()`, `InitLast()` or
+// `InitOffset()`, which establishes a current position. Callers can then
+// navigate using the `Next`, `Previous`, `Skip` and `Seek` methods.
+//
+// The navigator instance does not take or adopt a reference on the provided
+// `tree` on any of the initialization calls. Callers are responsible for
+// guaranteeing the lifecycle of the provided tree. A navigator instance can
+// be reset to the empty state by calling `Reset`.
+//
+// A navigator only keeps positional state on the 'current data edge'; it
+// explicitly does not keep any 'offset' state. The class does accept and
+// return offsets in the `Read()`, `Skip()` and `Seek()` methods as these
+// would otherwise put a big burden on callers. Callers are expected to
+// maintain (returned) offset info if they require such granular state.
+class CordRepBtreeNavigator {
+ public:
+ // The logical position as returned by the Seek() and Skip() functions.
+ // It contains the current leaf edge for the desired seek or skip position and
+ // the offset of that position inside that edge.
+ struct Position {
+ CordRep* edge;
+ size_t offset;
+ };
+
+ // The read result as returned by the Read() function.
+ // `tree` contains the resulting tree which is identical to the result
+ // of calling CordRepBtree::SubTree(...) on the tree being navigated.
+ // `n` contains the number of bytes used from the last navigated-to
+ // edge of the tree.
+ struct ReadResult {
+ CordRep* tree;
+ size_t n;
+ };
+
+ // Returns true if this instance is not empty.
+ explicit operator bool() const;
+
+ // Returns the tree for this instance or nullptr if empty.
+ CordRepBtree* btree() const;
+
+ // Returns the data edge of the current position.
+ // Requires this instance to not be empty.
+ CordRep* Current() const;
+
+ // Resets this navigator to `tree`, returning the first data edge in the tree.
+ CordRep* InitFirst(CordRepBtree* tree);
+
+ // Resets this navigator to `tree`, returning the last data edge in the tree.
+ CordRep* InitLast(CordRepBtree* tree);
+
+ // Resets this navigator to `tree`, returning the data edge at position
+ // `offset` and the relative offset of `offset` into that data edge.
+ // Returns `Position.edge = nullptr` if the provided offset is greater
+ // than or equal to the length of the tree, in which case the state of
+ // the navigator instance remains unchanged.
+ Position InitOffset(CordRepBtree* tree, size_t offset);
+
+ // Navigates to the next data edge.
+ // Returns the next data edge or nullptr if there is no next data edge, in
+ // which case the current position remains unchanged.
+ CordRep* Next();
+
+ // Navigates to the previous data edge.
+ // Returns the previous data edge or nullptr if there is no previous data
+ // edge, in which case the current position remains unchanged.
+ CordRep* Previous();
+
+ // Navigates to the data edge at position `offset`. Returns the navigated-to
+ // data edge in `Position.edge` and the relative offset of `offset` into that
+ // data edge in `Position.offset`. Returns `Position.edge = nullptr` if the
+ // provided offset is greater than or equal to the tree's length.
+ Position Seek(size_t offset);
+
+ // Reads `n` bytes of data starting at offset `edge_offset` of the current
+ // data edge, and returns the result in `ReadResult.tree`. `ReadResult.n`
+ // contains the `bytes used` from the last / current data edge in the tree.
+ // This allows users that mix regular navigation (using string views) and
+ // 'read into cord' navigation to keep track of the current state, and which
+ // bytes have been consumed from a navigator.
+ // This function returns `ReadResult.tree = nullptr` if the requested length
+ // exceeds the length of the tree starting at the current data edge.
+ ReadResult Read(size_t edge_offset, size_t n);
+
+ // Skips `n` bytes forward from the current data edge, returning the navigated
+ // to data edge in `Position.edge` and `Position.offset` containing the offset
+ // inside that data edge. Note that the state of the navigator is left
+ // unchanged if `n` is smaller than the length of the current data edge.
+ Position Skip(size_t n);
+
+ // Resets this instance to the default / empty state.
+ void Reset();
+
+ private:
+ // Slow path for Next() if Next() reached the end of a leaf node. Backtracks
+ // up the stack until it finds a node that has a 'next' position available,
+ // and then does a 'front dive' towards the next leaf node.
+ CordRep* NextUp();
+
+ // Slow path for Previous() if Previous() reached the beginning of a leaf
+ // node. Backtracks up the stack until it finds a node that has a 'previous'
+ // position available, and then does a 'back dive' towards the previous leaf
+ // node.
+ CordRep* PreviousUp();
+
+ // Generic implementation of InitFirst() and InitLast().
+ template <CordRepBtree::EdgeType edge_type>
+ CordRep* Init(CordRepBtree* tree);
+
+ // `height_` contains the height of the current tree, or -1 if empty.
+ int height_ = -1;
+
+ // `index_` and `node_` contain the navigation state as the 'path' to the
+ // current data edge which is at `node_[0]->Edge(index_[0])`. The contents
+ // of these are undefined until the instance is initialized (`height_ >= 0`).
+ uint8_t index_[CordRepBtree::kMaxHeight];
+ CordRepBtree* node_[CordRepBtree::kMaxHeight];
+};
+
+// Returns true if this instance is not empty.
+inline CordRepBtreeNavigator::operator bool() const { return height_ >= 0; }
+
+inline CordRepBtree* CordRepBtreeNavigator::btree() const {
+ return height_ >= 0 ? node_[height_] : nullptr;
+}
+
+inline CordRep* CordRepBtreeNavigator::Current() const {
+ assert(height_ >= 0);
+ return node_[0]->Edge(index_[0]);
+}
+
+inline void CordRepBtreeNavigator::Reset() { height_ = -1; }
+
+inline CordRep* CordRepBtreeNavigator::InitFirst(CordRepBtree* tree) {
+ return Init<CordRepBtree::kFront>(tree);
+}
+
+inline CordRep* CordRepBtreeNavigator::InitLast(CordRepBtree* tree) {
+ return Init<CordRepBtree::kBack>(tree);
+}
+
+template <CordRepBtree::EdgeType edge_type>
+inline CordRep* CordRepBtreeNavigator::Init(CordRepBtree* tree) {
+ assert(tree != nullptr);
+ assert(tree->size() > 0);
+ int height = height_ = tree->height();
+ size_t index = tree->index(edge_type);
+ node_[height] = tree;
+ index_[height] = static_cast<uint8_t>(index);
+ while (--height >= 0) {
+ tree = tree->Edge(index)->btree();
+ node_[height] = tree;
+ index = tree->index(edge_type);
+ index_[height] = static_cast<uint8_t>(index);
+ }
+ return node_[0]->Edge(index);
+}
+
+inline CordRepBtreeNavigator::Position CordRepBtreeNavigator::Seek(
+ size_t offset) {
+ assert(btree() != nullptr);
+ int height = height_;
+ CordRepBtree* edge = node_[height];
+ if (ABSL_PREDICT_FALSE(offset >= edge->length)) return {nullptr, 0};
+ CordRepBtree::Position index = edge->IndexOf(offset);
+ index_[height] = static_cast<uint8_t>(index.index);
+ while (--height >= 0) {
+ edge = edge->Edge(index.index)->btree();
+ node_[height] = edge;
+ index = edge->IndexOf(index.n);
+ index_[height] = static_cast<uint8_t>(index.index);
+ }
+ return {edge->Edge(index.index), index.n};
+}
+
+inline CordRepBtreeNavigator::Position CordRepBtreeNavigator::InitOffset(
+ CordRepBtree* tree, size_t offset) {
+ assert(tree != nullptr);
+ if (ABSL_PREDICT_FALSE(offset >= tree->length)) return {nullptr, 0};
+ height_ = tree->height();
+ node_[height_] = tree;
+ return Seek(offset);
+}
+
+inline CordRep* CordRepBtreeNavigator::Next() {
+ CordRepBtree* edge = node_[0];
+ return index_[0] == edge->back() ? NextUp() : edge->Edge(++index_[0]);
+}
+
+inline CordRep* CordRepBtreeNavigator::Previous() {
+ CordRepBtree* edge = node_[0];
+ return index_[0] == edge->begin() ? PreviousUp() : edge->Edge(--index_[0]);
+}
+
+inline CordRep* CordRepBtreeNavigator::NextUp() {
+ assert(index_[0] == node_[0]->back());
+ CordRepBtree* edge;
+ size_t index;
+ int height = 0;
+ do {
+ if (++height > height_) return nullptr;
+ edge = node_[height];
+ index = index_[height] + 1;
+ } while (index == edge->end());
+ index_[height] = static_cast<uint8_t>(index);
+ do {
+ node_[--height] = edge = edge->Edge(index)->btree();
+ index_[height] = static_cast<uint8_t>(index = edge->begin());
+ } while (height > 0);
+ return edge->Edge(index);
+}
+
+inline CordRep* CordRepBtreeNavigator::PreviousUp() {
+ assert(index_[0] == node_[0]->begin());
+ CordRepBtree* edge;
+ size_t index;
+ int height = 0;
+ do {
+ if (++height > height_) return nullptr;
+ edge = node_[height];
+ index = index_[height];
+ } while (index == edge->begin());
+ index_[height] = static_cast<uint8_t>(--index);
+ do {
+ node_[--height] = edge = edge->Edge(index)->btree();
+ index_[height] = static_cast<uint8_t>(index = edge->back());
+ } while (height > 0);
+ return edge->Edge(index);
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_NAVIGATOR_H_
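
Putting the navigator API above together, forward iteration over all data edges
follows the pattern below; a sketch only, with `TotalEdgeBytes` as an
illustrative name. The caller, not the navigator, keeps `tree` alive:

  #include "y_absl/strings/internal/cord_rep_btree.h"
  #include "y_absl/strings/internal/cord_rep_btree_navigator.h"

  namespace cord = y_absl::cord_internal;

  // Sums the length of every data edge. Next() returns nullptr once the last
  // edge has been visited, which terminates the loop.
  static size_t TotalEdgeBytes(cord::CordRepBtree* tree) {
    cord::CordRepBtreeNavigator nav;
    size_t total = 0;
    for (cord::CordRep* edge = nav.InitFirst(tree); edge != nullptr;
         edge = nav.Next()) {
      total += edge->length;
    }
    return total;
  }
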
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_reader.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_reader.cc
index 0bc9dba2e6..f1bc2d6bdb 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_reader.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_reader.cc
@@ -1,68 +1,68 @@
-// Copyright 2021 The Abseil Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "y_absl/strings/internal/cord_rep_btree_reader.h"
-
-#include <cassert>
-
-#include "y_absl/base/config.h"
-#include "y_absl/strings/internal/cord_internal.h"
-#include "y_absl/strings/internal/cord_rep_btree.h"
-#include "y_absl/strings/internal/cord_rep_btree_navigator.h"
-#include "y_absl/strings/internal/cord_rep_flat.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-y_absl::string_view CordRepBtreeReader::Read(size_t n, size_t chunk_size,
- CordRep*& tree) {
- assert(chunk_size <= navigator_.Current()->length);
-
- // If chunk_size is non-zero, we need to start inside the last returned edge.
- // Else we start reading at the next data edge of the tree.
- CordRep* edge = chunk_size ? navigator_.Current() : navigator_.Next();
- const size_t offset = chunk_size ? edge->length - chunk_size : 0;
-
- // Read the subtree and verify we got what we wanted.
- ReadResult result = navigator_.Read(offset, n);
- tree = result.tree;
-
- // If the data returned in `tree` was covered entirely by `chunk_size`, i.e.,
- // read from the 'previous' edge, we did not consume any additional data, and
- // can directly return the substring into the current data edge as the next
- // chunk. We can easily establish from the above code that `navigator_.Next()`
- // has not been called as that requires `chunk_size` to be zero.
- if (n < chunk_size) return CordRepBtree::EdgeData(edge).substr(result.n);
-
- // The amount of data taken from the last edge is `chunk_size` and `result.n`
- // contains the offset into the current edge trailing the read data (which can
- // be 0). As the call to `navigator_.Read()` could have consumed all remaining
- // data, calling `navigator_.Current()` is not safe before checking if we
- // already consumed all remaining data.
- const size_t consumed_by_read = n - chunk_size - result.n;
- if (consumed_by_read >= remaining_) {
- remaining_ = 0;
- return {};
- }
-
- // We did not read all data; return the remaining data from the current edge.
- edge = navigator_.Current();
- remaining_ -= consumed_by_read + edge->length;
- return CordRepBtree::EdgeData(edge).substr(result.n);
-}
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "y_absl/strings/internal/cord_rep_btree_reader.h"
+
+#include <cassert>
+
+#include "y_absl/base/config.h"
+#include "y_absl/strings/internal/cord_internal.h"
+#include "y_absl/strings/internal/cord_rep_btree.h"
+#include "y_absl/strings/internal/cord_rep_btree_navigator.h"
+#include "y_absl/strings/internal/cord_rep_flat.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+y_absl::string_view CordRepBtreeReader::Read(size_t n, size_t chunk_size,
+ CordRep*& tree) {
+ assert(chunk_size <= navigator_.Current()->length);
+
+ // If chunk_size is non-zero, we need to start inside the last returned edge.
+ // Else we start reading at the next data edge of the tree.
+ CordRep* edge = chunk_size ? navigator_.Current() : navigator_.Next();
+ const size_t offset = chunk_size ? edge->length - chunk_size : 0;
+
+ // Read the subtree and verify we got what we wanted.
+ ReadResult result = navigator_.Read(offset, n);
+ tree = result.tree;
+
+ // If the data returned in `tree` was covered entirely by `chunk_size`, i.e.,
+ // read from the 'previous' edge, we did not consume any additional data, and
+ // can directly return the substring into the current data edge as the next
+ // chunk. We can easily establish from the above code that `navigator_.Next()`
+ // has not been called as that requires `chunk_size` to be zero.
+ if (n < chunk_size) return CordRepBtree::EdgeData(edge).substr(result.n);
+
+ // The amount of data taken from the last edge is `chunk_size` and `result.n`
+ // contains the offset into the current edge trailing the read data (which can
+ // be 0). As the call to `navigator_.Read()` could have consumed all remaining
+ // data, calling `navigator_.Current()` is not safe before checking if we
+ // already consumed all remaining data.
+ const size_t consumed_by_read = n - chunk_size - result.n;
+ if (consumed_by_read >= remaining_) {
+ remaining_ = 0;
+ return {};
+ }
+
+ // We did not read all data; return the remaining data from the current edge.
+ edge = navigator_.Current();
+ remaining_ -= consumed_by_read + edge->length;
+ return CordRepBtree::EdgeData(edge).substr(result.n);
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
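
A sketch of driving `Read()` above to copy a fixed-size prefix into a new rep:
passing the full first chunk as `chunk_size` makes the read start at the front
of the first data edge. `ReadPrefix` is an illustrative name; assumptions are
the same as in the header's own examples:

  #include "y_absl/strings/internal/cord_internal.h"
  #include "y_absl/strings/internal/cord_rep_btree.h"
  #include "y_absl/strings/internal/cord_rep_btree_reader.h"

  namespace cord = y_absl::cord_internal;

  // Reads the first `n` bytes of `tree` into `*out`. Per the Read() contract,
  // `*out` is set to nullptr when `n` exceeds the remaining data.
  static bool ReadPrefix(cord::CordRepBtree* tree, size_t n,
                         cord::CordRep** out) {
    cord::CordRepBtreeReader reader;
    y_absl::string_view first = reader.Init(tree);
    reader.Read(n, first.size(), *out);
    return *out != nullptr;
  }
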
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_reader.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_reader.h
index 00b2261f71..23f729fc3d 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_reader.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_btree_reader.h
@@ -1,211 +1,211 @@
-// Copyright 2021 The Abseil Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#ifndef ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_READER_H_
-#define ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_READER_H_
-
-#include <cassert>
-
-#include "y_absl/base/config.h"
-#include "y_absl/strings/internal/cord_internal.h"
-#include "y_absl/strings/internal/cord_rep_btree.h"
-#include "y_absl/strings/internal/cord_rep_btree_navigator.h"
-#include "y_absl/strings/internal/cord_rep_flat.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-// CordRepBtreeReader implements logic to iterate over cord btrees.
-// References to the underlying data are returned as y_absl::string_view values.
-// The most typical use case is a forward-only iteration over tree data.
-// The class also provides `Skip()`, `Seek()` and `Read()` methods similar to
-// CordRepBtreeNavigator that allow more advanced navigation.
-//
-// Example: iterate over all data inside a cord btree:
-//
-// CordRepBtreeReader reader;
-// for (string_view sv = reader.Init(tree); !sv.empty(); sv = reader.Next()) {
-// DoSomethingWithDataIn(sv);
-// }
-//
-// All navigation methods always return the next 'chunk' of data. The class
-// assumes that all data is directly 'consumed' by the caller. For example:
-// invoking `Skip()` will skip the desired number of bytes, and then read and
-// return the next chunk of data directly after the skipped bytes.
-//
-// Example: iterate over all data inside a btree skipping the first 100 bytes:
-//
-// CordRepBtreeReader reader;
-// y_absl::string_view sv = reader.Init(tree);
-// if (sv.length() > 100) {
-// sv.remove_prefix(100);
-// } else {
-// sv = reader.Skip(100 - sv.length());
-// }
-// while (!sv.empty()) {
-// DoSomethingWithDataIn(sv);
-// sv = reader.Next();
-// }
-//
-// It is important to note that `remaining` is based on the end position of
-// the last data edge returned to the caller, not the cumulative data returned
-// to the caller, which can be less in cases of skipping or seeking over data.
-//
-// For example, consider a cord btree with five data edges: "abc", "def", "ghi",
-// "jkl" and "mno":
-//
-// y_absl::string_view sv;
-// CordRepBtreeReader reader;
-//
-// sv = reader.Init(tree); // sv = "abc", remaining = 12
-// sv = reader.Skip(4); // sv = "hi", remaining = 6
-// sv = reader.Skip(2); // sv = "l", remaining = 3
-// sv = reader.Next(); // sv = "mno", remaining = 0
-// sv = reader.Seek(1); // sv = "bc", remaining = 12
-//
-class CordRepBtreeReader {
- public:
- using ReadResult = CordRepBtreeNavigator::ReadResult;
- using Position = CordRepBtreeNavigator::Position;
-
- // Returns true if this instance is not empty.
- explicit operator bool() const { return navigator_.btree() != nullptr; }
-
- // Returns the tree referenced by this instance or nullptr if empty.
- CordRepBtree* btree() const { return navigator_.btree(); }
-
- // Returns the current data edge inside the referenced btree.
- // Requires that the current instance is not empty.
- CordRep* node() const { return navigator_.Current(); }
-
- // Returns the length of the referenced tree.
- // Requires that the current instance is not empty.
- size_t length() const;
-
- // Returns the number of remaining bytes available for iteration, which is the
- // number of bytes directly following the end of the last chunk returned.
- // This value will be zero if we iterated over the last edge in the bound
- // tree, in which case any call to Next() or Skip() will return an empty
- // string_view reflecting the EOF state.
- // Note that a call to `Seek()` resets `remaining` to a value based on the
- // end position of the chunk returned by that call.
- size_t remaining() const { return remaining_; }
-
- // Resets this instance to an empty value.
- void Reset() { navigator_.Reset(); }
-
- // Initializes this instance with `tree`. `tree` must not be null.
- // Returns a reference to the first data edge of the provided tree.
- y_absl::string_view Init(CordRepBtree* tree);
-
- // Navigates to and returns the next data edge of the referenced tree.
- // Returns an empty string_view if an attempt is made to read beyond the end
-// of the tree, i.e., if `remaining()` is zero, indicating an EOF condition.
- // Requires that the current instance is not empty.
- y_absl::string_view Next();
-
- // Skips the provided amount of bytes and returns a reference to the data
- // directly following the skipped bytes.
- y_absl::string_view Skip(size_t skip);
-
- // Reads `n` bytes into `tree`.
- // If `chunk_size` is zero, starts reading at the next data edge. If
-// `chunk_size` is non-zero, the read starts at the last `chunk_size` bytes of
- // the last returned data edge. Effectively, this means that the read starts
- // at offset `consumed() - chunk_size`.
- // Requires that `chunk_size` is less than or equal to the length of the
- // last returned data edge. The purpose of `chunk_size` is to simplify code
- // partially consuming a returned chunk and wanting to include the remaining
- // bytes in the Read call. For example, the below code will read 1000 bytes of
- // data into a cord tree if the first chunk starts with "big:":
- //
- // CordRepBtreeReader reader;
- // y_absl::string_view sv = reader.Init(tree);
- // if (y_absl::StartsWith(sv, "big:")) {
-//     CordRep* tree;
-//     sv = reader.Read(1000, sv.size() - 4 /* "big:" */, tree);
- // }
- //
- // This method will return an empty string view if all remaining data was
-// read. If `n` exceeds the amount of remaining data, this function will
- // return an empty string view and `tree` will be set to nullptr.
- // In both cases, `consumed` will be set to `length`.
- y_absl::string_view Read(size_t n, size_t chunk_size, CordRep*& tree);
-
- // Navigates to the chunk at offset `offset`.
-// Returns a reference into the navigated-to chunk, adjusted for the relative
- // position of `offset` into that chunk. For example, calling `Seek(13)` on a
- // cord tree containing 2 chunks of 10 and 20 bytes respectively will return
- // a string view into the second chunk starting at offset 3 with a size of 17.
- // Returns an empty string view if `offset` is equal to or greater than the
- // length of the referenced tree.
- y_absl::string_view Seek(size_t offset);
-
- private:
- size_t remaining_ = 0;
- CordRepBtreeNavigator navigator_;
-};
-
-inline size_t CordRepBtreeReader::length() const {
- assert(btree() != nullptr);
- return btree()->length;
-}
-
-inline y_absl::string_view CordRepBtreeReader::Init(CordRepBtree* tree) {
- assert(tree != nullptr);
- const CordRep* edge = navigator_.InitFirst(tree);
- remaining_ = tree->length - edge->length;
- return CordRepBtree::EdgeData(edge);
-}
-
-inline y_absl::string_view CordRepBtreeReader::Next() {
- if (remaining_ == 0) return {};
- const CordRep* edge = navigator_.Next();
- assert(edge != nullptr);
- remaining_ -= edge->length;
- return CordRepBtree::EdgeData(edge);
-}
-
-inline y_absl::string_view CordRepBtreeReader::Skip(size_t skip) {
- // As we are always positioned on the last 'consumed' edge, we
- // need to skip the current edge as well as `skip`.
- const size_t edge_length = navigator_.Current()->length;
- CordRepBtreeNavigator::Position pos = navigator_.Skip(skip + edge_length);
- if (ABSL_PREDICT_FALSE(pos.edge == nullptr)) {
- remaining_ = 0;
- return {};
- }
- // The combined length of all edges skipped before `pos.edge` is `skip -
- // pos.offset`, all of which are 'consumed', as well as the current edge.
- remaining_ -= skip - pos.offset + pos.edge->length;
- return CordRepBtree::EdgeData(pos.edge).substr(pos.offset);
-}
-
-inline y_absl::string_view CordRepBtreeReader::Seek(size_t offset) {
- const CordRepBtreeNavigator::Position pos = navigator_.Seek(offset);
- if (ABSL_PREDICT_FALSE(pos.edge == nullptr)) {
- remaining_ = 0;
- return {};
- }
- y_absl::string_view chunk = CordRepBtree::EdgeData(pos.edge).substr(pos.offset);
- remaining_ = length() - offset - chunk.length();
- return chunk;
-}
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
-
-#endif // ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_READER_H_
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_READER_H_
+#define ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_READER_H_
+
+#include <cassert>
+
+#include "y_absl/base/config.h"
+#include "y_absl/strings/internal/cord_internal.h"
+#include "y_absl/strings/internal/cord_rep_btree.h"
+#include "y_absl/strings/internal/cord_rep_btree_navigator.h"
+#include "y_absl/strings/internal/cord_rep_flat.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordRepBtreeReader implements logic to iterate over cord btrees.
+// References to the underlying data are returned as y_absl::string_view values.
+// The most typical use case is a forward only iteration over tree data.
+// The class also provides `Skip()`, `Seek()` and `Read()` methods similar to
+// CordRepBtreeNavigator that allow more advanced navigation.
+//
+// Example: iterate over all data inside a cord btree:
+//
+// CordRepBtreeReader reader;
+//   for (string_view sv = reader.Init(tree); !sv.empty(); sv = reader.Next()) {
+// DoSomethingWithDataIn(sv);
+// }
+//
+// All navigation methods always return the next 'chunk' of data. The class
+// assumes that all data is directly 'consumed' by the caller. For example,
+// invoking `Skip()` will skip the desired number of bytes, and then read and
+// return the chunk of data directly following the skipped bytes.
+//
+// Example: iterate over all data inside a btree skipping the first 100 bytes:
+//
+// CordRepBtreeReader reader;
+// y_absl::string_view sv = reader.Init(tree);
+// if (sv.length() > 100) {
+//     sv.remove_prefix(100);
+// } else {
+// sv = reader.Skip(100 - sv.length());
+// }
+// while (!sv.empty()) {
+// DoSomethingWithDataIn(sv);
+//     sv = reader.Next();
+// }
+//
+// Note that `remaining` is based on the end position of the last data edge
+// returned to the caller, not on the cumulative amount of data returned to
+// the caller, which can be less when skipping or seeking over data.
+//
+// For example, consider a cord btree with five data edges: "abc", "def", "ghi",
+// "jkl" and "mno":
+//
+// y_absl::string_view sv;
+// CordRepBtreeReader reader;
+//
+// sv = reader.Init(tree); // sv = "abc", remaining = 12
+// sv = reader.Skip(4); // sv = "hi", remaining = 6
+// sv = reader.Skip(2); // sv = "l", remaining = 3
+// sv = reader.Next(); // sv = "mno", remaining = 0
+// sv = reader.Seek(1); // sv = "bc", remaining = 12
+//
+class CordRepBtreeReader {
+ public:
+ using ReadResult = CordRepBtreeNavigator::ReadResult;
+ using Position = CordRepBtreeNavigator::Position;
+
+ // Returns true if this instance is not empty.
+ explicit operator bool() const { return navigator_.btree() != nullptr; }
+
+ // Returns the tree referenced by this instance or nullptr if empty.
+ CordRepBtree* btree() const { return navigator_.btree(); }
+
+ // Returns the current data edge inside the referenced btree.
+ // Requires that the current instance is not empty.
+ CordRep* node() const { return navigator_.Current(); }
+
+ // Returns the length of the referenced tree.
+ // Requires that the current instance is not empty.
+ size_t length() const;
+
+ // Returns the number of remaining bytes available for iteration, which is the
+ // number of bytes directly following the end of the last chunk returned.
+ // This value will be zero if we iterated over the last edge in the bound
+ // tree, in which case any call to Next() or Skip() will return an empty
+ // string_view reflecting the EOF state.
+ // Note that a call to `Seek()` resets `remaining` to a value based on the
+ // end position of the chunk returned by that call.
+ size_t remaining() const { return remaining_; }
+
+ // Resets this instance to an empty value.
+ void Reset() { navigator_.Reset(); }
+
+ // Initializes this instance with `tree`. `tree` must not be null.
+ // Returns a reference to the first data edge of the provided tree.
+ y_absl::string_view Init(CordRepBtree* tree);
+
+ // Navigates to and returns the next data edge of the referenced tree.
+ // Returns an empty string_view if an attempt is made to read beyond the end
+// of the tree, i.e., if `remaining()` is zero, indicating an EOF condition.
+ // Requires that the current instance is not empty.
+ y_absl::string_view Next();
+
+ // Skips the provided amount of bytes and returns a reference to the data
+ // directly following the skipped bytes.
+ y_absl::string_view Skip(size_t skip);
+
+ // Reads `n` bytes into `tree`.
+ // If `chunk_size` is zero, starts reading at the next data edge. If
+// `chunk_size` is non-zero, the read starts at the last `chunk_size` bytes of
+ // the last returned data edge. Effectively, this means that the read starts
+ // at offset `consumed() - chunk_size`.
+ // Requires that `chunk_size` is less than or equal to the length of the
+ // last returned data edge. The purpose of `chunk_size` is to simplify code
+ // partially consuming a returned chunk and wanting to include the remaining
+ // bytes in the Read call. For example, the below code will read 1000 bytes of
+ // data into a cord tree if the first chunk starts with "big:":
+ //
+ // CordRepBtreeReader reader;
+ // y_absl::string_view sv = reader.Init(tree);
+ // if (y_absl::StartsWith(sv, "big:")) {
+//     CordRep* tree;
+//     sv = reader.Read(1000, sv.size() - 4 /* "big:" */, tree);
+ // }
+ //
+ // This method will return an empty string view if all remaining data was
+// read. If `n` exceeds the amount of remaining data, this function will
+ // return an empty string view and `tree` will be set to nullptr.
+ // In both cases, `consumed` will be set to `length`.
+ y_absl::string_view Read(size_t n, size_t chunk_size, CordRep*& tree);
+
+ // Navigates to the chunk at offset `offset`.
+// Returns a reference into the navigated-to chunk, adjusted for the relative
+ // position of `offset` into that chunk. For example, calling `Seek(13)` on a
+ // cord tree containing 2 chunks of 10 and 20 bytes respectively will return
+ // a string view into the second chunk starting at offset 3 with a size of 17.
+ // Returns an empty string view if `offset` is equal to or greater than the
+ // length of the referenced tree.
+ y_absl::string_view Seek(size_t offset);
+
+ private:
+ size_t remaining_ = 0;
+ CordRepBtreeNavigator navigator_;
+};
+
+inline size_t CordRepBtreeReader::length() const {
+ assert(btree() != nullptr);
+ return btree()->length;
+}
+
+inline y_absl::string_view CordRepBtreeReader::Init(CordRepBtree* tree) {
+ assert(tree != nullptr);
+ const CordRep* edge = navigator_.InitFirst(tree);
+ remaining_ = tree->length - edge->length;
+ return CordRepBtree::EdgeData(edge);
+}
+
+inline y_absl::string_view CordRepBtreeReader::Next() {
+ if (remaining_ == 0) return {};
+ const CordRep* edge = navigator_.Next();
+ assert(edge != nullptr);
+ remaining_ -= edge->length;
+ return CordRepBtree::EdgeData(edge);
+}
+
+inline y_absl::string_view CordRepBtreeReader::Skip(size_t skip) {
+ // As we are always positioned on the last 'consumed' edge, we
+ // need to skip the current edge as well as `skip`.
+ const size_t edge_length = navigator_.Current()->length;
+ CordRepBtreeNavigator::Position pos = navigator_.Skip(skip + edge_length);
+ if (ABSL_PREDICT_FALSE(pos.edge == nullptr)) {
+ remaining_ = 0;
+ return {};
+ }
+ // The combined length of all edges skipped before `pos.edge` is `skip -
+ // pos.offset`, all of which are 'consumed', as well as the current edge.
+ remaining_ -= skip - pos.offset + pos.edge->length;
+ return CordRepBtree::EdgeData(pos.edge).substr(pos.offset);
+}
+
+inline y_absl::string_view CordRepBtreeReader::Seek(size_t offset) {
+ const CordRepBtreeNavigator::Position pos = navigator_.Seek(offset);
+ if (ABSL_PREDICT_FALSE(pos.edge == nullptr)) {
+ remaining_ = 0;
+ return {};
+ }
+ y_absl::string_view chunk = CordRepBtree::EdgeData(pos.edge).substr(pos.offset);
+ remaining_ = length() - offset - chunk.length();
+ return chunk;
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_READER_H_
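A minimal sketch of the forward-iteration pattern documented in the header above, assuming `tree` is a valid, non-null CordRepBtree*; the `CountBytes` helper name is illustrative only and not part of the library:

    // Sums the sizes of all chunks produced by the reader; the result should
    // equal tree->length. Hypothetical helper, for illustration only.
    size_t CountBytes(y_absl::cord_internal::CordRepBtree* tree) {
      y_absl::cord_internal::CordRepBtreeReader reader;
      size_t total = 0;
      for (y_absl::string_view sv = reader.Init(tree); !sv.empty();
           sv = reader.Next()) {
        total += sv.size();  // Each `sv` is a view into one data edge.
      }
      return total;
    }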
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_consume.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_consume.cc
index ffc0179e52..1cc83d7f26 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_consume.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_consume.cc
@@ -1,129 +1,129 @@
-// Copyright 2021 The Abseil Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "y_absl/strings/internal/cord_rep_consume.h"
-
-#include <array>
-#include <utility>
-
-#include "y_absl/container/inlined_vector.h"
-#include "y_absl/functional/function_ref.h"
-#include "y_absl/strings/internal/cord_internal.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-namespace {
-
-// Unrefs the provided `substring`, and returns `substring->child`
-// Adds or assumes a reference on `substring->child`
-CordRep* ClipSubstring(CordRepSubstring* substring) {
- CordRep* child = substring->child;
- if (substring->refcount.IsOne()) {
- delete substring;
- } else {
- CordRep::Ref(child);
- CordRep::Unref(substring);
- }
- return child;
-}
-
-// Unrefs the provided `concat`, and returns `{concat->left, concat->right}`
-// Adds or assumes a reference on `concat->left` and `concat->right`.
-// Returns an array of 2 elements containing the left and right nodes.
-std::array<CordRep*, 2> ClipConcat(CordRepConcat* concat) {
- std::array<CordRep*, 2> result{concat->left, concat->right};
- if (concat->refcount.IsOne()) {
- delete concat;
- } else {
- CordRep::Ref(result[0]);
- CordRep::Ref(result[1]);
- CordRep::Unref(concat);
- }
- return result;
-}
-
-void Consume(bool forward, CordRep* rep, ConsumeFn consume_fn) {
- size_t offset = 0;
- size_t length = rep->length;
- struct Entry {
- CordRep* rep;
- size_t offset;
- size_t length;
- };
- y_absl::InlinedVector<Entry, 40> stack;
-
- for (;;) {
- if (rep->tag == CONCAT) {
- std::array<CordRep*, 2> res = ClipConcat(rep->concat());
- CordRep* left = res[0];
- CordRep* right = res[1];
-
- if (left->length <= offset) {
- // Don't need left node
- offset -= left->length;
- CordRep::Unref(left);
- rep = right;
- continue;
- }
-
- size_t length_left = left->length - offset;
- if (length_left >= length) {
- // Don't need right node
- CordRep::Unref(right);
- rep = left;
- continue;
- }
-
- // Need both nodes
- size_t length_right = length - length_left;
- if (forward) {
- stack.push_back({right, 0, length_right});
- rep = left;
- length = length_left;
- } else {
- stack.push_back({left, offset, length_left});
- rep = right;
- offset = 0;
- length = length_right;
- }
- } else if (rep->tag == SUBSTRING) {
- offset += rep->substring()->start;
- rep = ClipSubstring(rep->substring());
- } else {
- consume_fn(rep, offset, length);
- if (stack.empty()) return;
-
- rep = stack.back().rep;
- offset = stack.back().offset;
- length = stack.back().length;
- stack.pop_back();
- }
- }
-}
-
-} // namespace
-
-void Consume(CordRep* rep, ConsumeFn consume_fn) {
- return Consume(true, rep, std::move(consume_fn));
-}
-
-void ReverseConsume(CordRep* rep, ConsumeFn consume_fn) {
- return Consume(false, rep, std::move(consume_fn));
-}
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "y_absl/strings/internal/cord_rep_consume.h"
+
+#include <array>
+#include <utility>
+
+#include "y_absl/container/inlined_vector.h"
+#include "y_absl/functional/function_ref.h"
+#include "y_absl/strings/internal/cord_internal.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+namespace {
+
+// Unrefs the provided `substring`, and returns `substring->child`
+// Adds or assumes a reference on `substring->child`
+CordRep* ClipSubstring(CordRepSubstring* substring) {
+ CordRep* child = substring->child;
+ if (substring->refcount.IsOne()) {
+ delete substring;
+ } else {
+ CordRep::Ref(child);
+ CordRep::Unref(substring);
+ }
+ return child;
+}
+
+// Unrefs the provided `concat`, and returns `{concat->left, concat->right}`
+// Adds or assumes a reference on `concat->left` and `concat->right`.
+// Returns an array of 2 elements containing the left and right nodes.
+std::array<CordRep*, 2> ClipConcat(CordRepConcat* concat) {
+ std::array<CordRep*, 2> result{concat->left, concat->right};
+ if (concat->refcount.IsOne()) {
+ delete concat;
+ } else {
+ CordRep::Ref(result[0]);
+ CordRep::Ref(result[1]);
+ CordRep::Unref(concat);
+ }
+ return result;
+}
+
+void Consume(bool forward, CordRep* rep, ConsumeFn consume_fn) {
+ size_t offset = 0;
+ size_t length = rep->length;
+ struct Entry {
+ CordRep* rep;
+ size_t offset;
+ size_t length;
+ };
+ y_absl::InlinedVector<Entry, 40> stack;
+
+ for (;;) {
+ if (rep->tag == CONCAT) {
+ std::array<CordRep*, 2> res = ClipConcat(rep->concat());
+ CordRep* left = res[0];
+ CordRep* right = res[1];
+
+ if (left->length <= offset) {
+ // Don't need left node
+ offset -= left->length;
+ CordRep::Unref(left);
+ rep = right;
+ continue;
+ }
+
+ size_t length_left = left->length - offset;
+ if (length_left >= length) {
+ // Don't need right node
+ CordRep::Unref(right);
+ rep = left;
+ continue;
+ }
+
+ // Need both nodes
+ size_t length_right = length - length_left;
+ if (forward) {
+ stack.push_back({right, 0, length_right});
+ rep = left;
+ length = length_left;
+ } else {
+ stack.push_back({left, offset, length_left});
+ rep = right;
+ offset = 0;
+ length = length_right;
+ }
+ } else if (rep->tag == SUBSTRING) {
+ offset += rep->substring()->start;
+ rep = ClipSubstring(rep->substring());
+ } else {
+ consume_fn(rep, offset, length);
+ if (stack.empty()) return;
+
+ rep = stack.back().rep;
+ offset = stack.back().offset;
+ length = stack.back().length;
+ stack.pop_back();
+ }
+ }
+}
+
+} // namespace
+
+void Consume(CordRep* rep, ConsumeFn consume_fn) {
+ return Consume(true, rep, std::move(consume_fn));
+}
+
+void ReverseConsume(CordRep* rep, ConsumeFn consume_fn) {
+ return Consume(false, rep, std::move(consume_fn));
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_consume.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_consume.h
index 7f6e5584f4..f6329df545 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_consume.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_consume.h
@@ -1,50 +1,50 @@
-// Copyright 2021 The Abseil Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#ifndef ABSL_STRINGS_INTERNAL_CORD_REP_CONSUME_H_
-#define ABSL_STRINGS_INTERNAL_CORD_REP_CONSUME_H_
-
-#include <functional>
-
-#include "y_absl/functional/function_ref.h"
-#include "y_absl/strings/internal/cord_internal.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-// Functor for the Consume() and ReverseConsume() functions:
-// void ConsumeFunc(CordRep* rep, size_t offset, size_t length);
-// See the Consume() and ReverseConsume() function comments for documentation.
-using ConsumeFn = FunctionRef<void(CordRep*, size_t, size_t)>;
-
-// Consume() and ReverseConsume() consume CONCAT based trees and invoke the
-// provided functor with the contained nodes in the proper forward or reverse
-// order, which is used to convert CONCAT trees into other tree or cord data.
-// All CONCAT and SUBSTRING nodes are processed internally. The `offset`
-// parameter of the functor is non-zero for any nodes below SUBSTRING nodes.
-// It's up to the caller to form these back into SUBSTRING nodes or otherwise
-// store offset / prefix information. These functions are intended to be used
-// only for migration / transitional code where, due to factors such as ODR
-// violations, we cannot 100% guarantee that all code respects 'new format'
-// settings and flags, so we need to be able to parse old data on the fly until
-// all old code is deprecated / no longer the default format.
-void Consume(CordRep* rep, ConsumeFn consume_fn);
-void ReverseConsume(CordRep* rep, ConsumeFn consume_fn);
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
-
-#endif // ABSL_STRINGS_INTERNAL_CORD_REP_CONSUME_H_
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORD_REP_CONSUME_H_
+#define ABSL_STRINGS_INTERNAL_CORD_REP_CONSUME_H_
+
+#include <functional>
+
+#include "y_absl/functional/function_ref.h"
+#include "y_absl/strings/internal/cord_internal.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// Functor for the Consume() and ReverseConsume() functions:
+// void ConsumeFunc(CordRep* rep, size_t offset, size_t length);
+// See the Consume() and ReverseConsume() function comments for documentation.
+using ConsumeFn = FunctionRef<void(CordRep*, size_t, size_t)>;
+
+// Consume() and ReverseConsume() consume CONCAT based trees and invoke the
+// provided functor with the contained nodes in the proper forward or reverse
+// order, which is used to convert CONCAT trees into other tree or cord data.
+// All CONCAT and SUBSTRING nodes are processed internally. The `offset`
+// parameter of the functor is non-zero for any nodes below SUBSTRING nodes.
+// It's up to the caller to form these back into SUBSTRING nodes or otherwise
+// store offset / prefix information. These functions are intended to be used
+// only for migration / transitional code where, due to factors such as ODR
+// violations, we cannot 100% guarantee that all code respects 'new format'
+// settings and flags, so we need to be able to parse old data on the fly until
+// all old code is deprecated / no longer the default format.
+void Consume(CordRep* rep, ConsumeFn consume_fn);
+void ReverseConsume(CordRep* rep, ConsumeFn consume_fn);
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORD_REP_CONSUME_H_
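A hedged sketch of the consume contract described above: the functor effectively takes ownership of each leaf it is handed, so it must either store the node or unref it. The `FlattenToString` helper below is illustrative only and assumes the tree consists solely of CONCAT, SUBSTRING, FLAT and EXTERNAL nodes:

    // Flattens a CONCAT-based tree into a TString. Not part of the library.
    TString FlattenToString(y_absl::cord_internal::CordRep* rep) {
      using y_absl::cord_internal::CordRep;
      TString out;
      out.reserve(rep->length);
      y_absl::cord_internal::Consume(
          rep, [&out](CordRep* leaf, size_t offset, size_t length) {
            if (leaf->tag >= y_absl::cord_internal::FLAT) {
              out.append(leaf->flat()->Data() + offset, length);
            } else {  // Assumed to be EXTERNAL in this sketch.
              out.append(leaf->external()->base + offset, length);
            }
            CordRep::Unref(leaf);  // Consume() transferred ownership to us.
          });
      return out;
    }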
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_flat.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_flat.h
index 976613031c..74841c5955 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_flat.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_flat.h
@@ -44,11 +44,11 @@ static constexpr size_t kMaxFlatLength = kMaxFlatSize - kFlatOverhead;
static constexpr size_t kMinFlatLength = kMinFlatSize - kFlatOverhead;
constexpr uint8_t AllocatedSizeToTagUnchecked(size_t size) {
- return static_cast<uint8_t>((size <= 1024) ? size / 8 + 1
- : 129 + size / 32 - 1024 / 32);
+ return static_cast<uint8_t>((size <= 1024) ? size / 8 + 1
+ : 129 + size / 32 - 1024 / 32);
}
-static_assert(kMinFlatSize / 8 + 1 >= FLAT, "");
+static_assert(kMinFlatSize / 8 + 1 >= FLAT, "");
static_assert(AllocatedSizeToTagUnchecked(kMaxFlatSize) <= MAX_FLAT_TAG, "");
// Helper functions for rounded div, and rounding to exact sizes.
@@ -73,7 +73,7 @@ inline uint8_t AllocatedSizeToTag(size_t size) {
// Converts the provided tag to the corresponding allocated size
constexpr size_t TagToAllocatedSize(uint8_t tag) {
- return (tag <= 129) ? ((tag - 1) * 8) : (1024 + (tag - 129) * 32);
+ return (tag <= 129) ? ((tag - 1) * 8) : (1024 + (tag - 129) * 32);
}
// Converts the provided tag to the corresponding available data length
@@ -82,7 +82,7 @@ constexpr size_t TagToLength(uint8_t tag) {
}
// Enforce that kMaxFlatSize maps to a well-known exact tag value.
-static_assert(TagToAllocatedSize(225) == kMaxFlatSize, "Bad tag logic");
+static_assert(TagToAllocatedSize(225) == kMaxFlatSize, "Bad tag logic");
struct CordRepFlat : public CordRep {
// Creates a new flat node.
@@ -118,8 +118,8 @@ struct CordRepFlat : public CordRep {
}
// Returns a pointer to the data inside this flat rep.
- char* Data() { return reinterpret_cast<char*>(storage); }
- const char* Data() const { return reinterpret_cast<const char*>(storage); }
+ char* Data() { return reinterpret_cast<char*>(storage); }
+ const char* Data() const { return reinterpret_cast<const char*>(storage); }
// Returns the maximum capacity (payload size) of this instance.
size_t Capacity() const { return TagToLength(tag); }
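The tag arithmetic shown in these hunks encodes allocated sizes in two bands: multiples of 8 up to 1024 bytes, then multiples of 32 up to kMaxFlatSize. A few spot checks of the round trip, assuming this revision's constants (kMaxFlatSize of 4096, as implied by the static_assert above); these asserts are illustrative, not lines from the file:

    static_assert(AllocatedSizeToTagUnchecked(32) == 5, "8-byte band");
    static_assert(AllocatedSizeToTagUnchecked(1024) == 129, "band boundary");
    static_assert(TagToAllocatedSize(129) == 1024, "round trip at boundary");
    static_assert(AllocatedSizeToTagUnchecked(4096) == 225, "32-byte band");
    static_assert(TagToAllocatedSize(225) == 4096, "matches kMaxFlatSize");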
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring.cc
index 06c7e75bd8..be7703d4ab 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring.cc
@@ -26,7 +26,7 @@
#include "y_absl/base/macros.h"
#include "y_absl/container/inlined_vector.h"
#include "y_absl/strings/internal/cord_internal.h"
-#include "y_absl/strings/internal/cord_rep_consume.h"
+#include "y_absl/strings/internal/cord_rep_consume.h"
#include "y_absl/strings/internal/cord_rep_flat.h"
namespace y_absl {
@@ -40,7 +40,7 @@ using index_type = CordRepRing::index_type;
enum class Direction { kForward, kReversed };
inline bool IsFlatOrExternal(CordRep* rep) {
- return rep->IsFlat() || rep->IsExternal();
+ return rep->IsFlat() || rep->IsExternal();
}
// Verifies that n + extra <= kMaxCapacity: throws std::length_error otherwise.
@@ -178,7 +178,7 @@ bool CordRepRing::IsValid(std::ostream& output) const {
if (offset >= child->length || entry_length > child->length - offset) {
output << "entry[" << head << "] has offset " << offset
<< " and entry length " << entry_length
- << " which are outside of the child's length of " << child->length;
+ << " which are outside of the child's length of " << child->length;
return false;
}
@@ -229,7 +229,7 @@ void CordRepRing::SetCapacityForTesting(size_t capacity) {
}
void CordRepRing::Delete(CordRepRing* rep) {
- assert(rep != nullptr && rep->IsRing());
+ assert(rep != nullptr && rep->IsRing());
#if defined(__cpp_sized_deallocation)
size_t size = AllocSize(rep->capacity_);
rep->~CordRepRing();
@@ -277,11 +277,11 @@ CordRepRing* CordRepRing::Mutable(CordRepRing* rep, size_t extra) {
// Get current number of entries, and check for max capacity.
size_t entries = rep->entries();
- if (!rep->refcount.IsMutable()) {
- return Copy(rep, rep->head(), rep->tail(), extra);
+ if (!rep->refcount.IsMutable()) {
+ return Copy(rep, rep->head(), rep->tail(), extra);
} else if (entries + extra > rep->capacity()) {
- const size_t min_grow = rep->capacity() + rep->capacity() / 2;
- const size_t min_extra = (std::max)(extra, min_grow - entries);
+ const size_t min_grow = rep->capacity() + rep->capacity() / 2;
+ const size_t min_extra = (std::max)(extra, min_grow - entries);
CordRepRing* newrep = CordRepRing::New(entries, min_extra);
newrep->Fill<false>(rep, rep->head(), rep->tail());
CordRepRing::Delete(rep);
@@ -292,10 +292,10 @@ CordRepRing* CordRepRing::Mutable(CordRepRing* rep, size_t extra) {
}
Span<char> CordRepRing::GetAppendBuffer(size_t size) {
- assert(refcount.IsMutable());
+ assert(refcount.IsMutable());
index_type back = retreat(tail_);
CordRep* child = entry_child(back);
- if (child->tag >= FLAT && child->refcount.IsMutable()) {
+ if (child->tag >= FLAT && child->refcount.IsMutable()) {
size_t capacity = child->flat()->Capacity();
pos_type end_pos = entry_end_pos(back);
size_t data_offset = entry_data_offset(back);
@@ -312,10 +312,10 @@ Span<char> CordRepRing::GetAppendBuffer(size_t size) {
}
Span<char> CordRepRing::GetPrependBuffer(size_t size) {
- assert(refcount.IsMutable());
+ assert(refcount.IsMutable());
CordRep* child = entry_child(head_);
size_t data_offset = entry_data_offset(head_);
- if (data_offset && child->refcount.IsMutable() && child->tag >= FLAT) {
+ if (data_offset && child->refcount.IsMutable() && child->tag >= FLAT) {
size_t n = (std::min)(data_offset, size);
this->length += n;
begin_pos_ -= n;
@@ -327,12 +327,12 @@ Span<char> CordRepRing::GetPrependBuffer(size_t size) {
}
CordRepRing* CordRepRing::CreateFromLeaf(CordRep* child, size_t offset,
- size_t len, size_t extra) {
+ size_t len, size_t extra) {
CordRepRing* rep = CordRepRing::New(1, extra);
rep->head_ = 0;
rep->tail_ = rep->advance(0);
- rep->length = len;
- rep->entry_end_pos()[0] = len;
+ rep->length = len;
+ rep->entry_end_pos()[0] = len;
rep->entry_child()[0] = child;
rep->entry_data_offset()[0] = static_cast<offset_type>(offset);
return Validate(rep);
@@ -340,16 +340,16 @@ CordRepRing* CordRepRing::CreateFromLeaf(CordRep* child, size_t offset,
CordRepRing* CordRepRing::CreateSlow(CordRep* child, size_t extra) {
CordRepRing* rep = nullptr;
- Consume(child, [&](CordRep* child_arg, size_t offset, size_t len) {
- if (IsFlatOrExternal(child_arg)) {
- rep = rep ? AppendLeaf(rep, child_arg, offset, len)
- : CreateFromLeaf(child_arg, offset, len, extra);
+ Consume(child, [&](CordRep* child_arg, size_t offset, size_t len) {
+ if (IsFlatOrExternal(child_arg)) {
+ rep = rep ? AppendLeaf(rep, child_arg, offset, len)
+ : CreateFromLeaf(child_arg, offset, len, extra);
} else if (rep) {
- rep = AddRing<AddMode::kAppend>(rep, child_arg->ring(), offset, len);
- } else if (offset == 0 && child_arg->length == len) {
- rep = Mutable(child_arg->ring(), extra);
+ rep = AddRing<AddMode::kAppend>(rep, child_arg->ring(), offset, len);
+ } else if (offset == 0 && child_arg->length == len) {
+ rep = Mutable(child_arg->ring(), extra);
} else {
- rep = SubRing(child_arg->ring(), offset, len, extra);
+ rep = SubRing(child_arg->ring(), offset, len, extra);
}
});
return Validate(rep, nullptr, __LINE__);
@@ -360,7 +360,7 @@ CordRepRing* CordRepRing::Create(CordRep* child, size_t extra) {
if (IsFlatOrExternal(child)) {
return CreateFromLeaf(child, 0, length, extra);
}
- if (child->IsRing()) {
+ if (child->IsRing()) {
return Mutable(child->ring(), extra);
}
return CreateSlow(child, extra);
@@ -368,18 +368,18 @@ CordRepRing* CordRepRing::Create(CordRep* child, size_t extra) {
template <CordRepRing::AddMode mode>
CordRepRing* CordRepRing::AddRing(CordRepRing* rep, CordRepRing* ring,
- size_t offset, size_t len) {
+ size_t offset, size_t len) {
assert(offset < ring->length);
constexpr bool append = mode == AddMode::kAppend;
Position head = ring->Find(offset);
- Position tail = ring->FindTail(head.index, offset + len);
+ Position tail = ring->FindTail(head.index, offset + len);
const index_type entries = ring->entries(head.index, tail.index);
rep = Mutable(rep, entries);
// The delta for making ring[head].end_pos into 'len - offset'
const pos_type delta_length =
- (append ? rep->begin_pos_ + rep->length : rep->begin_pos_ - len) -
+ (append ? rep->begin_pos_ + rep->length : rep->begin_pos_ - len) -
ring->entry_begin_pos(head.index) - head.offset;
// Start filling at `tail`, or `entries` before `head`
@@ -420,36 +420,36 @@ CordRepRing* CordRepRing::AddRing(CordRepRing* rep, CordRepRing* ring,
}
// Commit changes
- rep->length += len;
+ rep->length += len;
if (append) {
rep->tail_ = filler.pos();
} else {
rep->head_ = filler.head();
- rep->begin_pos_ -= len;
+ rep->begin_pos_ -= len;
}
return Validate(rep);
}
CordRepRing* CordRepRing::AppendSlow(CordRepRing* rep, CordRep* child) {
- Consume(child, [&rep](CordRep* child_arg, size_t offset, size_t len) {
- if (child_arg->IsRing()) {
- rep = AddRing<AddMode::kAppend>(rep, child_arg->ring(), offset, len);
+ Consume(child, [&rep](CordRep* child_arg, size_t offset, size_t len) {
+ if (child_arg->IsRing()) {
+ rep = AddRing<AddMode::kAppend>(rep, child_arg->ring(), offset, len);
} else {
- rep = AppendLeaf(rep, child_arg, offset, len);
+ rep = AppendLeaf(rep, child_arg, offset, len);
}
});
return rep;
}
CordRepRing* CordRepRing::AppendLeaf(CordRepRing* rep, CordRep* child,
- size_t offset, size_t len) {
+ size_t offset, size_t len) {
rep = Mutable(rep, 1);
index_type back = rep->tail_;
const pos_type begin_pos = rep->begin_pos_ + rep->length;
rep->tail_ = rep->advance(rep->tail_);
- rep->length += len;
- rep->entry_end_pos()[back] = begin_pos + len;
+ rep->length += len;
+ rep->entry_end_pos()[back] = begin_pos + len;
rep->entry_child()[back] = child;
rep->entry_data_offset()[back] = static_cast<offset_type>(offset);
return Validate(rep, nullptr, __LINE__);
@@ -460,31 +460,31 @@ CordRepRing* CordRepRing::Append(CordRepRing* rep, CordRep* child) {
if (IsFlatOrExternal(child)) {
return AppendLeaf(rep, child, 0, length);
}
- if (child->IsRing()) {
+ if (child->IsRing()) {
return AddRing<AddMode::kAppend>(rep, child->ring(), 0, length);
}
return AppendSlow(rep, child);
}
CordRepRing* CordRepRing::PrependSlow(CordRepRing* rep, CordRep* child) {
- ReverseConsume(child, [&](CordRep* child_arg, size_t offset, size_t len) {
- if (IsFlatOrExternal(child_arg)) {
- rep = PrependLeaf(rep, child_arg, offset, len);
+ ReverseConsume(child, [&](CordRep* child_arg, size_t offset, size_t len) {
+ if (IsFlatOrExternal(child_arg)) {
+ rep = PrependLeaf(rep, child_arg, offset, len);
} else {
- rep = AddRing<AddMode::kPrepend>(rep, child_arg->ring(), offset, len);
+ rep = AddRing<AddMode::kPrepend>(rep, child_arg->ring(), offset, len);
}
});
return Validate(rep);
}
CordRepRing* CordRepRing::PrependLeaf(CordRepRing* rep, CordRep* child,
- size_t offset, size_t len) {
+ size_t offset, size_t len) {
rep = Mutable(rep, 1);
index_type head = rep->retreat(rep->head_);
pos_type end_pos = rep->begin_pos_;
rep->head_ = head;
- rep->length += len;
- rep->begin_pos_ -= len;
+ rep->length += len;
+ rep->begin_pos_ -= len;
rep->entry_end_pos()[head] = end_pos;
rep->entry_child()[head] = child;
rep->entry_data_offset()[head] = static_cast<offset_type>(offset);
@@ -496,7 +496,7 @@ CordRepRing* CordRepRing::Prepend(CordRepRing* rep, CordRep* child) {
if (IsFlatOrExternal(child)) {
return PrependLeaf(rep, child, 0, length);
}
- if (child->IsRing()) {
+ if (child->IsRing()) {
return AddRing<AddMode::kPrepend>(rep, child->ring(), 0, length);
}
return PrependSlow(rep, child);
@@ -504,7 +504,7 @@ CordRepRing* CordRepRing::Prepend(CordRepRing* rep, CordRep* child) {
CordRepRing* CordRepRing::Append(CordRepRing* rep, y_absl::string_view data,
size_t extra) {
- if (rep->refcount.IsMutable()) {
+ if (rep->refcount.IsMutable()) {
Span<char> avail = rep->GetAppendBuffer(data.length());
if (!avail.empty()) {
memcpy(avail.data(), data.data(), avail.length());
@@ -538,7 +538,7 @@ CordRepRing* CordRepRing::Append(CordRepRing* rep, y_absl::string_view data,
CordRepRing* CordRepRing::Prepend(CordRepRing* rep, y_absl::string_view data,
size_t extra) {
- if (rep->refcount.IsMutable()) {
+ if (rep->refcount.IsMutable()) {
Span<char> avail = rep->GetPrependBuffer(data.length());
if (!avail.empty()) {
const char* tail = data.data() + data.length() - avail.length();
@@ -664,21 +664,21 @@ char CordRepRing::GetCharacter(size_t offset) const {
}
CordRepRing* CordRepRing::SubRing(CordRepRing* rep, size_t offset,
- size_t len, size_t extra) {
+ size_t len, size_t extra) {
assert(offset <= rep->length);
- assert(offset <= rep->length - len);
+ assert(offset <= rep->length - len);
- if (len == 0) {
+ if (len == 0) {
CordRep::Unref(rep);
return nullptr;
}
// Find position of first byte
Position head = rep->Find(offset);
- Position tail = rep->FindTail(head.index, offset + len);
+ Position tail = rep->FindTail(head.index, offset + len);
const size_t new_entries = rep->entries(head.index, tail.index);
- if (rep->refcount.IsMutable() && extra <= (rep->capacity() - new_entries)) {
+ if (rep->refcount.IsMutable() && extra <= (rep->capacity() - new_entries)) {
   // We adopt a privately owned rep, and no extra entries are needed.
if (head.index != rep->head_) UnrefEntries(rep, rep->head_, head.index);
if (tail.index != rep->tail_) UnrefEntries(rep, tail.index, rep->tail_);
@@ -692,7 +692,7 @@ CordRepRing* CordRepRing::SubRing(CordRepRing* rep, size_t offset,
}
// Adjust begin_pos and length
- rep->length = len;
+ rep->length = len;
rep->begin_pos_ += offset;
// Adjust head and tail blocks
@@ -715,7 +715,7 @@ CordRepRing* CordRepRing::RemovePrefix(CordRepRing* rep, size_t len,
}
Position head = rep->Find(len);
- if (rep->refcount.IsMutable()) {
+ if (rep->refcount.IsMutable()) {
if (head.index != rep->head_) UnrefEntries(rep, rep->head_, head.index);
rep->head_ = head.index;
} else {
@@ -745,7 +745,7 @@ CordRepRing* CordRepRing::RemoveSuffix(CordRepRing* rep, size_t len,
}
Position tail = rep->FindTail(rep->length - len);
- if (rep->refcount.IsMutable()) {
+ if (rep->refcount.IsMutable()) {
   // We adopt a privately owned rep and scrub the now-unused entries.
if (tail.index != rep->tail_) UnrefEntries(rep, tail.index, rep->tail_);
rep->tail_ = tail.index;
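The Mutable() hunk above shows the copy-on-write policy: a shared ring is copied, while a privately owned ring that lacks capacity grows by at least half its current capacity. A hedged sketch of that arithmetic in isolation; the `GrownCapacity` helper is hypothetical and merely mirrors the capacity request Mutable() passes to CordRepRing::New():

    #include <algorithm>  // for (std::max)
    #include <cstddef>

    // Returns the total entry capacity Mutable() would request. Sketch only.
    size_t GrownCapacity(size_t capacity, size_t entries, size_t extra) {
      const size_t min_grow = capacity + capacity / 2;  // At least 1.5x growth.
      const size_t min_extra = (std::max)(extra, min_grow - entries);
      return entries + min_extra;  // New(entries, min_extra) allocates this.
    }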
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring.h
index 5f9784d8da..30018aa3d5 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring.h
@@ -201,23 +201,23 @@ class CordRepRing : public CordRep {
// referencing up to `size` capacity directly before the existing data.
Span<char> GetPrependBuffer(size_t size);
- // Returns a cord ring buffer containing `len` bytes of data starting at
+ // Returns a cord ring buffer containing `len` bytes of data starting at
// `offset`. If the input is not shared, this function will remove all head
// and tail child nodes outside of the requested range, and adjust the new
// head and tail nodes as required. If the input is shared, this function
// returns a new instance sharing some or all of the nodes from the input.
- static CordRepRing* SubRing(CordRepRing* r, size_t offset, size_t len,
+ static CordRepRing* SubRing(CordRepRing* r, size_t offset, size_t len,
size_t extra = 0);
-  // Returns a cord ring buffer with the last `len` bytes removed.
+  // Returns a cord ring buffer with the last `len` bytes removed.
   // If the input is not shared, this function will remove all tail child nodes
   // fully inside the last `len` bytes, and adjust the new tail as required.
// If the input is shared, this function returns a new instance sharing some
// or all of the nodes from the input.
- static CordRepRing* RemoveSuffix(CordRepRing* r, size_t len,
+ static CordRepRing* RemoveSuffix(CordRepRing* r, size_t len,
size_t extra = 0);
-  // Returns a cord ring buffer with the first `len` bytes removed.
+  // Returns a cord ring buffer with the first `len` bytes removed.
   // If the input is not shared, this function will remove all head child nodes
   // fully inside the first `len` bytes, and adjust the new head as required.
// If the input is shared, this function returns a new instance sharing some
@@ -228,18 +228,18 @@ class CordRepRing : public CordRep {
// Returns the character at `offset`. Requires that `offset < length`.
char GetCharacter(size_t offset) const;
- // Returns true if this instance manages a single contiguous buffer, in which
- // case the (optional) output parameter `fragment` is set. Otherwise, the
- // function returns false, and `fragment` is left unchanged.
- bool IsFlat(y_absl::string_view* fragment) const;
-
- // Returns true if the data starting at `offset` with length `len` is
- // managed by this instance inside a single contiguous buffer, in which case
- // the (optional) output parameter `fragment` is set to the contiguous memory
- // starting at offset `offset` with length `length`. Otherwise, the function
- // returns false, and `fragment` is left unchanged.
- bool IsFlat(size_t offset, size_t len, y_absl::string_view* fragment) const;
-
+ // Returns true if this instance manages a single contiguous buffer, in which
+ // case the (optional) output parameter `fragment` is set. Otherwise, the
+ // function returns false, and `fragment` is left unchanged.
+ bool IsFlat(y_absl::string_view* fragment) const;
+
+ // Returns true if the data starting at `offset` with length `len` is
+ // managed by this instance inside a single contiguous buffer, in which case
+ // the (optional) output parameter `fragment` is set to the contiguous memory
+  // starting at offset `offset` with length `len`. Otherwise, the function
+ // returns false, and `fragment` is left unchanged.
+ bool IsFlat(size_t offset, size_t len, y_absl::string_view* fragment) const;
+
// Testing only: set capacity to requested capacity.
void SetCapacityForTesting(size_t capacity);
@@ -383,8 +383,8 @@ class CordRepRing : public CordRep {
// Destroys the provided ring buffer, decrementing the reference count of all
   // contained child CordReps. The provided `rep` should have a ref count of
- // one (pre decrement destroy call observing `refcount.IsOne()`) or zero
- // (post decrement destroy call observing `!refcount.Decrement()`).
+ // one (pre decrement destroy call observing `refcount.IsOne()`) or zero
+ // (post decrement destroy call observing `!refcount.Decrement()`).
static void Destroy(CordRepRing* rep);
// Returns a mutable reference to the logical end position array.
@@ -464,10 +464,10 @@ class CordRepRing : public CordRep {
size_t length, size_t extra);
   // Appends or prepends (depending on AddMode) the ring buffer in `ring` to
- // `rep` starting at `offset` with length `len`.
+ // `rep` starting at `offset` with length `len`.
template <AddMode mode>
static CordRepRing* AddRing(CordRepRing* rep, CordRepRing* ring,
- size_t offset, size_t len);
+ size_t offset, size_t len);
// Increases the data offset for entry `index` by `n`.
void AddDataOffset(index_type index, size_t n);
@@ -570,34 +570,34 @@ inline CordRepRing::Position CordRepRing::FindTail(index_type head,
// Now that CordRepRing is defined, we can define CordRep's helper casts:
inline CordRepRing* CordRep::ring() {
- assert(IsRing());
+ assert(IsRing());
return static_cast<CordRepRing*>(this);
}
inline const CordRepRing* CordRep::ring() const {
- assert(IsRing());
+ assert(IsRing());
return static_cast<const CordRepRing*>(this);
}
-inline bool CordRepRing::IsFlat(y_absl::string_view* fragment) const {
- if (entries() == 1) {
- if (fragment) *fragment = entry_data(head());
- return true;
- }
- return false;
-}
-
-inline bool CordRepRing::IsFlat(size_t offset, size_t len,
- y_absl::string_view* fragment) const {
- const Position pos = Find(offset);
- const y_absl::string_view data = entry_data(pos.index);
- if (data.length() >= len && data.length() - len >= pos.offset) {
- if (fragment) *fragment = data.substr(pos.offset, len);
- return true;
- }
- return false;
-}
-
+inline bool CordRepRing::IsFlat(y_absl::string_view* fragment) const {
+ if (entries() == 1) {
+ if (fragment) *fragment = entry_data(head());
+ return true;
+ }
+ return false;
+}
+
+inline bool CordRepRing::IsFlat(size_t offset, size_t len,
+ y_absl::string_view* fragment) const {
+ const Position pos = Find(offset);
+ const y_absl::string_view data = entry_data(pos.index);
+ if (data.length() >= len && data.length() - len >= pos.offset) {
+ if (fragment) *fragment = data.substr(pos.offset, len);
+ return true;
+ }
+ return false;
+}
+
std::ostream& operator<<(std::ostream& s, const CordRepRing& rep);
} // namespace cord_internal
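As a usage sketch for the IsFlat() overloads restored above: callers probe for a contiguous view first, and only fall back to copying when the range spans multiple entries. The `ReadRange` helper is illustrative only:

    // Copies `len` bytes starting at `offset` out of `ring`, taking the
    // zero-copy path when the range lies within a single child buffer.
    TString ReadRange(const y_absl::cord_internal::CordRepRing* ring,
                      size_t offset, size_t len) {
      y_absl::string_view fragment;
      if (ring->IsFlat(offset, len, &fragment)) {
        return TString(fragment.data(), fragment.size());  // Single buffer.
      }
      TString out;
      out.reserve(len);
      for (size_t i = 0; i < len; ++i) {
        out += ring->GetCharacter(offset + i);  // Slow per-byte fallback.
      }
      return out;
    }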
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring_reader.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring_reader.h
index 3f64d04fae..0ce03abdac 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring_reader.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_ring_reader.h
@@ -40,10 +40,10 @@ class CordRepRingReader {
// The returned value is undefined if this instance is empty.
CordRepRing::index_type index() const { return index_; }
- // Returns the current node inside the ring buffer for this instance.
- // The returned value is undefined if this instance is empty.
- CordRep* node() const { return ring_->entry_child(index_); }
-
+ // Returns the current node inside the ring buffer for this instance.
+ // The returned value is undefined if this instance is empty.
+ CordRep* node() const { return ring_->entry_child(index_); }
+
// Returns the length of the referenced ring buffer.
// Requires the current instance to be non empty.
size_t length() const {
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_test_util.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_test_util.h
index 98dcc0d649..3aa041449b 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_test_util.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cord_rep_test_util.h
@@ -1,220 +1,220 @@
-// Copyright 2021 The Abseil Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#ifndef ABSL_STRINGS_INTERNAL_CORD_REP_TEST_UTIL_H_
-#define ABSL_STRINGS_INTERNAL_CORD_REP_TEST_UTIL_H_
-
-#include <cassert>
-#include <memory>
-#include <random>
-#include <util/generic/string.h>
-#include <vector>
-
-#include "y_absl/base/config.h"
-#include "y_absl/base/internal/raw_logging.h"
-#include "y_absl/strings/internal/cord_internal.h"
-#include "y_absl/strings/internal/cord_rep_btree.h"
-#include "y_absl/strings/internal/cord_rep_flat.h"
-#include "y_absl/strings/string_view.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cordrep_testing {
-
-inline cord_internal::CordRepSubstring* MakeSubstring(
- size_t start, size_t len, cord_internal::CordRep* rep) {
- auto* sub = new cord_internal::CordRepSubstring;
- sub->tag = cord_internal::SUBSTRING;
- sub->start = start;
- sub->length = len <= 0 ? rep->length - start + len : len;
- sub->child = rep;
- return sub;
-}
-
-inline cord_internal::CordRepConcat* MakeConcat(cord_internal::CordRep* left,
- cord_internal::CordRep* right,
- int depth = 0) {
- auto* concat = new cord_internal::CordRepConcat;
- concat->tag = cord_internal::CONCAT;
- concat->length = left->length + right->length;
- concat->left = left;
- concat->right = right;
- concat->set_depth(depth);
- return concat;
-}
-
-inline cord_internal::CordRepFlat* MakeFlat(y_absl::string_view value) {
- assert(value.length() <= cord_internal::kMaxFlatLength);
- auto* flat = cord_internal::CordRepFlat::New(value.length());
- flat->length = value.length();
- memcpy(flat->Data(), value.data(), value.length());
- return flat;
-}
-
-// Creates an external node for testing
-inline cord_internal::CordRepExternal* MakeExternal(y_absl::string_view s) {
- struct Rep : public cord_internal::CordRepExternal {
- TString s;
- explicit Rep(y_absl::string_view sv) : s(sv) {
- this->tag = cord_internal::EXTERNAL;
- this->base = s.data();
- this->length = s.length();
- this->releaser_invoker = [](cord_internal::CordRepExternal* self) {
- delete static_cast<Rep*>(self);
- };
- }
- };
- return new Rep(s);
-}
-
-inline TString CreateRandomString(size_t n) {
- y_absl::string_view data =
- "abcdefghijklmnopqrstuvwxyz"
- "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
- "0123456789~!@#$%^&*()_+=-<>?:\"{}[]|";
- std::minstd_rand rnd;
- std::uniform_int_distribution<size_t> dist(0, data.size() - 1);
- TString s(n, ' ');
- for (size_t i = 0; i < n; ++i) {
- s[i] = data[dist(rnd)];
- }
- return s;
-}
-
-// Creates an array of flats from the provided string, chopping
-// the provided string up into flats of size `chunk_size` characters
-// resulting in roughly `data.size() / chunk_size` total flats.
-inline std::vector<cord_internal::CordRep*> CreateFlatsFromString(
- y_absl::string_view data, size_t chunk_size) {
- assert(chunk_size > 0);
- std::vector<cord_internal::CordRep*> flats;
-  for (y_absl::string_view s = data; !s.empty(); s.remove_prefix(chunk_size < s.size() ? chunk_size : s.size())) {
- flats.push_back(MakeFlat(s.substr(0, chunk_size)));
- }
- return flats;
-}
-
-inline cord_internal::CordRepBtree* CordRepBtreeFromFlats(
- y_absl::Span<cord_internal::CordRep* const> flats) {
- assert(!flats.empty());
- auto* node = cord_internal::CordRepBtree::Create(flats[0]);
- for (size_t i = 1; i < flats.size(); ++i) {
- node = cord_internal::CordRepBtree::Append(node, flats[i]);
- }
- return node;
-}
-
-template <typename Fn>
-inline void CordVisitReps(cord_internal::CordRep* rep, Fn&& fn) {
- fn(rep);
- while (rep->tag == cord_internal::SUBSTRING) {
- rep = rep->substring()->child;
- fn(rep);
- }
- if (rep->tag == cord_internal::BTREE) {
- for (cord_internal::CordRep* edge : rep->btree()->Edges()) {
- CordVisitReps(edge, fn);
- }
- } else if (rep->tag == cord_internal::CONCAT) {
- CordVisitReps(rep->concat()->left, fn);
- CordVisitReps(rep->concat()->right, fn);
- }
-}
-
-template <typename Predicate>
-inline std::vector<cord_internal::CordRep*> CordCollectRepsIf(
- Predicate&& predicate, cord_internal::CordRep* rep) {
- std::vector<cord_internal::CordRep*> reps;
- CordVisitReps(rep, [&reps, &predicate](cord_internal::CordRep* rep) {
- if (predicate(rep)) reps.push_back(rep);
- });
- return reps;
-}
-
-inline std::vector<cord_internal::CordRep*> CordCollectReps(
- cord_internal::CordRep* rep) {
- std::vector<cord_internal::CordRep*> reps;
- auto fn = [&reps](cord_internal::CordRep* rep) { reps.push_back(rep); };
- CordVisitReps(rep, fn);
- return reps;
-}
-
-inline void CordToString(cord_internal::CordRep* rep, TString& s) {
- size_t offset = 0;
- size_t length = rep->length;
- while (rep->tag == cord_internal::SUBSTRING) {
- offset += rep->substring()->start;
- rep = rep->substring()->child;
- }
- if (rep->tag == cord_internal::BTREE) {
- for (cord_internal::CordRep* edge : rep->btree()->Edges()) {
- CordToString(edge, s);
- }
- } else if (rep->tag >= cord_internal::FLAT) {
- s.append(rep->flat()->Data() + offset, length);
- } else if (rep->tag == cord_internal::EXTERNAL) {
- s.append(rep->external()->base + offset, length);
- } else {
- ABSL_RAW_LOG(FATAL, "Unsupported tag %d", rep->tag);
- }
-}
-
-inline TString CordToString(cord_internal::CordRep* rep) {
- TString s;
- s.reserve(rep->length);
- CordToString(rep, s);
- return s;
-}
-
-// RAII Helper class to automatically unref reps on destruction.
-class AutoUnref {
- public:
- ~AutoUnref() {
- for (CordRep* rep : unrefs_) CordRep::Unref(rep);
- }
-
- // Adds `rep` to the list of reps to be unreffed at destruction.
- template <typename CordRepType>
- CordRepType* Add(CordRepType* rep) {
- unrefs_.push_back(rep);
- return rep;
- }
-
- // Increments the reference count of `rep` by one, and adds it to
- // the list of reps to be unreffed at destruction.
- template <typename CordRepType>
- CordRepType* Ref(CordRepType* rep) {
- unrefs_.push_back(CordRep::Ref(rep));
- return rep;
- }
-
- // Increments the reference count of `rep` by one if `condition` is true,
- // and adds it to the list of reps to be unreffed at destruction.
- template <typename CordRepType>
- CordRepType* RefIf(bool condition, CordRepType* rep) {
- if (condition) unrefs_.push_back(CordRep::Ref(rep));
- return rep;
- }
-
- private:
- using CordRep = y_absl::cord_internal::CordRep;
-
- std::vector<CordRep*> unrefs_;
-};
-
-} // namespace cordrep_testing
-ABSL_NAMESPACE_END
-} // namespace y_absl
-
-#endif // ABSL_STRINGS_INTERNAL_CORD_REP_TEST_UTIL_H_
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORD_REP_TEST_UTIL_H_
+#define ABSL_STRINGS_INTERNAL_CORD_REP_TEST_UTIL_H_
+
+#include <cassert>
+#include <memory>
+#include <random>
+#include <util/generic/string.h>
+#include <vector>
+
+#include "y_absl/base/config.h"
+#include "y_absl/base/internal/raw_logging.h"
+#include "y_absl/strings/internal/cord_internal.h"
+#include "y_absl/strings/internal/cord_rep_btree.h"
+#include "y_absl/strings/internal/cord_rep_flat.h"
+#include "y_absl/strings/string_view.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cordrep_testing {
+
+inline cord_internal::CordRepSubstring* MakeSubstring(
+ size_t start, size_t len, cord_internal::CordRep* rep) {
+ auto* sub = new cord_internal::CordRepSubstring;
+ sub->tag = cord_internal::SUBSTRING;
+ sub->start = start;
+ sub->length = len <= 0 ? rep->length - start + len : len;
+ sub->child = rep;
+ return sub;
+}
+
+inline cord_internal::CordRepConcat* MakeConcat(cord_internal::CordRep* left,
+ cord_internal::CordRep* right,
+ int depth = 0) {
+ auto* concat = new cord_internal::CordRepConcat;
+ concat->tag = cord_internal::CONCAT;
+ concat->length = left->length + right->length;
+ concat->left = left;
+ concat->right = right;
+ concat->set_depth(depth);
+ return concat;
+}
+
+inline cord_internal::CordRepFlat* MakeFlat(y_absl::string_view value) {
+ assert(value.length() <= cord_internal::kMaxFlatLength);
+ auto* flat = cord_internal::CordRepFlat::New(value.length());
+ flat->length = value.length();
+ memcpy(flat->Data(), value.data(), value.length());
+ return flat;
+}
+
+// Creates an external node for testing
+inline cord_internal::CordRepExternal* MakeExternal(y_absl::string_view s) {
+ struct Rep : public cord_internal::CordRepExternal {
+ TString s;
+ explicit Rep(y_absl::string_view sv) : s(sv) {
+ this->tag = cord_internal::EXTERNAL;
+ this->base = s.data();
+ this->length = s.length();
+ this->releaser_invoker = [](cord_internal::CordRepExternal* self) {
+ delete static_cast<Rep*>(self);
+ };
+ }
+ };
+ return new Rep(s);
+}
+
+inline TString CreateRandomString(size_t n) {
+ y_absl::string_view data =
+ "abcdefghijklmnopqrstuvwxyz"
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ "0123456789~!@#$%^&*()_+=-<>?:\"{}[]|";
+ std::minstd_rand rnd;
+ std::uniform_int_distribution<size_t> dist(0, data.size() - 1);
+ TString s(n, ' ');
+ for (size_t i = 0; i < n; ++i) {
+ s[i] = data[dist(rnd)];
+ }
+ return s;
+}
+
+// Creates an array of flats from the provided string, chopping it up
+// into flats of `chunk_size` characters each, resulting in roughly
+// `data.size() / chunk_size` total flats.
+inline std::vector<cord_internal::CordRep*> CreateFlatsFromString(
+ y_absl::string_view data, size_t chunk_size) {
+ assert(chunk_size > 0);
+ std::vector<cord_internal::CordRep*> flats;
+ for (y_absl::string_view s = data; !s.empty(); s.remove_prefix(chunk_size)) {
+ flats.push_back(MakeFlat(s.substr(0, chunk_size)));
+ }
+ return flats;
+}
+
+inline cord_internal::CordRepBtree* CordRepBtreeFromFlats(
+ y_absl::Span<cord_internal::CordRep* const> flats) {
+ assert(!flats.empty());
+ auto* node = cord_internal::CordRepBtree::Create(flats[0]);
+ for (size_t i = 1; i < flats.size(); ++i) {
+ node = cord_internal::CordRepBtree::Append(node, flats[i]);
+ }
+ return node;
+}
+
+template <typename Fn>
+inline void CordVisitReps(cord_internal::CordRep* rep, Fn&& fn) {
+ fn(rep);
+ while (rep->tag == cord_internal::SUBSTRING) {
+ rep = rep->substring()->child;
+ fn(rep);
+ }
+ if (rep->tag == cord_internal::BTREE) {
+ for (cord_internal::CordRep* edge : rep->btree()->Edges()) {
+ CordVisitReps(edge, fn);
+ }
+ } else if (rep->tag == cord_internal::CONCAT) {
+ CordVisitReps(rep->concat()->left, fn);
+ CordVisitReps(rep->concat()->right, fn);
+ }
+}
+
+template <typename Predicate>
+inline std::vector<cord_internal::CordRep*> CordCollectRepsIf(
+ Predicate&& predicate, cord_internal::CordRep* rep) {
+ std::vector<cord_internal::CordRep*> reps;
+ CordVisitReps(rep, [&reps, &predicate](cord_internal::CordRep* rep) {
+ if (predicate(rep)) reps.push_back(rep);
+ });
+ return reps;
+}
+
+inline std::vector<cord_internal::CordRep*> CordCollectReps(
+ cord_internal::CordRep* rep) {
+ std::vector<cord_internal::CordRep*> reps;
+ auto fn = [&reps](cord_internal::CordRep* rep) { reps.push_back(rep); };
+ CordVisitReps(rep, fn);
+ return reps;
+}
+
+inline void CordToString(cord_internal::CordRep* rep, TString& s) {
+ size_t offset = 0;
+ size_t length = rep->length;
+ while (rep->tag == cord_internal::SUBSTRING) {
+ offset += rep->substring()->start;
+ rep = rep->substring()->child;
+ }
+ if (rep->tag == cord_internal::BTREE) {
+ for (cord_internal::CordRep* edge : rep->btree()->Edges()) {
+ CordToString(edge, s);
+ }
+ } else if (rep->tag >= cord_internal::FLAT) {
+ s.append(rep->flat()->Data() + offset, length);
+ } else if (rep->tag == cord_internal::EXTERNAL) {
+ s.append(rep->external()->base + offset, length);
+ } else {
+ ABSL_RAW_LOG(FATAL, "Unsupported tag %d", rep->tag);
+ }
+}
+
+inline TString CordToString(cord_internal::CordRep* rep) {
+ TString s;
+ s.reserve(rep->length);
+ CordToString(rep, s);
+ return s;
+}
+
+// RAII Helper class to automatically unref reps on destruction.
+class AutoUnref {
+ public:
+ ~AutoUnref() {
+ for (CordRep* rep : unrefs_) CordRep::Unref(rep);
+ }
+
+ // Adds `rep` to the list of reps to be unreffed at destruction.
+ template <typename CordRepType>
+ CordRepType* Add(CordRepType* rep) {
+ unrefs_.push_back(rep);
+ return rep;
+ }
+
+ // Increments the reference count of `rep` by one, and adds it to
+ // the list of reps to be unreffed at destruction.
+ template <typename CordRepType>
+ CordRepType* Ref(CordRepType* rep) {
+ unrefs_.push_back(CordRep::Ref(rep));
+ return rep;
+ }
+
+ // Increments the reference count of `rep` by one if `condition` is true,
+ // and adds it to the list of reps to be unreffed at destruction.
+ template <typename CordRepType>
+ CordRepType* RefIf(bool condition, CordRepType* rep) {
+ if (condition) unrefs_.push_back(CordRep::Ref(rep));
+ return rep;
+ }
+
+ private:
+ using CordRep = y_absl::cord_internal::CordRep;
+
+ std::vector<CordRep*> unrefs_;
+};
+
+} // namespace cordrep_testing
+ABSL_NAMESPACE_END
+} // namespace y_absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORD_REP_TEST_UTIL_H_
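For orientation, here is a minimal sketch of how the helpers above typically compose in a test. It is illustrative only: it assumes the header above is on the include path and that the translation unit links against the cord internals; `ExampleRoundTrip` is a hypothetical function, not part of the header.

// Hypothetical usage sketch: build a btree cord from flats and round-trip
// it through CordToString. AutoUnref releases the tree on scope exit.
#include <cassert>
#include <vector>
#include "y_absl/strings/internal/cord_rep_test_util.h"

void ExampleRoundTrip() {
  using namespace y_absl::cordrep_testing;
  AutoUnref unref;
  TString data = CreateRandomString(400);
  // Four flats of 100 characters each.
  std::vector<y_absl::cord_internal::CordRep*> flats =
      CreateFlatsFromString(data, 100);
  auto* tree = unref.Add(CordRepBtreeFromFlats(flats));
  assert(CordToString(tree) == data);
}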
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions.cc
index e9936f22fe..843641c779 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions.cc
@@ -1,96 +1,96 @@
-// Copyright 2019 The Abseil Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "y_absl/strings/internal/cordz_functions.h"
-
-#include <atomic>
-#include <cmath>
-#include <limits>
-#include <random>
-
-#include "y_absl/base/attributes.h"
-#include "y_absl/base/config.h"
-#include "y_absl/base/internal/raw_logging.h"
-#include "y_absl/profiling/internal/exponential_biased.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-namespace {
-
-// The average interval until the next sample. A value of 0 disables profiling
-// while a value of 1 will profile all Cords.
-std::atomic<int> g_cordz_mean_interval(50000);
-
-} // namespace
-
-#ifdef ABSL_INTERNAL_CORDZ_ENABLED
-
-// Special negative 'not initialized' per thread value for cordz_next_sample.
-static constexpr int64_t kInitCordzNextSample = -1;
-
-ABSL_CONST_INIT thread_local int64_t cordz_next_sample = kInitCordzNextSample;
-
-// kIntervalIfDisabled is the number of profile-eligible events that need to
-// occur before the code will confirm that cordz is still disabled.
-constexpr int64_t kIntervalIfDisabled = 1 << 16;
-
-ABSL_ATTRIBUTE_NOINLINE bool cordz_should_profile_slow() {
-
- thread_local y_absl::profiling_internal::ExponentialBiased
- exponential_biased_generator;
- int32_t mean_interval = get_cordz_mean_interval();
-
- // Check if we disabled profiling. If so, set the next sample to a "large"
- // number to minimize the overhead of the should_profile codepath.
- if (mean_interval <= 0) {
- cordz_next_sample = kIntervalIfDisabled;
- return false;
- }
-
- // Check if we're always sampling.
- if (mean_interval == 1) {
- cordz_next_sample = 1;
- return true;
- }
-
- if (cordz_next_sample <= 0) {
- // If this is the first check on the current thread, check
- // cordz_should_profile() again using the newly created (initial) stride
- // in cordz_next_sample.
- const bool initialized = cordz_next_sample != kInitCordzNextSample;
- cordz_next_sample = exponential_biased_generator.GetStride(mean_interval);
- return initialized || cordz_should_profile();
- }
-
- --cordz_next_sample;
- return false;
-}
-
-void cordz_set_next_sample_for_testing(int64_t next_sample) {
- cordz_next_sample = next_sample;
-}
-
-#endif // ABSL_INTERNAL_CORDZ_ENABLED
-
-int32_t get_cordz_mean_interval() {
- return g_cordz_mean_interval.load(std::memory_order_acquire);
-}
-
-void set_cordz_mean_interval(int32_t mean_interval) {
- g_cordz_mean_interval.store(mean_interval, std::memory_order_release);
-}
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "y_absl/strings/internal/cordz_functions.h"
+
+#include <atomic>
+#include <cmath>
+#include <limits>
+#include <random>
+
+#include "y_absl/base/attributes.h"
+#include "y_absl/base/config.h"
+#include "y_absl/base/internal/raw_logging.h"
+#include "y_absl/profiling/internal/exponential_biased.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+namespace {
+
+// The average interval until the next sample. A value of 0 disables profiling
+// while a value of 1 will profile all Cords.
+std::atomic<int> g_cordz_mean_interval(50000);
+
+} // namespace
+
+#ifdef ABSL_INTERNAL_CORDZ_ENABLED
+
+// Special negative 'not initialized' per thread value for cordz_next_sample.
+static constexpr int64_t kInitCordzNextSample = -1;
+
+ABSL_CONST_INIT thread_local int64_t cordz_next_sample = kInitCordzNextSample;
+
+// kIntervalIfDisabled is the number of profile-eligible events that need to
+// occur before the code will confirm that cordz is still disabled.
+constexpr int64_t kIntervalIfDisabled = 1 << 16;
+
+ABSL_ATTRIBUTE_NOINLINE bool cordz_should_profile_slow() {
+
+ thread_local y_absl::profiling_internal::ExponentialBiased
+ exponential_biased_generator;
+ int32_t mean_interval = get_cordz_mean_interval();
+
+ // Check if we disabled profiling. If so, set the next sample to a "large"
+ // number to minimize the overhead of the should_profile codepath.
+ if (mean_interval <= 0) {
+ cordz_next_sample = kIntervalIfDisabled;
+ return false;
+ }
+
+ // Check if we're always sampling.
+ if (mean_interval == 1) {
+ cordz_next_sample = 1;
+ return true;
+ }
+
+ if (cordz_next_sample <= 0) {
+ // If this is the first check on the current thread, check
+ // cordz_should_profile() again using the newly created (initial) stride
+ // in cordz_next_sample.
+ const bool initialized = cordz_next_sample != kInitCordzNextSample;
+ cordz_next_sample = exponential_biased_generator.GetStride(mean_interval);
+ return initialized || cordz_should_profile();
+ }
+
+ --cordz_next_sample;
+ return false;
+}
+
+void cordz_set_next_sample_for_testing(int64_t next_sample) {
+ cordz_next_sample = next_sample;
+}
+
+#endif // ABSL_INTERNAL_CORDZ_ENABLED
+
+int32_t get_cordz_mean_interval() {
+ return g_cordz_mean_interval.load(std::memory_order_acquire);
+}
+
+void set_cordz_mean_interval(int32_t mean_interval) {
+ g_cordz_mean_interval.store(mean_interval, std::memory_order_release);
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
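The three sampling regimes implemented above (disabled, always-on, and exponentially strided) can be exercised directly. A hedged sketch, valid only on builds where ABSL_INTERNAL_CORDZ_ENABLED is defined; `ExampleSamplingRegimes` is a hypothetical test function:

// Illustrative sketch: drive cordz_should_profile() through the documented
// settings of the mean interval. A pending per-thread stride is consumed
// before a new interval takes effect, hence the testing hook.
#include <cassert>
#include "y_absl/strings/internal/cordz_functions.h"

void ExampleSamplingRegimes() {
  using namespace y_absl::cord_internal;

  set_cordz_mean_interval(0);       // 0 disables profiling.
  assert(!cordz_should_profile());  // Slow path parks a large stride.

  set_cordz_mean_interval(1);            // 1 profiles every Cord...
  cordz_set_next_sample_for_testing(0);  // ...once the old stride is cleared.
  assert(cordz_should_profile());
}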
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions.h
index 802efaa976..b3d70b8316 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions.h
@@ -1,85 +1,85 @@
-// Copyright 2019 The Abseil Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#ifndef ABSL_STRINGS_CORDZ_FUNCTIONS_H_
-#define ABSL_STRINGS_CORDZ_FUNCTIONS_H_
-
-#include <stdint.h>
-
-#include "y_absl/base/attributes.h"
-#include "y_absl/base/config.h"
-#include "y_absl/base/optimization.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-// Returns the current sample rate. This represents the average interval
-// between samples.
-int32_t get_cordz_mean_interval();
-
-// Sets the sample rate with the average interval between samples.
-void set_cordz_mean_interval(int32_t mean_interval);
-
-// Enable cordz unless any of the following applies:
-// - no thread local support
-// - MSVC build
-// - Android build
-// - Apple build
-// - DLL build
-// Hashtablez is turned off completely in opensource builds.
-// MSVC's static atomics are dynamically initialized in debug mode, which breaks
-// sampling.
-#if defined(ABSL_HAVE_THREAD_LOCAL) && !defined(_MSC_VER) && \
- !defined(ABSL_BUILD_DLL) && !defined(ABSL_CONSUME_DLL) && \
- !defined(__ANDROID__) && !defined(__APPLE__)
-#define ABSL_INTERNAL_CORDZ_ENABLED 1
-#endif
-
-#ifdef ABSL_INTERNAL_CORDZ_ENABLED
-
-// cordz_next_sample is the number of events until the next sample event. If
-// the value is 1 or less, the code will check on the next event if cordz is
-// enabled, and if so, will sample the Cord. cordz is only enabled when we can
-// use thread locals.
-ABSL_CONST_INIT extern thread_local int64_t cordz_next_sample;
-
-// Determines if the next sample should be profiled. If it is,
-// cordz_next_sample will be set to the interval until the next sample.
-bool cordz_should_profile_slow();
-
-// Returns true if the next cord should be sampled.
-inline bool cordz_should_profile() {
- if (ABSL_PREDICT_TRUE(cordz_next_sample > 1)) {
- cordz_next_sample--;
- return false;
- }
- return cordz_should_profile_slow();
-}
-
-// Sets the interval until the next sample (for testing only)
-void cordz_set_next_sample_for_testing(int64_t next_sample);
-
-#else // ABSL_INTERNAL_CORDZ_ENABLED
-
-inline bool cordz_should_profile() { return false; }
-inline void cordz_set_next_sample_for_testing(int64_t) {}
-
-#endif // ABSL_INTERNAL_CORDZ_ENABLED
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
-
-#endif // ABSL_STRINGS_CORDZ_FUNCTIONS_H_
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_CORDZ_FUNCTIONS_H_
+#define ABSL_STRINGS_CORDZ_FUNCTIONS_H_
+
+#include <stdint.h>
+
+#include "y_absl/base/attributes.h"
+#include "y_absl/base/config.h"
+#include "y_absl/base/optimization.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// Returns the current sample rate. This represents the average interval
+// between samples.
+int32_t get_cordz_mean_interval();
+
+// Sets the sample rate with the average interval between samples.
+void set_cordz_mean_interval(int32_t mean_interval);
+
+// Enable cordz unless any of the following applies:
+// - no thread local support
+// - MSVC build
+// - Android build
+// - Apple build
+// - DLL build
+// Hashtablez is turned off completely in opensource builds.
+// MSVC's static atomics are dynamically initialized in debug mode, which breaks
+// sampling.
+#if defined(ABSL_HAVE_THREAD_LOCAL) && !defined(_MSC_VER) && \
+ !defined(ABSL_BUILD_DLL) && !defined(ABSL_CONSUME_DLL) && \
+ !defined(__ANDROID__) && !defined(__APPLE__)
+#define ABSL_INTERNAL_CORDZ_ENABLED 1
+#endif
+
+#ifdef ABSL_INTERNAL_CORDZ_ENABLED
+
+// cordz_next_sample is the number of events until the next sample event. If
+// the value is 1 or less, the code will check on the next event if cordz is
+// enabled, and if so, will sample the Cord. cordz is only enabled when we can
+// use thread locals.
+ABSL_CONST_INIT extern thread_local int64_t cordz_next_sample;
+
+// Determines if the next sample should be profiled. If it is,
+// cordz_next_sample will be set to the interval until the next sample.
+bool cordz_should_profile_slow();
+
+// Returns true if the next cord should be sampled.
+inline bool cordz_should_profile() {
+ if (ABSL_PREDICT_TRUE(cordz_next_sample > 1)) {
+ cordz_next_sample--;
+ return false;
+ }
+ return cordz_should_profile_slow();
+}
+
+// Sets the interval until the next sample (for testing only)
+void cordz_set_next_sample_for_testing(int64_t next_sample);
+
+#else // ABSL_INTERNAL_CORDZ_ENABLED
+
+inline bool cordz_should_profile() { return false; }
+inline void cordz_set_next_sample_for_testing(int64_t) {}
+
+#endif // ABSL_INTERNAL_CORDZ_ENABLED
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
+
+#endif // ABSL_STRINGS_CORDZ_FUNCTIONS_H_
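The intended call pattern for this header is a cheap inline gate in front of the expensive tracking path: in the common case `cordz_should_profile()` is a single thread-local compare-and-decrement, and only roughly every mean-interval-th call falls through to the slow path. A hedged caller-side sketch; `MaybeTrackAllocation` is hypothetical and not part of this header:

// Hypothetical caller: the branch is marked unlikely so the sampled path
// stays off the hot path of ordinary Cord operations.
#include "y_absl/strings/internal/cordz_functions.h"

void MaybeTrackAllocation() {
  if (ABSL_PREDICT_FALSE(y_absl::cord_internal::cordz_should_profile())) {
    // Expensive path: capture a stack trace, register tracking state, etc.
  }
}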
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions/ya.make b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions/ya.make
index 06e99346da..7afa600674 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions/ya.make
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions/ya.make
@@ -11,22 +11,22 @@ OWNER(
LICENSE(Apache-2.0)
-PEERDIR(
- contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/raw_logging
- contrib/restricted/abseil-cpp-tstring/y_absl/base/log_severity
- contrib/restricted/abseil-cpp-tstring/y_absl/profiling/internal/exponential_biased
-)
-
+PEERDIR(
+ contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/raw_logging
+ contrib/restricted/abseil-cpp-tstring/y_absl/base/log_severity
+ contrib/restricted/abseil-cpp-tstring/y_absl/profiling/internal/exponential_biased
+)
+
ADDINCL(
GLOBAL contrib/restricted/abseil-cpp-tstring
)
NO_COMPILER_WARNINGS()
-SRCDIR(contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal)
+SRCDIR(contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal)
SRCS(
- cordz_functions.cc
+ cordz_functions.cc
)
END()
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle.cc
index 707c6d2a9b..cf9903d4bb 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle.cc
@@ -1,139 +1,139 @@
-// Copyright 2019 The Abseil Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-#include "y_absl/strings/internal/cordz_handle.h"
-
-#include <atomic>
-
-#include "y_absl/base/internal/raw_logging.h" // For ABSL_RAW_CHECK
-#include "y_absl/base/internal/spinlock.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-using ::y_absl::base_internal::SpinLockHolder;
-
-ABSL_CONST_INIT CordzHandle::Queue CordzHandle::global_queue_(y_absl::kConstInit);
-
-CordzHandle::CordzHandle(bool is_snapshot) : is_snapshot_(is_snapshot) {
- if (is_snapshot) {
- SpinLockHolder lock(&queue_->mutex);
- CordzHandle* dq_tail = queue_->dq_tail.load(std::memory_order_acquire);
- if (dq_tail != nullptr) {
- dq_prev_ = dq_tail;
- dq_tail->dq_next_ = this;
- }
- queue_->dq_tail.store(this, std::memory_order_release);
- }
-}
-
-CordzHandle::~CordzHandle() {
- ODRCheck();
- if (is_snapshot_) {
- std::vector<CordzHandle*> to_delete;
- {
- SpinLockHolder lock(&queue_->mutex);
- CordzHandle* next = dq_next_;
- if (dq_prev_ == nullptr) {
- // We were head of the queue, delete every CordzHandle until we reach
- // either the end of the list, or a snapshot handle.
- while (next && !next->is_snapshot_) {
- to_delete.push_back(next);
- next = next->dq_next_;
- }
- } else {
- // Another CordzHandle existed before this one, don't delete anything.
- dq_prev_->dq_next_ = next;
- }
- if (next) {
- next->dq_prev_ = dq_prev_;
- } else {
- queue_->dq_tail.store(dq_prev_, std::memory_order_release);
- }
- }
- for (CordzHandle* handle : to_delete) {
- delete handle;
- }
- }
-}
-
-bool CordzHandle::SafeToDelete() const {
- return is_snapshot_ || queue_->IsEmpty();
-}
-
-void CordzHandle::Delete(CordzHandle* handle) {
- assert(handle);
- if (handle) {
- handle->ODRCheck();
- Queue* const queue = handle->queue_;
- if (!handle->SafeToDelete()) {
- SpinLockHolder lock(&queue->mutex);
- CordzHandle* dq_tail = queue->dq_tail.load(std::memory_order_acquire);
- if (dq_tail != nullptr) {
- handle->dq_prev_ = dq_tail;
- dq_tail->dq_next_ = handle;
- queue->dq_tail.store(handle, std::memory_order_release);
- return;
- }
- }
- delete handle;
- }
-}
-
-std::vector<const CordzHandle*> CordzHandle::DiagnosticsGetDeleteQueue() {
- std::vector<const CordzHandle*> handles;
- SpinLockHolder lock(&global_queue_.mutex);
- CordzHandle* dq_tail = global_queue_.dq_tail.load(std::memory_order_acquire);
- for (const CordzHandle* p = dq_tail; p; p = p->dq_prev_) {
- handles.push_back(p);
- }
- return handles;
-}
-
-bool CordzHandle::DiagnosticsHandleIsSafeToInspect(
- const CordzHandle* handle) const {
- ODRCheck();
- if (!is_snapshot_) return false;
- if (handle == nullptr) return true;
- if (handle->is_snapshot_) return false;
- bool snapshot_found = false;
- SpinLockHolder lock(&queue_->mutex);
- for (const CordzHandle* p = queue_->dq_tail; p; p = p->dq_prev_) {
- if (p == handle) return !snapshot_found;
- if (p == this) snapshot_found = true;
- }
- ABSL_ASSERT(snapshot_found); // Assert that 'this' is in delete queue.
- return true;
-}
-
-std::vector<const CordzHandle*>
-CordzHandle::DiagnosticsGetSafeToInspectDeletedHandles() {
- ODRCheck();
- std::vector<const CordzHandle*> handles;
- if (!is_snapshot()) {
- return handles;
- }
-
- SpinLockHolder lock(&queue_->mutex);
- for (const CordzHandle* p = dq_next_; p != nullptr; p = p->dq_next_) {
- if (!p->is_snapshot()) {
- handles.push_back(p);
- }
- }
- return handles;
-}
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include "y_absl/strings/internal/cordz_handle.h"
+
+#include <atomic>
+
+#include "y_absl/base/internal/raw_logging.h" // For ABSL_RAW_CHECK
+#include "y_absl/base/internal/spinlock.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+using ::y_absl::base_internal::SpinLockHolder;
+
+ABSL_CONST_INIT CordzHandle::Queue CordzHandle::global_queue_(y_absl::kConstInit);
+
+CordzHandle::CordzHandle(bool is_snapshot) : is_snapshot_(is_snapshot) {
+ if (is_snapshot) {
+ SpinLockHolder lock(&queue_->mutex);
+ CordzHandle* dq_tail = queue_->dq_tail.load(std::memory_order_acquire);
+ if (dq_tail != nullptr) {
+ dq_prev_ = dq_tail;
+ dq_tail->dq_next_ = this;
+ }
+ queue_->dq_tail.store(this, std::memory_order_release);
+ }
+}
+
+CordzHandle::~CordzHandle() {
+ ODRCheck();
+ if (is_snapshot_) {
+ std::vector<CordzHandle*> to_delete;
+ {
+ SpinLockHolder lock(&queue_->mutex);
+ CordzHandle* next = dq_next_;
+ if (dq_prev_ == nullptr) {
+ // We were head of the queue, delete every CordzHandle until we reach
+ // either the end of the list, or a snapshot handle.
+ while (next && !next->is_snapshot_) {
+ to_delete.push_back(next);
+ next = next->dq_next_;
+ }
+ } else {
+ // Another CordzHandle existed before this one, don't delete anything.
+ dq_prev_->dq_next_ = next;
+ }
+ if (next) {
+ next->dq_prev_ = dq_prev_;
+ } else {
+ queue_->dq_tail.store(dq_prev_, std::memory_order_release);
+ }
+ }
+ for (CordzHandle* handle : to_delete) {
+ delete handle;
+ }
+ }
+}
+
+bool CordzHandle::SafeToDelete() const {
+ return is_snapshot_ || queue_->IsEmpty();
+}
+
+void CordzHandle::Delete(CordzHandle* handle) {
+ assert(handle);
+ if (handle) {
+ handle->ODRCheck();
+ Queue* const queue = handle->queue_;
+ if (!handle->SafeToDelete()) {
+ SpinLockHolder lock(&queue->mutex);
+ CordzHandle* dq_tail = queue->dq_tail.load(std::memory_order_acquire);
+ if (dq_tail != nullptr) {
+ handle->dq_prev_ = dq_tail;
+ dq_tail->dq_next_ = handle;
+ queue->dq_tail.store(handle, std::memory_order_release);
+ return;
+ }
+ }
+ delete handle;
+ }
+}
+
+std::vector<const CordzHandle*> CordzHandle::DiagnosticsGetDeleteQueue() {
+ std::vector<const CordzHandle*> handles;
+ SpinLockHolder lock(&global_queue_.mutex);
+ CordzHandle* dq_tail = global_queue_.dq_tail.load(std::memory_order_acquire);
+ for (const CordzHandle* p = dq_tail; p; p = p->dq_prev_) {
+ handles.push_back(p);
+ }
+ return handles;
+}
+
+bool CordzHandle::DiagnosticsHandleIsSafeToInspect(
+ const CordzHandle* handle) const {
+ ODRCheck();
+ if (!is_snapshot_) return false;
+ if (handle == nullptr) return true;
+ if (handle->is_snapshot_) return false;
+ bool snapshot_found = false;
+ SpinLockHolder lock(&queue_->mutex);
+ for (const CordzHandle* p = queue_->dq_tail; p; p = p->dq_prev_) {
+ if (p == handle) return !snapshot_found;
+ if (p == this) snapshot_found = true;
+ }
+ ABSL_ASSERT(snapshot_found); // Assert that 'this' is in delete queue.
+ return true;
+}
+
+std::vector<const CordzHandle*>
+CordzHandle::DiagnosticsGetSafeToInspectDeletedHandles() {
+ ODRCheck();
+ std::vector<const CordzHandle*> handles;
+ if (!is_snapshot()) {
+ return handles;
+ }
+
+ SpinLockHolder lock(&queue_->mutex);
+ for (const CordzHandle* p = dq_next_; p != nullptr; p = p->dq_next_) {
+ if (!p->is_snapshot()) {
+ handles.push_back(p);
+ }
+ }
+ return handles;
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
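To make the queue mechanics above concrete: while any snapshot is alive, a handle passed to CordzHandle::Delete() is queued rather than freed, and a head-of-queue snapshot drains the non-snapshot entries behind it on destruction. A hedged, test-style sketch; `ExampleDeleteQueue` is hypothetical:

// Illustrative sketch of the snapshot / delete-queue interplay.
#include <cassert>
#include "y_absl/strings/internal/cordz_handle.h"

void ExampleDeleteQueue() {
  using y_absl::cord_internal::CordzHandle;
  using y_absl::cord_internal::CordzSnapshot;

  auto* snapshot = new CordzSnapshot();  // Queue is non-empty from here on.
  auto* handle = new CordzHandle();
  assert(!handle->SafeToDelete());       // Guarded by the live snapshot.

  CordzHandle::Delete(handle);           // Queued, not freed.
  assert(snapshot->DiagnosticsHandleIsSafeToInspect(handle));

  CordzHandle::Delete(snapshot);  // Frees the snapshot; handles queued behind
                                  // it drain once no earlier snapshot remains.
}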
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle.h
index f181bc7d6b..4c79308947 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle.h
@@ -1,131 +1,131 @@
-// Copyright 2019 The Abseil Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#ifndef ABSL_STRINGS_CORDZ_HANDLE_H_
-#define ABSL_STRINGS_CORDZ_HANDLE_H_
-
-#include <atomic>
-#include <vector>
-
-#include "y_absl/base/config.h"
-#include "y_absl/base/internal/raw_logging.h"
-#include "y_absl/base/internal/spinlock.h"
-#include "y_absl/synchronization/mutex.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-// This base class allows multiple types of object (CordzInfo and
-// CordzSampleToken) to exist simultaneously on the delete queue (pointed to by
-// global_dq_tail and traversed using dq_prev_ and dq_next_). The
-// delete queue guarantees that once a profiler creates a CordzSampleToken and
-// has gained visibility into a CordzInfo object, that CordzInfo object will not
-// be deleted prematurely. This allows the profiler to inspect all CordzInfo
-// objects that are alive without needing to hold a global lock.
-class CordzHandle {
- public:
- CordzHandle() : CordzHandle(false) {}
-
- bool is_snapshot() const { return is_snapshot_; }
-
- // Returns true if this instance is safe to be deleted because it is either a
- // snapshot, which is always safe to delete, or not included in the global
- // delete queue and thus not included in any snapshot.
- // Callers are responsible for making sure this instance can not be newly
- // discovered by other threads. For example, CordzInfo instances first de-list
- // themselves from the global CordzInfo list before determining if they are
- // safe to be deleted directly.
- // If SafeToDelete returns false, callers MUST use the Delete() method to
- // safely queue CordzHandle instances for deletion.
- bool SafeToDelete() const;
-
- // Deletes the provided instance, or puts it on the delete queue to be deleted
- // once there are no more sample token (snapshot) instances potentially
- // referencing the instance. `handle` should not be null.
- static void Delete(CordzHandle* handle);
-
- // Returns the current entries in the delete queue in LIFO order.
- static std::vector<const CordzHandle*> DiagnosticsGetDeleteQueue();
-
- // Returns true if the provided handle is nullptr or guarded by this handle.
- // Since the CordzSnapshot token is itself a CordzHandle, this method will
- // allow tests to check if that token is keeping an arbitrary CordzHandle
- // alive.
- bool DiagnosticsHandleIsSafeToInspect(const CordzHandle* handle) const;
-
- // Returns the current entries in the delete queue, in LIFO order, that are
- // protected by this. CordzHandle objects are only placed on the delete queue
- // after CordzHandle::Delete is called with them as an argument. Only
- // CordzHandle objects that are not also CordzSnapshot objects will be
- // included in the return vector. For each of the handles in the return
- // vector, the earliest that their memory can be freed is when this
- // CordzSnapshot object is deleted.
- std::vector<const CordzHandle*> DiagnosticsGetSafeToInspectDeletedHandles();
-
- protected:
- explicit CordzHandle(bool is_snapshot);
- virtual ~CordzHandle();
-
- private:
- // Global queue data. CordzHandle stores a pointer to the global queue
- // instance to harden against ODR violations.
- struct Queue {
- constexpr explicit Queue(y_absl::ConstInitType)
- : mutex(y_absl::kConstInit,
- y_absl::base_internal::SCHEDULE_COOPERATIVE_AND_KERNEL) {}
-
- y_absl::base_internal::SpinLock mutex;
- std::atomic<CordzHandle*> dq_tail ABSL_GUARDED_BY(mutex){nullptr};
-
- // Returns true if this delete queue is empty. This method does not acquire
- // the lock, but does a 'load acquire' observation on the delete queue tail.
- // It is used inside Delete() to check for the presence of a delete queue
- // without holding the lock. The assumption is that the caller is in the
- // state of 'being deleted', and cannot be newly discovered by a concurrent
- // 'being constructed' snapshot instance. Practically, this means that any
- // such discovery ('find', 'first' or 'next', etc.) must have proper 'happens
- // before / after' semantics and atomic fences.
- bool IsEmpty() const ABSL_NO_THREAD_SAFETY_ANALYSIS {
- return dq_tail.load(std::memory_order_acquire) == nullptr;
- }
- };
-
- void ODRCheck() const {
-#ifndef NDEBUG
- ABSL_RAW_CHECK(queue_ == &global_queue_, "ODR violation in Cord");
-#endif
- }
-
- ABSL_CONST_INIT static Queue global_queue_;
- Queue* const queue_ = &global_queue_;
- const bool is_snapshot_;
-
- // dq_prev_ and dq_next_ require the global queue mutex to be held.
- // Unfortunately we can't use thread annotations such that the thread safety
- // analysis understands that queue_ and global_queue_ are one and the same.
- CordzHandle* dq_prev_ = nullptr;
- CordzHandle* dq_next_ = nullptr;
-};
-
-class CordzSnapshot : public CordzHandle {
- public:
- CordzSnapshot() : CordzHandle(true) {}
-};
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
-
-#endif // ABSL_STRINGS_CORDZ_HANDLE_H_
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_CORDZ_HANDLE_H_
+#define ABSL_STRINGS_CORDZ_HANDLE_H_
+
+#include <atomic>
+#include <vector>
+
+#include "y_absl/base/config.h"
+#include "y_absl/base/internal/raw_logging.h"
+#include "y_absl/base/internal/spinlock.h"
+#include "y_absl/synchronization/mutex.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// This base class allows multiple types of object (CordzInfo and
+// CordzSampleToken) to exist simultaneously on the delete queue (pointed to by
+// global_dq_tail and traversed using dq_prev_ and dq_next_). The
+// delete queue guarantees that once a profiler creates a CordzSampleToken and
+// has gained visibility into a CordzInfo object, that CordzInfo object will not
+// be deleted prematurely. This allows the profiler to inspect all CordzInfo
+// objects that are alive without needing to hold a global lock.
+class CordzHandle {
+ public:
+ CordzHandle() : CordzHandle(false) {}
+
+ bool is_snapshot() const { return is_snapshot_; }
+
+ // Returns true if this instance is safe to be deleted because it is either a
+ // snapshot, which is always safe to delete, or not included in the global
+ // delete queue and thus not included in any snapshot.
+ // Callers are responsible for making sure this instance can not be newly
+ // discovered by other threads. For example, CordzInfo instances first de-list
+ // themselves from the global CordzInfo list before determining if they are
+ // safe to be deleted directly.
+ // If SafeToDelete returns false, callers MUST use the Delete() method to
+ // safely queue CordzHandle instances for deletion.
+ bool SafeToDelete() const;
+
+ // Deletes the provided instance, or puts it on the delete queue to be deleted
+ // once there are no more sample token (snapshot) instances potentially
+ // referencing the instance. `handle` should not be null.
+ static void Delete(CordzHandle* handle);
+
+ // Returns the current entries in the delete queue in LIFO order.
+ static std::vector<const CordzHandle*> DiagnosticsGetDeleteQueue();
+
+ // Returns true if the provided handle is nullptr or guarded by this handle.
+ // Since the CordzSnapshot token is itself a CordzHandle, this method will
+ // allow tests to check if that token is keeping an arbitrary CordzHandle
+ // alive.
+ bool DiagnosticsHandleIsSafeToInspect(const CordzHandle* handle) const;
+
+ // Returns the current entries in the delete queue, in LIFO order, that are
+ // protected by this. CordzHandle objects are only placed on the delete queue
+ // after CordzHandle::Delete is called with them as an argument. Only
+ // CordzHandle objects that are not also CordzSnapshot objects will be
+ // included in the return vector. For each of the handles in the return
+ // vector, the earliest that their memory can be freed is when this
+ // CordzSnapshot object is deleted.
+ std::vector<const CordzHandle*> DiagnosticsGetSafeToInspectDeletedHandles();
+
+ protected:
+ explicit CordzHandle(bool is_snapshot);
+ virtual ~CordzHandle();
+
+ private:
+ // Global queue data. CordzHandle stores a pointer to the global queue
+ // instance to harden against ODR violations.
+ struct Queue {
+ constexpr explicit Queue(y_absl::ConstInitType)
+ : mutex(y_absl::kConstInit,
+ y_absl::base_internal::SCHEDULE_COOPERATIVE_AND_KERNEL) {}
+
+ y_absl::base_internal::SpinLock mutex;
+ std::atomic<CordzHandle*> dq_tail ABSL_GUARDED_BY(mutex){nullptr};
+
+ // Returns true if this delete queue is empty. This method does not acquire
+ // the lock, but does a 'load acquire' observation on the delete queue tail.
+ // It is used inside Delete() to check for the presence of a delete queue
+ // without holding the lock. The assumption is that the caller is in the
+ // state of 'being deleted', and cannot be newly discovered by a concurrent
+ // 'being constructed' snapshot instance. Practically, this means that any
+ // such discovery ('find', 'first' or 'next', etc.) must have proper 'happens
+ // before / after' semantics and atomic fences.
+ bool IsEmpty() const ABSL_NO_THREAD_SAFETY_ANALYSIS {
+ return dq_tail.load(std::memory_order_acquire) == nullptr;
+ }
+ };
+
+ void ODRCheck() const {
+#ifndef NDEBUG
+ ABSL_RAW_CHECK(queue_ == &global_queue_, "ODR violation in Cord");
+#endif
+ }
+
+ ABSL_CONST_INIT static Queue global_queue_;
+ Queue* const queue_ = &global_queue_;
+ const bool is_snapshot_;
+
+ // dq_prev_ and dq_next_ require the global queue mutex to be held.
+ // Unfortunately we can't use thread annotations such that the thread safety
+ // analysis understands that queue_ and global_queue_ are one and the same.
+ CordzHandle* dq_prev_ = nullptr;
+ CordzHandle* dq_next_ = nullptr;
+};
+
+class CordzSnapshot : public CordzHandle {
+ public:
+ CordzSnapshot() : CordzHandle(true) {}
+};
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
+
+#endif // ABSL_STRINGS_CORDZ_HANDLE_H_
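A profiler-style consumer of this interface keeps a CordzSnapshot on the stack for the duration of a scan; the snapshot pins anything already on the delete queue so inspection stays race-free. A hedged sketch; `ScanDeleteQueue` is hypothetical:

// Hypothetical scan: handles returned here cannot be freed before
// `snapshot` is destroyed, per the guarantees documented above.
#include "y_absl/strings/internal/cordz_handle.h"

void ScanDeleteQueue() {
  y_absl::cord_internal::CordzSnapshot snapshot;
  for (const y_absl::cord_internal::CordzHandle* h :
       snapshot.DiagnosticsGetSafeToInspectDeletedHandles()) {
    (void)h;  // Inspect `h` here; its memory is pinned by `snapshot`.
  }
}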
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle/ya.make b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle/ya.make
index e181217139..5c4f81d727 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle/ya.make
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle/ya.make
@@ -13,23 +13,23 @@ LICENSE(Apache-2.0)
PEERDIR(
contrib/restricted/abseil-cpp-tstring/y_absl/base
- contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/low_level_alloc
+ contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/low_level_alloc
contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/raw_logging
contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/spinlock_wait
contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/throw_delegate
contrib/restricted/abseil-cpp-tstring/y_absl/base/log_severity
- contrib/restricted/abseil-cpp-tstring/y_absl/debugging
- contrib/restricted/abseil-cpp-tstring/y_absl/debugging/stacktrace
- contrib/restricted/abseil-cpp-tstring/y_absl/debugging/symbolize
- contrib/restricted/abseil-cpp-tstring/y_absl/demangle
+ contrib/restricted/abseil-cpp-tstring/y_absl/debugging
+ contrib/restricted/abseil-cpp-tstring/y_absl/debugging/stacktrace
+ contrib/restricted/abseil-cpp-tstring/y_absl/debugging/symbolize
+ contrib/restricted/abseil-cpp-tstring/y_absl/demangle
contrib/restricted/abseil-cpp-tstring/y_absl/numeric
contrib/restricted/abseil-cpp-tstring/y_absl/strings
- contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal
- contrib/restricted/abseil-cpp-tstring/y_absl/synchronization
- contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal
- contrib/restricted/abseil-cpp-tstring/y_absl/time
- contrib/restricted/abseil-cpp-tstring/y_absl/time/civil_time
- contrib/restricted/abseil-cpp-tstring/y_absl/time/time_zone
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal
+ contrib/restricted/abseil-cpp-tstring/y_absl/synchronization
+ contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal
+ contrib/restricted/abseil-cpp-tstring/y_absl/time
+ contrib/restricted/abseil-cpp-tstring/y_absl/time/civil_time
+ contrib/restricted/abseil-cpp-tstring/y_absl/time/time_zone
)
ADDINCL(
@@ -38,10 +38,10 @@ ADDINCL(
NO_COMPILER_WARNINGS()
-SRCDIR(contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal)
+SRCDIR(contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal)
SRCS(
- cordz_handle.cc
+ cordz_handle.cc
)
END()
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info.cc
index e3849a0b49..c45fff490a 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info.cc
@@ -1,445 +1,445 @@
-// Copyright 2019 The Abseil Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "y_absl/strings/internal/cordz_info.h"
-
-#include "y_absl/base/config.h"
-#include "y_absl/base/internal/spinlock.h"
-#include "y_absl/container/inlined_vector.h"
-#include "y_absl/debugging/stacktrace.h"
-#include "y_absl/strings/internal/cord_internal.h"
-#include "y_absl/strings/internal/cord_rep_btree.h"
-#include "y_absl/strings/internal/cord_rep_ring.h"
-#include "y_absl/strings/internal/cordz_handle.h"
-#include "y_absl/strings/internal/cordz_statistics.h"
-#include "y_absl/strings/internal/cordz_update_tracker.h"
-#include "y_absl/synchronization/mutex.h"
-#include "y_absl/types/span.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-using ::y_absl::base_internal::SpinLockHolder;
-
-constexpr int CordzInfo::kMaxStackDepth;
-
-ABSL_CONST_INIT CordzInfo::List CordzInfo::global_list_{y_absl::kConstInit};
-
-namespace {
-
-// CordRepAnalyzer performs the analysis of a cord.
-//
-// It computes absolute node counts and total memory usage, and an 'estimated
-// fair share memory usage` statistic.
-// Conceptually, it divides the 'memory usage' at each location in the 'cord
-// graph' by the cumulative reference count of that location. The cumulative
-// reference count is the factored total of all edges leading into that node.
-//
-// The top level node is treated specially: we assume the current thread
-// (typically calling from the CordzHandler) holds a reference purely to
-// perform a safe analysis, and is not part of the application. So we
-// subtract 1 from the reference count of the top node to compute the
-// 'application fair share', excluding the reference of the current thread.
-//
-// An example of fair sharing, and why we multiply reference counts:
-// Assume we have 2 CordReps, both being a Substring referencing a Flat:
-// CordSubstring A (refcount = 5) --> child Flat C (refcount = 2)
-// CordSubstring B (refcount = 9) --> child Flat C (refcount = 2)
-//
-// Flat C has 2 incoming edges from the 2 substrings (refcount = 2) and is not
-// referenced directly anywhere else. Translated into a 'fair share', we then
-// attribute 50% of the memory (memory / refcount = 2) to each incoming edge.
-// Rep A has a refcount of 5, so we attribute each incoming edge 1 / 5th of the
-// memory cost below it, i.e.: the fair share of Rep A of the memory used by C
-// is then 'memory C / (refcount C * refcount A) + (memory A / refcount A)'.
-// It is also easy to see how all incoming edges add up to 100%.
-class CordRepAnalyzer {
- public:
- // Creates an analyzer instance binding to `statistics`.
- explicit CordRepAnalyzer(CordzStatistics& statistics)
- : statistics_(statistics) {}
-
- // Analyzes the memory statistics and node counts for the provided `rep`, and
- // adds the results to `statistics`. Note that node counts and memory sizes
- // are not initialized, computed values are added to any existing values.
- void AnalyzeCordRep(const CordRep* rep) {
- // Process all linear nodes.
- // As per the class comments, use refcount - 1 on the top level node, as the
- // top level node is assumed to be referenced only for analysis purposes.
- size_t refcount = rep->refcount.Get();
- RepRef repref{rep, (refcount > 1) ? refcount - 1 : 1};
-
- // Process all top level linear nodes (substrings and flats).
- repref = CountLinearReps(repref, memory_usage_);
-
- if (repref.rep != nullptr) {
- if (repref.rep->tag == RING) {
- AnalyzeRing(repref);
- } else if (repref.rep->tag == BTREE) {
- AnalyzeBtree(repref);
- } else if (repref.rep->tag == CONCAT) {
- AnalyzeConcat(repref);
- } else {
- // We should have either a concat, btree, or ring node if not null.
- assert(false);
- }
- }
-
- // Adds values to output
- statistics_.estimated_memory_usage += memory_usage_.total;
- statistics_.estimated_fair_share_memory_usage +=
- static_cast<size_t>(memory_usage_.fair_share);
- }
-
- private:
- // RepRef identifies a CordRep* inside the Cord tree with its cumulative
- // refcount including itself. For example, a tree consisting of a substring
- // with a refcount of 3 and a child flat with a refcount of 4 will have RepRef
- // refcounts of 3 and 12 respectively.
- struct RepRef {
- const CordRep* rep;
- size_t refcount;
-
- // Returns a 'child' RepRef which contains the cumulative reference count of
- // this instance multiplied by the child's reference count.
- RepRef Child(const CordRep* child) const {
- return RepRef{child, refcount * child->refcount.Get()};
- }
- };
-
- // Memory usage values
- struct MemoryUsage {
- size_t total = 0;
- double fair_share = 0.0;
-
- // Adds `size` memory usage to this class, with a cumulative (recursive)
- // reference count of `refcount`.
- void Add(size_t size, size_t refcount) {
- total += size;
- fair_share += static_cast<double>(size) / refcount;
- }
- };
-
- // Returns `repref` if `repref.rep` is not null and a CONCAT type.
- // Asserts that `repref.rep` is a concat node or null.
- static RepRef AssertConcat(RepRef repref) {
- const CordRep* rep = repref.rep;
- assert(rep == nullptr || rep->tag == CONCAT);
- return (rep != nullptr && rep->tag == CONCAT) ? repref : RepRef{nullptr, 0};
- }
-
- // Counts a flat of the provided allocated size.
- void CountFlat(size_t size) {
- statistics_.node_count++;
- statistics_.node_counts.flat++;
- if (size <= 64) {
- statistics_.node_counts.flat_64++;
- } else if (size <= 128) {
- statistics_.node_counts.flat_128++;
- } else if (size <= 256) {
- statistics_.node_counts.flat_256++;
- } else if (size <= 512) {
- statistics_.node_counts.flat_512++;
- } else if (size <= 1024) {
- statistics_.node_counts.flat_1k++;
- }
- }
-
- // Processes 'linear' reps (substring, flat, external) not requiring iteration
- // or recursion. Returns RepRef{nullptr} if all reps were processed, else
- // returns the top-most non-linear concat or ring cordrep.
- // Node counts are updated into `statistics_`, memory usage is updated into
- // `memory_usage`, which typically references `memory_usage_` except for ring
- // buffers where we count children unrounded.
- RepRef CountLinearReps(RepRef rep, MemoryUsage& memory_usage) {
- // Consume all substrings
- while (rep.rep->tag == SUBSTRING) {
- statistics_.node_count++;
- statistics_.node_counts.substring++;
- memory_usage.Add(sizeof(CordRepSubstring), rep.refcount);
- rep = rep.Child(rep.rep->substring()->child);
- }
-
- // Consume possible FLAT
- if (rep.rep->tag >= FLAT) {
- size_t size = rep.rep->flat()->AllocatedSize();
- CountFlat(size);
- memory_usage.Add(size, rep.refcount);
- return RepRef{nullptr, 0};
- }
-
- // Consume possible external
- if (rep.rep->tag == EXTERNAL) {
- statistics_.node_count++;
- statistics_.node_counts.external++;
- size_t size = rep.rep->length + sizeof(CordRepExternalImpl<intptr_t>);
- memory_usage.Add(size, rep.refcount);
- return RepRef{nullptr, 0};
- }
-
- return rep;
- }
-
- // Analyzes the provided concat node in a flattened recursive way.
- void AnalyzeConcat(RepRef rep) {
- y_absl::InlinedVector<RepRef, 47> pending;
-
- while (rep.rep != nullptr) {
- const CordRepConcat* concat = rep.rep->concat();
- RepRef left = rep.Child(concat->left);
- RepRef right = rep.Child(concat->right);
-
- statistics_.node_count++;
- statistics_.node_counts.concat++;
- memory_usage_.Add(sizeof(CordRepConcat), rep.refcount);
-
- right = AssertConcat(CountLinearReps(right, memory_usage_));
- rep = AssertConcat(CountLinearReps(left, memory_usage_));
- if (rep.rep != nullptr) {
- if (right.rep != nullptr) {
- pending.push_back(right);
- }
- } else if (right.rep != nullptr) {
- rep = right;
- } else if (!pending.empty()) {
- rep = pending.back();
- pending.pop_back();
- }
- }
- }
-
- // Analyzes the provided ring.
- void AnalyzeRing(RepRef rep) {
- statistics_.node_count++;
- statistics_.node_counts.ring++;
- const CordRepRing* ring = rep.rep->ring();
- memory_usage_.Add(CordRepRing::AllocSize(ring->capacity()), rep.refcount);
- ring->ForEach([&](CordRepRing::index_type pos) {
- CountLinearReps(rep.Child(ring->entry_child(pos)), memory_usage_);
- });
- }
-
- // Analyzes the provided btree.
- void AnalyzeBtree(RepRef rep) {
- statistics_.node_count++;
- statistics_.node_counts.btree++;
- memory_usage_.Add(sizeof(CordRepBtree), rep.refcount);
- const CordRepBtree* tree = rep.rep->btree();
- if (tree->height() > 0) {
- for (CordRep* edge : tree->Edges()) {
- AnalyzeBtree(rep.Child(edge));
- }
- } else {
- for (CordRep* edge : tree->Edges()) {
- CountLinearReps(rep.Child(edge), memory_usage_);
- }
- }
- }
-
- CordzStatistics& statistics_;
- MemoryUsage memory_usage_;
-};
-
-} // namespace
-
-CordzInfo* CordzInfo::Head(const CordzSnapshot& snapshot) {
- ABSL_ASSERT(snapshot.is_snapshot());
-
- // We can do an 'unsafe' load of 'head', as we are guaranteed that the
- // instance it points to is kept alive by the provided CordzSnapshot, so we
- // can simply return the current value using an acquire load.
- // We do enforce in DEBUG builds that the 'head' value is present in the
- // delete queue: ODR violations may lead to 'snapshot' and 'global_list_'
- // being in different libraries / modules.
- CordzInfo* head = global_list_.head.load(std::memory_order_acquire);
- ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(head));
- return head;
-}
-
-CordzInfo* CordzInfo::Next(const CordzSnapshot& snapshot) const {
- ABSL_ASSERT(snapshot.is_snapshot());
-
- // Similar to the 'Head()' function, we do not need a mutex here.
- CordzInfo* next = ci_next_.load(std::memory_order_acquire);
- ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(this));
- ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(next));
- return next;
-}
-
-void CordzInfo::TrackCord(InlineData& cord, MethodIdentifier method) {
- assert(cord.is_tree());
- assert(!cord.is_profiled());
- CordzInfo* cordz_info = new CordzInfo(cord.as_tree(), nullptr, method);
- cord.set_cordz_info(cordz_info);
- cordz_info->Track();
-}
-
-void CordzInfo::TrackCord(InlineData& cord, const InlineData& src,
- MethodIdentifier method) {
- assert(cord.is_tree());
- assert(src.is_tree());
-
- // Unsample the current cord, as it is being replaced with 'src',
- // so any method history is no longer relevant.
- CordzInfo* cordz_info = cord.cordz_info();
- if (cordz_info != nullptr) cordz_info->Untrack();
-
- // Start new cord sample
- cordz_info = new CordzInfo(cord.as_tree(), src.cordz_info(), method);
- cord.set_cordz_info(cordz_info);
- cordz_info->Track();
-}
-
-void CordzInfo::MaybeTrackCordImpl(InlineData& cord, const InlineData& src,
- MethodIdentifier method) {
- if (src.is_profiled()) {
- TrackCord(cord, src, method);
- } else if (cord.is_profiled()) {
- cord.cordz_info()->Untrack();
- cord.clear_cordz_info();
- }
-}
-
-CordzInfo::MethodIdentifier CordzInfo::GetParentMethod(const CordzInfo* src) {
- if (src == nullptr) return MethodIdentifier::kUnknown;
- return src->parent_method_ != MethodIdentifier::kUnknown ? src->parent_method_
- : src->method_;
-}
-
-int CordzInfo::FillParentStack(const CordzInfo* src, void** stack) {
- assert(stack);
- if (src == nullptr) return 0;
- if (src->parent_stack_depth_) {
- memcpy(stack, src->parent_stack_, src->parent_stack_depth_ * sizeof(void*));
- return src->parent_stack_depth_;
- }
- memcpy(stack, src->stack_, src->stack_depth_ * sizeof(void*));
- return src->stack_depth_;
-}
-
-CordzInfo::CordzInfo(CordRep* rep, const CordzInfo* src,
- MethodIdentifier method)
- : rep_(rep),
- stack_depth_(y_absl::GetStackTrace(stack_, /*max_depth=*/kMaxStackDepth,
- /*skip_count=*/1)),
- parent_stack_depth_(FillParentStack(src, parent_stack_)),
- method_(method),
- parent_method_(GetParentMethod(src)),
- create_time_(y_absl::Now()) {
- update_tracker_.LossyAdd(method);
- if (src) {
- // Copy parent counters.
- update_tracker_.LossyAdd(src->update_tracker_);
- }
-}
-
-CordzInfo::~CordzInfo() {
- // `rep_` is potentially kept alive if CordzInfo is included
- // in a collection snapshot (which should be rare).
- if (ABSL_PREDICT_FALSE(rep_)) {
- CordRep::Unref(rep_);
- }
-}
-
-void CordzInfo::Track() {
- SpinLockHolder l(&list_->mutex);
-
- CordzInfo* const head = list_->head.load(std::memory_order_acquire);
- if (head != nullptr) {
- head->ci_prev_.store(this, std::memory_order_release);
- }
- ci_next_.store(head, std::memory_order_release);
- list_->head.store(this, std::memory_order_release);
-}
-
-void CordzInfo::Untrack() {
- ODRCheck();
- {
- SpinLockHolder l(&list_->mutex);
-
- CordzInfo* const head = list_->head.load(std::memory_order_acquire);
- CordzInfo* const next = ci_next_.load(std::memory_order_acquire);
- CordzInfo* const prev = ci_prev_.load(std::memory_order_acquire);
-
- if (next) {
- ABSL_ASSERT(next->ci_prev_.load(std::memory_order_acquire) == this);
- next->ci_prev_.store(prev, std::memory_order_release);
- }
- if (prev) {
- ABSL_ASSERT(head != this);
- ABSL_ASSERT(prev->ci_next_.load(std::memory_order_acquire) == this);
- prev->ci_next_.store(next, std::memory_order_release);
- } else {
- ABSL_ASSERT(head == this);
- list_->head.store(next, std::memory_order_release);
- }
- }
-
- // We can no longer be discovered: perform a fast path check. If we are not
- // listed on any delete queue, we can directly delete this instance.
- if (SafeToDelete()) {
- UnsafeSetCordRep(nullptr);
- delete this;
- return;
- }
-
- // We are likely part of a snapshot; extend the life of the CordRep
- {
- y_absl::MutexLock lock(&mutex_);
- if (rep_) CordRep::Ref(rep_);
- }
- CordzHandle::Delete(this);
-}
-
-void CordzInfo::Lock(MethodIdentifier method)
- ABSL_EXCLUSIVE_LOCK_FUNCTION(mutex_) {
- mutex_.Lock();
- update_tracker_.LossyAdd(method);
- assert(rep_);
-}
-
-void CordzInfo::Unlock() ABSL_UNLOCK_FUNCTION(mutex_) {
- bool tracked = rep_ != nullptr;
- mutex_.Unlock();
- if (!tracked) {
- Untrack();
- }
-}
-
-y_absl::Span<void* const> CordzInfo::GetStack() const {
- return y_absl::MakeConstSpan(stack_, stack_depth_);
-}
-
-y_absl::Span<void* const> CordzInfo::GetParentStack() const {
- return y_absl::MakeConstSpan(parent_stack_, parent_stack_depth_);
-}
-
-CordzStatistics CordzInfo::GetCordzStatistics() const {
- CordzStatistics stats;
- stats.method = method_;
- stats.parent_method = parent_method_;
- stats.update_tracker = update_tracker_;
- if (CordRep* rep = RefCordRep()) {
- stats.size = rep->length;
- CordRepAnalyzer analyzer(stats);
- analyzer.AnalyzeCordRep(rep);
- CordRep::Unref(rep);
- }
- return stats;
-}
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "y_absl/strings/internal/cordz_info.h"
+
+#include "y_absl/base/config.h"
+#include "y_absl/base/internal/spinlock.h"
+#include "y_absl/container/inlined_vector.h"
+#include "y_absl/debugging/stacktrace.h"
+#include "y_absl/strings/internal/cord_internal.h"
+#include "y_absl/strings/internal/cord_rep_btree.h"
+#include "y_absl/strings/internal/cord_rep_ring.h"
+#include "y_absl/strings/internal/cordz_handle.h"
+#include "y_absl/strings/internal/cordz_statistics.h"
+#include "y_absl/strings/internal/cordz_update_tracker.h"
+#include "y_absl/synchronization/mutex.h"
+#include "y_absl/types/span.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+using ::y_absl::base_internal::SpinLockHolder;
+
+constexpr int CordzInfo::kMaxStackDepth;
+
+ABSL_CONST_INIT CordzInfo::List CordzInfo::global_list_{y_absl::kConstInit};
+
+namespace {
+
+// CordRepAnalyzer performs the analysis of a cord.
+//
+// It computes absolute node counts and total memory usage, and an 'estimated
+// fair share memory usage' statistic.
+// Conceptually, it divides the 'memory usage' at each location in the 'cord
+// graph' by the cumulative reference count of that location. The cumulative
+// reference count is the factored total of all edges leading into that node.
+//
+// The top level node is treated specially: we assume the current thread
+// (typically called from the CordzHandler) holds a reference purely to
+// perform a safe analysis, and is not part of the application. So we
+// subtract 1 from the reference count of the top node to compute the
+// 'application fair share' excluding the reference of the current thread.
+//
+// An example of fair sharing, and why we multiply reference counts:
+// Assume we have 2 CordReps, both being a Substring referencing a Flat:
+// CordSubstring A (refcount = 5) --> child Flat C (refcount = 2)
+// CordSubstring B (refcount = 9) --> child Flat C (refcount = 2)
+//
+// Flat C has 2 incoming edges from the 2 substrings (refcount = 2) and is not
+// referenced directly anywhere else. Translated into a 'fair share', we then
+// attribute 50% of the memory (memory / refcount = 2) to each incoming edge.
+// Rep A has a refcount of 5, so we attribute each incoming edge 1 / 5th of the
+// memory cost below it, i.e.: the fair share of Rep A of the memory used by C
+// is then 'memory C / (refcount C * refcount A) + (memory A / refcount A)'.
+// It is also easy to see how all incoming edges add up to 100%.
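+//
+// Plugging illustrative numbers into the formula above (these values are not
+// part of the original example): with memory C = 100, memory A = 40,
+// refcount C = 2 and refcount A = 5, each of the 5 edges into A is attributed
+// 100 / (2 * 5) + 40 / 5 = 18 bytes. The 5 edges together account for
+// 90 bytes: A's own 40 bytes plus A's 50% share (50 bytes) of C.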
+class CordRepAnalyzer {
+ public:
+ // Creates an analyzer instance binding to `statistics`.
+ explicit CordRepAnalyzer(CordzStatistics& statistics)
+ : statistics_(statistics) {}
+
+ // Analyzes the memory statistics and node counts for the provided `rep`, and
+ // adds the results to `statistics`. Note that node counts and memory sizes
+  // are not initialized; computed values are added to any existing values.
+ void AnalyzeCordRep(const CordRep* rep) {
+ // Process all linear nodes.
+    // As per the class comments, use refcount - 1 on the top level node, as the
+ // top level node is assumed to be referenced only for analysis purposes.
+ size_t refcount = rep->refcount.Get();
+ RepRef repref{rep, (refcount > 1) ? refcount - 1 : 1};
+
+ // Process all top level linear nodes (substrings and flats).
+ repref = CountLinearReps(repref, memory_usage_);
+
+ if (repref.rep != nullptr) {
+ if (repref.rep->tag == RING) {
+ AnalyzeRing(repref);
+ } else if (repref.rep->tag == BTREE) {
+ AnalyzeBtree(repref);
+ } else if (repref.rep->tag == CONCAT) {
+ AnalyzeConcat(repref);
+ } else {
+ // We should have either a concat, btree, or ring node if not null.
+ assert(false);
+ }
+ }
+
+ // Adds values to output
+ statistics_.estimated_memory_usage += memory_usage_.total;
+ statistics_.estimated_fair_share_memory_usage +=
+ static_cast<size_t>(memory_usage_.fair_share);
+ }
+
+ private:
+ // RepRef identifies a CordRep* inside the Cord tree with its cumulative
+ // refcount including itself. For example, a tree consisting of a substring
+ // with a refcount of 3 and a child flat with a refcount of 4 will have RepRef
+ // refcounts of 3 and 12 respectively.
+ struct RepRef {
+ const CordRep* rep;
+ size_t refcount;
+
+ // Returns a 'child' RepRef which contains the cumulative reference count of
+ // this instance multiplied by the child's reference count.
+ RepRef Child(const CordRep* child) const {
+ return RepRef{child, refcount * child->refcount.Get()};
+ }
+ };
+
+ // Memory usage values
+ struct MemoryUsage {
+ size_t total = 0;
+ double fair_share = 0.0;
+
+    // Adds `size` memory usage to this class, with a cumulative (recursive)
+    // reference count of `refcount`.
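+    // E.g. (illustrative): Add(120, 4) adds 120 to `total` and 30.0 to
+    // `fair_share`.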
+ void Add(size_t size, size_t refcount) {
+ total += size;
+ fair_share += static_cast<double>(size) / refcount;
+ }
+ };
+
+ // Returns `rr` if `rr.rep` is not null and a CONCAT type.
+ // Asserts that `rr.rep` is a concat node or null.
+ static RepRef AssertConcat(RepRef repref) {
+ const CordRep* rep = repref.rep;
+ assert(rep == nullptr || rep->tag == CONCAT);
+ return (rep != nullptr && rep->tag == CONCAT) ? repref : RepRef{nullptr, 0};
+ }
+
+  // Counts a flat of the provided allocated size.
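+  // E.g. (illustrative): a flat with an allocated size of 96 bytes increments
+  // `node_counts.flat` and `node_counts.flat_128`.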
+ void CountFlat(size_t size) {
+ statistics_.node_count++;
+ statistics_.node_counts.flat++;
+ if (size <= 64) {
+ statistics_.node_counts.flat_64++;
+ } else if (size <= 128) {
+ statistics_.node_counts.flat_128++;
+ } else if (size <= 256) {
+ statistics_.node_counts.flat_256++;
+ } else if (size <= 512) {
+ statistics_.node_counts.flat_512++;
+ } else if (size <= 1024) {
+ statistics_.node_counts.flat_1k++;
+ }
+ }
+
+  // Processes 'linear' reps (substring, flat, external) not requiring iteration
+  // or recursion. Returns RepRef{null} if all reps were processed, else returns
+  // the top-most non-linear concat or ring cordrep.
+  // Node counts are updated into `statistics_`, memory usage is updated into
+  // `memory_usage`, which typically references `memory_usage_` except for ring
+  // buffers where we count children unrounded.
+ RepRef CountLinearReps(RepRef rep, MemoryUsage& memory_usage) {
+ // Consume all substrings
+ while (rep.rep->tag == SUBSTRING) {
+ statistics_.node_count++;
+ statistics_.node_counts.substring++;
+ memory_usage.Add(sizeof(CordRepSubstring), rep.refcount);
+ rep = rep.Child(rep.rep->substring()->child);
+ }
+
+ // Consume possible FLAT
+ if (rep.rep->tag >= FLAT) {
+ size_t size = rep.rep->flat()->AllocatedSize();
+ CountFlat(size);
+ memory_usage.Add(size, rep.refcount);
+ return RepRef{nullptr, 0};
+ }
+
+ // Consume possible external
+ if (rep.rep->tag == EXTERNAL) {
+ statistics_.node_count++;
+ statistics_.node_counts.external++;
+ size_t size = rep.rep->length + sizeof(CordRepExternalImpl<intptr_t>);
+ memory_usage.Add(size, rep.refcount);
+ return RepRef{nullptr, 0};
+ }
+
+ return rep;
+ }
+
+ // Analyzes the provided concat node in a flattened recursive way.
+ void AnalyzeConcat(RepRef rep) {
+ y_absl::InlinedVector<RepRef, 47> pending;
+
+ while (rep.rep != nullptr) {
+ const CordRepConcat* concat = rep.rep->concat();
+ RepRef left = rep.Child(concat->left);
+ RepRef right = rep.Child(concat->right);
+
+ statistics_.node_count++;
+ statistics_.node_counts.concat++;
+ memory_usage_.Add(sizeof(CordRepConcat), rep.refcount);
+
+ right = AssertConcat(CountLinearReps(right, memory_usage_));
+ rep = AssertConcat(CountLinearReps(left, memory_usage_));
+ if (rep.rep != nullptr) {
+ if (right.rep != nullptr) {
+ pending.push_back(right);
+ }
+ } else if (right.rep != nullptr) {
+ rep = right;
+ } else if (!pending.empty()) {
+ rep = pending.back();
+ pending.pop_back();
+ }
+ }
+ }
+
+ // Analyzes the provided ring.
+ void AnalyzeRing(RepRef rep) {
+ statistics_.node_count++;
+ statistics_.node_counts.ring++;
+ const CordRepRing* ring = rep.rep->ring();
+ memory_usage_.Add(CordRepRing::AllocSize(ring->capacity()), rep.refcount);
+ ring->ForEach([&](CordRepRing::index_type pos) {
+ CountLinearReps(rep.Child(ring->entry_child(pos)), memory_usage_);
+ });
+ }
+
+ // Analyzes the provided btree.
+ void AnalyzeBtree(RepRef rep) {
+ statistics_.node_count++;
+ statistics_.node_counts.btree++;
+ memory_usage_.Add(sizeof(CordRepBtree), rep.refcount);
+ const CordRepBtree* tree = rep.rep->btree();
+ if (tree->height() > 0) {
+ for (CordRep* edge : tree->Edges()) {
+ AnalyzeBtree(rep.Child(edge));
+ }
+ } else {
+ for (CordRep* edge : tree->Edges()) {
+ CountLinearReps(rep.Child(edge), memory_usage_);
+ }
+ }
+ }
+
+ CordzStatistics& statistics_;
+ MemoryUsage memory_usage_;
+};
+
+} // namespace
+
+CordzInfo* CordzInfo::Head(const CordzSnapshot& snapshot) {
+ ABSL_ASSERT(snapshot.is_snapshot());
+
+ // We can do an 'unsafe' load of 'head', as we are guaranteed that the
+ // instance it points to is kept alive by the provided CordzSnapshot, so we
+ // can simply return the current value using an acquire load.
+ // We do enforce in DEBUG builds that the 'head' value is present in the
+ // delete queue: ODR violations may lead to 'snapshot' and 'global_list_'
+ // being in different libraries / modules.
+ CordzInfo* head = global_list_.head.load(std::memory_order_acquire);
+ ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(head));
+ return head;
+}
+
+CordzInfo* CordzInfo::Next(const CordzSnapshot& snapshot) const {
+ ABSL_ASSERT(snapshot.is_snapshot());
+
+ // Similar to the 'Head()' function, we do not need a mutex here.
+ CordzInfo* next = ci_next_.load(std::memory_order_acquire);
+ ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(this));
+ ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(next));
+ return next;
+}
+
+void CordzInfo::TrackCord(InlineData& cord, MethodIdentifier method) {
+ assert(cord.is_tree());
+ assert(!cord.is_profiled());
+ CordzInfo* cordz_info = new CordzInfo(cord.as_tree(), nullptr, method);
+ cord.set_cordz_info(cordz_info);
+ cordz_info->Track();
+}
+
+void CordzInfo::TrackCord(InlineData& cord, const InlineData& src,
+ MethodIdentifier method) {
+ assert(cord.is_tree());
+ assert(src.is_tree());
+
+  // Unsample the current cord, as it is being replaced with 'src', so any
+  // method history is no longer relevant.
+ CordzInfo* cordz_info = cord.cordz_info();
+ if (cordz_info != nullptr) cordz_info->Untrack();
+
+ // Start new cord sample
+ cordz_info = new CordzInfo(cord.as_tree(), src.cordz_info(), method);
+ cord.set_cordz_info(cordz_info);
+ cordz_info->Track();
+}
+
+void CordzInfo::MaybeTrackCordImpl(InlineData& cord, const InlineData& src,
+ MethodIdentifier method) {
+ if (src.is_profiled()) {
+ TrackCord(cord, src, method);
+ } else if (cord.is_profiled()) {
+ cord.cordz_info()->Untrack();
+ cord.clear_cordz_info();
+ }
+}
+
+CordzInfo::MethodIdentifier CordzInfo::GetParentMethod(const CordzInfo* src) {
+ if (src == nullptr) return MethodIdentifier::kUnknown;
+ return src->parent_method_ != MethodIdentifier::kUnknown ? src->parent_method_
+ : src->method_;
+}
+
+int CordzInfo::FillParentStack(const CordzInfo* src, void** stack) {
+ assert(stack);
+ if (src == nullptr) return 0;
+ if (src->parent_stack_depth_) {
+ memcpy(stack, src->parent_stack_, src->parent_stack_depth_ * sizeof(void*));
+ return src->parent_stack_depth_;
+ }
+ memcpy(stack, src->stack_, src->stack_depth_ * sizeof(void*));
+ return src->stack_depth_;
+}
+
+CordzInfo::CordzInfo(CordRep* rep, const CordzInfo* src,
+ MethodIdentifier method)
+ : rep_(rep),
+ stack_depth_(y_absl::GetStackTrace(stack_, /*max_depth=*/kMaxStackDepth,
+ /*skip_count=*/1)),
+ parent_stack_depth_(FillParentStack(src, parent_stack_)),
+ method_(method),
+ parent_method_(GetParentMethod(src)),
+ create_time_(y_absl::Now()) {
+ update_tracker_.LossyAdd(method);
+ if (src) {
+ // Copy parent counters.
+ update_tracker_.LossyAdd(src->update_tracker_);
+ }
+}
+
+CordzInfo::~CordzInfo() {
+ // `rep_` is potentially kept alive if CordzInfo is included
+ // in a collection snapshot (which should be rare).
+ if (ABSL_PREDICT_FALSE(rep_)) {
+ CordRep::Unref(rep_);
+ }
+}
+
+void CordzInfo::Track() {
+ SpinLockHolder l(&list_->mutex);
+
+ CordzInfo* const head = list_->head.load(std::memory_order_acquire);
+ if (head != nullptr) {
+ head->ci_prev_.store(this, std::memory_order_release);
+ }
+ ci_next_.store(head, std::memory_order_release);
+ list_->head.store(this, std::memory_order_release);
+}
+
+void CordzInfo::Untrack() {
+ ODRCheck();
+ {
+ SpinLockHolder l(&list_->mutex);
+
+ CordzInfo* const head = list_->head.load(std::memory_order_acquire);
+ CordzInfo* const next = ci_next_.load(std::memory_order_acquire);
+ CordzInfo* const prev = ci_prev_.load(std::memory_order_acquire);
+
+ if (next) {
+ ABSL_ASSERT(next->ci_prev_.load(std::memory_order_acquire) == this);
+ next->ci_prev_.store(prev, std::memory_order_release);
+ }
+ if (prev) {
+ ABSL_ASSERT(head != this);
+ ABSL_ASSERT(prev->ci_next_.load(std::memory_order_acquire) == this);
+ prev->ci_next_.store(next, std::memory_order_release);
+ } else {
+ ABSL_ASSERT(head == this);
+ list_->head.store(next, std::memory_order_release);
+ }
+ }
+
+  // We can no longer be discovered. Fast path: if we are not listed on any
+  // delete queue, we can delete this instance directly.
+ if (SafeToDelete()) {
+ UnsafeSetCordRep(nullptr);
+ delete this;
+ return;
+ }
+
+  // We are likely part of a snapshot; extend the life of the CordRep.
+ {
+ y_absl::MutexLock lock(&mutex_);
+ if (rep_) CordRep::Ref(rep_);
+ }
+ CordzHandle::Delete(this);
+}
+
+void CordzInfo::Lock(MethodIdentifier method)
+ ABSL_EXCLUSIVE_LOCK_FUNCTION(mutex_) {
+ mutex_.Lock();
+ update_tracker_.LossyAdd(method);
+ assert(rep_);
+}
+
+void CordzInfo::Unlock() ABSL_UNLOCK_FUNCTION(mutex_) {
+ bool tracked = rep_ != nullptr;
+ mutex_.Unlock();
+ if (!tracked) {
+ Untrack();
+ }
+}
+
+y_absl::Span<void* const> CordzInfo::GetStack() const {
+ return y_absl::MakeConstSpan(stack_, stack_depth_);
+}
+
+y_absl::Span<void* const> CordzInfo::GetParentStack() const {
+ return y_absl::MakeConstSpan(parent_stack_, parent_stack_depth_);
+}
+
+CordzStatistics CordzInfo::GetCordzStatistics() const {
+ CordzStatistics stats;
+ stats.method = method_;
+ stats.parent_method = parent_method_;
+ stats.update_tracker = update_tracker_;
+ if (CordRep* rep = RefCordRep()) {
+ stats.size = rep->length;
+ CordRepAnalyzer analyzer(stats);
+ analyzer.AnalyzeCordRep(rep);
+ CordRep::Unref(rep);
+ }
+ return stats;
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info.h
index e24214d259..4057e9a8a8 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info.h
@@ -1,298 +1,298 @@
-// Copyright 2019 The Abseil Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#ifndef ABSL_STRINGS_CORDZ_INFO_H_
-#define ABSL_STRINGS_CORDZ_INFO_H_
-
-#include <atomic>
-#include <cstdint>
-#include <functional>
-
-#include "y_absl/base/config.h"
-#include "y_absl/base/internal/raw_logging.h"
-#include "y_absl/base/internal/spinlock.h"
-#include "y_absl/base/thread_annotations.h"
-#include "y_absl/strings/internal/cord_internal.h"
-#include "y_absl/strings/internal/cordz_functions.h"
-#include "y_absl/strings/internal/cordz_handle.h"
-#include "y_absl/strings/internal/cordz_statistics.h"
-#include "y_absl/strings/internal/cordz_update_tracker.h"
-#include "y_absl/synchronization/mutex.h"
-#include "y_absl/types/span.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-// CordzInfo tracks a profiled Cord. Each of these objects can be in two places.
-// If a Cord is alive, the CordzInfo will be in the global_cordz_infos map, and
-// can also be retrieved via the linked list starting with
-// global_cordz_infos_head and continued via the cordz_info_next() method. When
-// a Cord has reached the end of its lifespan, the CordzInfo object will be
-// migrated out of the global_cordz_infos list and the global_cordz_infos_map,
-// and will either be deleted or appended to the global_delete_queue. If it is
-// placed on the global_delete_queue, the CordzInfo object will be cleaned up
-// in the destructor of a CordzSampleToken object.
-class ABSL_LOCKABLE CordzInfo : public CordzHandle {
- public:
- using MethodIdentifier = CordzUpdateTracker::MethodIdentifier;
-
- // TrackCord creates a CordzInfo instance which tracks important metrics of
-  // a sampled cord, and stores the created CordzInfo instance into `cord`. All
- // CordzInfo instances are placed in a global list which is used to discover
- // and snapshot all actively tracked cords. Callers are responsible for
-  // calling Untrack() before the tracked Cord instance is deleted, or to
- // stop tracking the sampled Cord. Callers are also responsible for guarding
- // changes to the 'tree' value of a Cord (InlineData.tree) through the Lock()
- // and Unlock() calls. Any change resulting in a new tree value for the cord
- // requires a call to SetCordRep() before the old tree has been unreffed
- // and/or deleted. `method` identifies the Cord public API method initiating
- // the cord to be sampled.
- // Requires `cord` to hold a tree, and `cord.cordz_info()` to be null.
- static void TrackCord(InlineData& cord, MethodIdentifier method);
-
- // Identical to TrackCord(), except that this function fills the
- // `parent_stack` and `parent_method` properties of the returned CordzInfo
- // instance from the provided `src` instance if `src` is sampled.
- // This function should be used for sampling 'copy constructed' and 'copy
-  // assigned' cords. This function allows `cord` to be already sampled, in
- // which case the CordzInfo will be newly created from `src`.
- static void TrackCord(InlineData& cord, const InlineData& src,
- MethodIdentifier method);
-
- // Maybe sample the cord identified by 'cord' for method 'method'.
- // Uses `cordz_should_profile` to randomly pick cords to be sampled, and if
- // so, invokes `TrackCord` to start sampling `cord`.
- static void MaybeTrackCord(InlineData& cord, MethodIdentifier method);
-
- // Maybe sample the cord identified by 'cord' for method 'method'.
- // `src` identifies a 'parent' cord which is assigned to `cord`, typically the
- // input cord for a copy constructor, or an assign method such as `operator=`
- // `cord` will be sampled if (and only if) `src` is sampled.
- // If `cord` is currently being sampled and `src` is not being sampled, then
- // this function will stop sampling the cord and reset the cord's cordz_info.
- //
- // Previously this function defined that `cord` will be sampled if either
- // `src` is sampled, or if `cord` is randomly picked for sampling. However,
-  // this can cause issues, as there may be paths where some cord is assigned
-  // an indirect copy of its own value. Such a 'string of copies' would then
-  // remain sampled (`src.is_profiled`), and assigning such a cord back to
-  // 'itself' creates a cycle where the cord converges to 'always sampled'.
- //
- // For example:
- //
- // Cord x;
- // for (...) {
- // // Copy ctor --> y.is_profiled := x.is_profiled | random(...)
- // Cord y = x;
- // ...
- // // Assign x = y --> x.is_profiled = y.is_profiled | random(...)
- // // ==> x.is_profiled |= random(...)
- // // ==> x converges to 'always profiled'
- // x = y;
- // }
- static void MaybeTrackCord(InlineData& cord, const InlineData& src,
- MethodIdentifier method);
-
- // Stops tracking changes for a sampled cord, and deletes the provided info.
- // This function must be called before the sampled cord instance is deleted,
- // and before the root cordrep of the sampled cord is unreffed.
- // This function may extend the lifetime of the cordrep in cases where the
- // CordInfo instance is being held by a concurrent collection thread.
- void Untrack();
-
-  // Invokes Untrack() on `info` if `info` is not null.
- static void MaybeUntrackCord(CordzInfo* info);
-
- CordzInfo() = delete;
- CordzInfo(const CordzInfo&) = delete;
- CordzInfo& operator=(const CordzInfo&) = delete;
-
- // Retrieves the oldest existing CordzInfo.
- static CordzInfo* Head(const CordzSnapshot& snapshot)
- ABSL_NO_THREAD_SAFETY_ANALYSIS;
-
- // Retrieves the next oldest existing CordzInfo older than 'this' instance.
- CordzInfo* Next(const CordzSnapshot& snapshot) const
- ABSL_NO_THREAD_SAFETY_ANALYSIS;
-
- // Locks this instance for the update identified by `method`.
- // Increases the count for `method` in `update_tracker`.
- void Lock(MethodIdentifier method) ABSL_EXCLUSIVE_LOCK_FUNCTION(mutex_);
-
- // Unlocks this instance. If the contained `rep` has been set to null
- // indicating the Cord has been cleared or is otherwise no longer sampled,
- // then this method will delete this CordzInfo instance.
- void Unlock() ABSL_UNLOCK_FUNCTION(mutex_);
-
- // Asserts that this CordzInfo instance is locked.
- void AssertHeld() ABSL_ASSERT_EXCLUSIVE_LOCK(mutex_);
-
-  // Updates the `rep` property of this instance. This method is invoked by
- // Cord logic each time the root node of a sampled Cord changes, and before
- // the old root reference count is deleted. This guarantees that collection
- // code can always safely take a reference on the tracked cord.
- // Requires a lock to be held through the `Lock()` method.
- // TODO(b/117940323): annotate with ABSL_EXCLUSIVE_LOCKS_REQUIRED once all
- // Cord code is in a state where this can be proven true by the compiler.
- void SetCordRep(CordRep* rep);
-
- // Returns the current `rep` property of this instance with a reference
- // added, or null if this instance represents a cord that has since been
- // deleted or untracked.
- CordRep* RefCordRep() const ABSL_LOCKS_EXCLUDED(mutex_);
-
- // Returns the current value of `rep_` for testing purposes only.
- CordRep* GetCordRepForTesting() const ABSL_NO_THREAD_SAFETY_ANALYSIS {
- return rep_;
- }
-
- // Sets the current value of `rep_` for testing purposes only.
- void SetCordRepForTesting(CordRep* rep) ABSL_NO_THREAD_SAFETY_ANALYSIS {
- rep_ = rep;
- }
-
- // Returns the stack trace for where the cord was first sampled. Cords are
- // potentially sampled when they promote from an inlined cord to a tree or
- // ring representation, which is not necessarily the location where the cord
- // was first created. Some cords are created as inlined cords, and only as
- // data is added do they become a non-inlined cord. However, typically the
- // location represents reasonably well where the cord is 'created'.
- y_absl::Span<void* const> GetStack() const;
-
- // Returns the stack trace for a sampled cord's 'parent stack trace'. This
- // value may be set if the cord is sampled (promoted) after being created
- // from, or being assigned the value of an existing (sampled) cord.
- y_absl::Span<void* const> GetParentStack() const;
-
- // Retrieves the CordzStatistics associated with this Cord. The statistics
- // are only updated when a Cord goes through a mutation, such as an Append
- // or RemovePrefix.
- CordzStatistics GetCordzStatistics() const;
-
- private:
- using SpinLock = y_absl::base_internal::SpinLock;
- using SpinLockHolder = ::y_absl::base_internal::SpinLockHolder;
-
- // Global cordz info list. CordzInfo stores a pointer to the global list
- // instance to harden against ODR violations.
- struct List {
- constexpr explicit List(y_absl::ConstInitType)
- : mutex(y_absl::kConstInit,
- y_absl::base_internal::SCHEDULE_COOPERATIVE_AND_KERNEL) {}
-
- SpinLock mutex;
- std::atomic<CordzInfo*> head ABSL_GUARDED_BY(mutex){nullptr};
- };
-
- static constexpr int kMaxStackDepth = 64;
-
- explicit CordzInfo(CordRep* rep, const CordzInfo* src,
- MethodIdentifier method);
- ~CordzInfo() override;
-
- // Sets `rep_` without holding a lock.
- void UnsafeSetCordRep(CordRep* rep) ABSL_NO_THREAD_SAFETY_ANALYSIS;
-
- void Track();
-
-  // Returns the parent method from `src`, which is either `parent_method_` or
-  // `method_` depending on whether `parent_method_` is kUnknown.
- // Returns kUnknown if `src` is null.
- static MethodIdentifier GetParentMethod(const CordzInfo* src);
-
-  // Fills the provided stack from `src`, copying either `parent_stack_` or
-  // `stack_` depending on whether `parent_stack_` is empty, returning the size
-  // of the parent stack.
- // Returns 0 if `src` is null.
- static int FillParentStack(const CordzInfo* src, void** stack);
-
- void ODRCheck() const {
-#ifndef NDEBUG
- ABSL_RAW_CHECK(list_ == &global_list_, "ODR violation in Cord");
-#endif
- }
-
- // Non-inlined implementation of `MaybeTrackCord`, which is executed if
- // either `src` is sampled or `cord` is sampled, and either untracks or
- // tracks `cord` as documented per `MaybeTrackCord`.
- static void MaybeTrackCordImpl(InlineData& cord, const InlineData& src,
- MethodIdentifier method);
-
- ABSL_CONST_INIT static List global_list_;
- List* const list_ = &global_list_;
-
- // ci_prev_ and ci_next_ require the global list mutex to be held.
- // Unfortunately we can't use thread annotations such that the thread safety
- // analysis understands that list_ and global_list_ are one and the same.
- std::atomic<CordzInfo*> ci_prev_{nullptr};
- std::atomic<CordzInfo*> ci_next_{nullptr};
-
- mutable y_absl::Mutex mutex_;
- CordRep* rep_ ABSL_GUARDED_BY(mutex_);
-
- void* stack_[kMaxStackDepth];
- void* parent_stack_[kMaxStackDepth];
- const int stack_depth_;
- const int parent_stack_depth_;
- const MethodIdentifier method_;
- const MethodIdentifier parent_method_;
- CordzUpdateTracker update_tracker_;
- const y_absl::Time create_time_;
-};
-
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeTrackCord(
- InlineData& cord, MethodIdentifier method) {
- if (ABSL_PREDICT_FALSE(cordz_should_profile())) {
- TrackCord(cord, method);
- }
-}
-
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeTrackCord(
- InlineData& cord, const InlineData& src, MethodIdentifier method) {
- if (ABSL_PREDICT_FALSE(InlineData::is_either_profiled(cord, src))) {
- MaybeTrackCordImpl(cord, src, method);
- }
-}
-
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeUntrackCord(
- CordzInfo* info) {
- if (ABSL_PREDICT_FALSE(info)) {
- info->Untrack();
- }
-}
-
-inline void CordzInfo::AssertHeld() ABSL_ASSERT_EXCLUSIVE_LOCK(mutex_) {
-#ifndef NDEBUG
- mutex_.AssertHeld();
-#endif
-}
-
-inline void CordzInfo::SetCordRep(CordRep* rep) {
- AssertHeld();
- rep_ = rep;
-}
-
-inline void CordzInfo::UnsafeSetCordRep(CordRep* rep) { rep_ = rep; }
-
-inline CordRep* CordzInfo::RefCordRep() const ABSL_LOCKS_EXCLUDED(mutex_) {
- MutexLock lock(&mutex_);
- return rep_ ? CordRep::Ref(rep_) : nullptr;
-}
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
-
-#endif // ABSL_STRINGS_CORDZ_INFO_H_
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_CORDZ_INFO_H_
+#define ABSL_STRINGS_CORDZ_INFO_H_
+
+#include <atomic>
+#include <cstdint>
+#include <functional>
+
+#include "y_absl/base/config.h"
+#include "y_absl/base/internal/raw_logging.h"
+#include "y_absl/base/internal/spinlock.h"
+#include "y_absl/base/thread_annotations.h"
+#include "y_absl/strings/internal/cord_internal.h"
+#include "y_absl/strings/internal/cordz_functions.h"
+#include "y_absl/strings/internal/cordz_handle.h"
+#include "y_absl/strings/internal/cordz_statistics.h"
+#include "y_absl/strings/internal/cordz_update_tracker.h"
+#include "y_absl/synchronization/mutex.h"
+#include "y_absl/types/span.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordzInfo tracks a profiled Cord. Each of these objects can be in two places.
+// If a Cord is alive, the CordzInfo will be in the global_cordz_infos map, and
+// can also be retrieved via the linked list starting with
+// global_cordz_infos_head and continued via the cordz_info_next() method. When
+// a Cord has reached the end of its lifespan, the CordzInfo object will be
+// migrated out of the global_cordz_infos list and the global_cordz_infos_map,
+// and will either be deleted or appended to the global_delete_queue. If it is
+// placed on the global_delete_queue, the CordzInfo object will be cleaned up
+// in the destructor of a CordzSampleToken object.
+class ABSL_LOCKABLE CordzInfo : public CordzHandle {
+ public:
+ using MethodIdentifier = CordzUpdateTracker::MethodIdentifier;
+
+ // TrackCord creates a CordzInfo instance which tracks important metrics of
+  // a sampled cord, and stores the created CordzInfo instance into `cord`. All
+ // CordzInfo instances are placed in a global list which is used to discover
+ // and snapshot all actively tracked cords. Callers are responsible for
+  // calling Untrack() before the tracked Cord instance is deleted, or to
+ // stop tracking the sampled Cord. Callers are also responsible for guarding
+ // changes to the 'tree' value of a Cord (InlineData.tree) through the Lock()
+ // and Unlock() calls. Any change resulting in a new tree value for the cord
+ // requires a call to SetCordRep() before the old tree has been unreffed
+ // and/or deleted. `method` identifies the Cord public API method initiating
+ // the cord to be sampled.
+ // Requires `cord` to hold a tree, and `cord.cordz_info()` to be null.
+ static void TrackCord(InlineData& cord, MethodIdentifier method);
+
+ // Identical to TrackCord(), except that this function fills the
+ // `parent_stack` and `parent_method` properties of the returned CordzInfo
+ // instance from the provided `src` instance if `src` is sampled.
+ // This function should be used for sampling 'copy constructed' and 'copy
+  // assigned' cords. This function allows `cord` to be already sampled, in
+ // which case the CordzInfo will be newly created from `src`.
+ static void TrackCord(InlineData& cord, const InlineData& src,
+ MethodIdentifier method);
+
+ // Maybe sample the cord identified by 'cord' for method 'method'.
+ // Uses `cordz_should_profile` to randomly pick cords to be sampled, and if
+ // so, invokes `TrackCord` to start sampling `cord`.
+ static void MaybeTrackCord(InlineData& cord, MethodIdentifier method);
+
+ // Maybe sample the cord identified by 'cord' for method 'method'.
+ // `src` identifies a 'parent' cord which is assigned to `cord`, typically the
+ // input cord for a copy constructor, or an assign method such as `operator=`
+ // `cord` will be sampled if (and only if) `src` is sampled.
+ // If `cord` is currently being sampled and `src` is not being sampled, then
+ // this function will stop sampling the cord and reset the cord's cordz_info.
+ //
+ // Previously this function defined that `cord` will be sampled if either
+ // `src` is sampled, or if `cord` is randomly picked for sampling. However,
+  // this can cause issues, as there may be paths where some cord is assigned
+  // an indirect copy of its own value. Such a 'string of copies' would then
+  // remain sampled (`src.is_profiled`), and assigning such a cord back to
+  // 'itself' creates a cycle where the cord converges to 'always sampled'.
+ //
+ // For example:
+ //
+ // Cord x;
+ // for (...) {
+ // // Copy ctor --> y.is_profiled := x.is_profiled | random(...)
+ // Cord y = x;
+ // ...
+ // // Assign x = y --> x.is_profiled = y.is_profiled | random(...)
+ // // ==> x.is_profiled |= random(...)
+ // // ==> x converges to 'always profiled'
+ // x = y;
+ // }
+ static void MaybeTrackCord(InlineData& cord, const InlineData& src,
+ MethodIdentifier method);
+
+ // Stops tracking changes for a sampled cord, and deletes the provided info.
+ // This function must be called before the sampled cord instance is deleted,
+ // and before the root cordrep of the sampled cord is unreffed.
+ // This function may extend the lifetime of the cordrep in cases where the
+ // CordInfo instance is being held by a concurrent collection thread.
+ void Untrack();
+
+  // Invokes Untrack() on `info` if `info` is not null.
+ static void MaybeUntrackCord(CordzInfo* info);
+
+ CordzInfo() = delete;
+ CordzInfo(const CordzInfo&) = delete;
+ CordzInfo& operator=(const CordzInfo&) = delete;
+
+ // Retrieves the oldest existing CordzInfo.
+ static CordzInfo* Head(const CordzSnapshot& snapshot)
+ ABSL_NO_THREAD_SAFETY_ANALYSIS;
+
+ // Retrieves the next oldest existing CordzInfo older than 'this' instance.
+ CordzInfo* Next(const CordzSnapshot& snapshot) const
+ ABSL_NO_THREAD_SAFETY_ANALYSIS;
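+
+  // A minimal traversal sketch over all sampled cords (illustrative only;
+  // the loop body is up to the caller):
+  //
+  //   CordzSnapshot snapshot;
+  //   for (CordzInfo* info = CordzInfo::Head(snapshot); info != nullptr;
+  //        info = info->Next(snapshot)) {
+  //     CordzStatistics stats = info->GetCordzStatistics();
+  //   }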
+
+ // Locks this instance for the update identified by `method`.
+ // Increases the count for `method` in `update_tracker`.
+ void Lock(MethodIdentifier method) ABSL_EXCLUSIVE_LOCK_FUNCTION(mutex_);
+
+ // Unlocks this instance. If the contained `rep` has been set to null
+ // indicating the Cord has been cleared or is otherwise no longer sampled,
+ // then this method will delete this CordzInfo instance.
+ void Unlock() ABSL_UNLOCK_FUNCTION(mutex_);
+
+ // Asserts that this CordzInfo instance is locked.
+ void AssertHeld() ABSL_ASSERT_EXCLUSIVE_LOCK(mutex_);
+
+  // Updates the `rep` property of this instance. This method is invoked by
+ // Cord logic each time the root node of a sampled Cord changes, and before
+ // the old root reference count is deleted. This guarantees that collection
+ // code can always safely take a reference on the tracked cord.
+ // Requires a lock to be held through the `Lock()` method.
+ // TODO(b/117940323): annotate with ABSL_EXCLUSIVE_LOCKS_REQUIRED once all
+ // Cord code is in a state where this can be proven true by the compiler.
+ void SetCordRep(CordRep* rep);
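+
+  // A sketch of the update protocol described above (illustrative; `info` and
+  // `new_root` are hypothetical names, and a real call site would pass the
+  // actual mutating method rather than kUnknown):
+  //
+  //   info->Lock(MethodIdentifier::kUnknown);
+  //   info->SetCordRep(new_root);  // before the old root is unreffed
+  //   info->Unlock();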
+
+ // Returns the current `rep` property of this instance with a reference
+ // added, or null if this instance represents a cord that has since been
+ // deleted or untracked.
+ CordRep* RefCordRep() const ABSL_LOCKS_EXCLUDED(mutex_);
+
+ // Returns the current value of `rep_` for testing purposes only.
+ CordRep* GetCordRepForTesting() const ABSL_NO_THREAD_SAFETY_ANALYSIS {
+ return rep_;
+ }
+
+ // Sets the current value of `rep_` for testing purposes only.
+ void SetCordRepForTesting(CordRep* rep) ABSL_NO_THREAD_SAFETY_ANALYSIS {
+ rep_ = rep;
+ }
+
+ // Returns the stack trace for where the cord was first sampled. Cords are
+ // potentially sampled when they promote from an inlined cord to a tree or
+ // ring representation, which is not necessarily the location where the cord
+ // was first created. Some cords are created as inlined cords, and only as
+ // data is added do they become a non-inlined cord. However, typically the
+ // location represents reasonably well where the cord is 'created'.
+ y_absl::Span<void* const> GetStack() const;
+
+ // Returns the stack trace for a sampled cord's 'parent stack trace'. This
+ // value may be set if the cord is sampled (promoted) after being created
+ // from, or being assigned the value of an existing (sampled) cord.
+ y_absl::Span<void* const> GetParentStack() const;
+
+ // Retrieves the CordzStatistics associated with this Cord. The statistics
+ // are only updated when a Cord goes through a mutation, such as an Append
+ // or RemovePrefix.
+ CordzStatistics GetCordzStatistics() const;
+
+ private:
+ using SpinLock = y_absl::base_internal::SpinLock;
+ using SpinLockHolder = ::y_absl::base_internal::SpinLockHolder;
+
+ // Global cordz info list. CordzInfo stores a pointer to the global list
+ // instance to harden against ODR violations.
+ struct List {
+ constexpr explicit List(y_absl::ConstInitType)
+ : mutex(y_absl::kConstInit,
+ y_absl::base_internal::SCHEDULE_COOPERATIVE_AND_KERNEL) {}
+
+ SpinLock mutex;
+ std::atomic<CordzInfo*> head ABSL_GUARDED_BY(mutex){nullptr};
+ };
+
+ static constexpr int kMaxStackDepth = 64;
+
+ explicit CordzInfo(CordRep* rep, const CordzInfo* src,
+ MethodIdentifier method);
+ ~CordzInfo() override;
+
+ // Sets `rep_` without holding a lock.
+ void UnsafeSetCordRep(CordRep* rep) ABSL_NO_THREAD_SAFETY_ANALYSIS;
+
+ void Track();
+
+  // Returns the parent method from `src`, which is either `parent_method_` or
+  // `method_` depending on whether `parent_method_` is kUnknown.
+ // Returns kUnknown if `src` is null.
+ static MethodIdentifier GetParentMethod(const CordzInfo* src);
+
+  // Fills the provided stack from `src`, copying either `parent_stack_` or
+  // `stack_` depending on whether `parent_stack_` is empty, returning the size
+  // of the parent stack.
+ // Returns 0 if `src` is null.
+ static int FillParentStack(const CordzInfo* src, void** stack);
+
+ void ODRCheck() const {
+#ifndef NDEBUG
+ ABSL_RAW_CHECK(list_ == &global_list_, "ODR violation in Cord");
+#endif
+ }
+
+ // Non-inlined implementation of `MaybeTrackCord`, which is executed if
+ // either `src` is sampled or `cord` is sampled, and either untracks or
+ // tracks `cord` as documented per `MaybeTrackCord`.
+ static void MaybeTrackCordImpl(InlineData& cord, const InlineData& src,
+ MethodIdentifier method);
+
+ ABSL_CONST_INIT static List global_list_;
+ List* const list_ = &global_list_;
+
+ // ci_prev_ and ci_next_ require the global list mutex to be held.
+ // Unfortunately we can't use thread annotations such that the thread safety
+ // analysis understands that list_ and global_list_ are one and the same.
+ std::atomic<CordzInfo*> ci_prev_{nullptr};
+ std::atomic<CordzInfo*> ci_next_{nullptr};
+
+ mutable y_absl::Mutex mutex_;
+ CordRep* rep_ ABSL_GUARDED_BY(mutex_);
+
+ void* stack_[kMaxStackDepth];
+ void* parent_stack_[kMaxStackDepth];
+ const int stack_depth_;
+ const int parent_stack_depth_;
+ const MethodIdentifier method_;
+ const MethodIdentifier parent_method_;
+ CordzUpdateTracker update_tracker_;
+ const y_absl::Time create_time_;
+};
+
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeTrackCord(
+ InlineData& cord, MethodIdentifier method) {
+ if (ABSL_PREDICT_FALSE(cordz_should_profile())) {
+ TrackCord(cord, method);
+ }
+}
+
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeTrackCord(
+ InlineData& cord, const InlineData& src, MethodIdentifier method) {
+ if (ABSL_PREDICT_FALSE(InlineData::is_either_profiled(cord, src))) {
+ MaybeTrackCordImpl(cord, src, method);
+ }
+}
+
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeUntrackCord(
+ CordzInfo* info) {
+ if (ABSL_PREDICT_FALSE(info)) {
+ info->Untrack();
+ }
+}
+
+inline void CordzInfo::AssertHeld() ABSL_ASSERT_EXCLUSIVE_LOCK(mutex_) {
+#ifndef NDEBUG
+ mutex_.AssertHeld();
+#endif
+}
+
+inline void CordzInfo::SetCordRep(CordRep* rep) {
+ AssertHeld();
+ rep_ = rep;
+}
+
+inline void CordzInfo::UnsafeSetCordRep(CordRep* rep) { rep_ = rep; }
+
+inline CordRep* CordzInfo::RefCordRep() const ABSL_LOCKS_EXCLUDED(mutex_) {
+ MutexLock lock(&mutex_);
+ return rep_ ? CordRep::Ref(rep_) : nullptr;
+}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
+
+#endif // ABSL_STRINGS_CORDZ_INFO_H_
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info/ya.make b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info/ya.make
index 930eaa8b05..4c33f695e7 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info/ya.make
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info/ya.make
@@ -13,27 +13,27 @@ LICENSE(Apache-2.0)
PEERDIR(
contrib/restricted/abseil-cpp-tstring/y_absl/base
- contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/low_level_alloc
+ contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/low_level_alloc
contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/raw_logging
contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/spinlock_wait
contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/throw_delegate
contrib/restricted/abseil-cpp-tstring/y_absl/base/log_severity
- contrib/restricted/abseil-cpp-tstring/y_absl/debugging
- contrib/restricted/abseil-cpp-tstring/y_absl/debugging/stacktrace
- contrib/restricted/abseil-cpp-tstring/y_absl/debugging/symbolize
- contrib/restricted/abseil-cpp-tstring/y_absl/demangle
+ contrib/restricted/abseil-cpp-tstring/y_absl/debugging
+ contrib/restricted/abseil-cpp-tstring/y_absl/debugging/stacktrace
+ contrib/restricted/abseil-cpp-tstring/y_absl/debugging/symbolize
+ contrib/restricted/abseil-cpp-tstring/y_absl/demangle
contrib/restricted/abseil-cpp-tstring/y_absl/numeric
- contrib/restricted/abseil-cpp-tstring/y_absl/profiling/internal/exponential_biased
+ contrib/restricted/abseil-cpp-tstring/y_absl/profiling/internal/exponential_biased
contrib/restricted/abseil-cpp-tstring/y_absl/strings
- contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_cord_internal
- contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal
- contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions
- contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle
- contrib/restricted/abseil-cpp-tstring/y_absl/synchronization
- contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal
- contrib/restricted/abseil-cpp-tstring/y_absl/time
- contrib/restricted/abseil-cpp-tstring/y_absl/time/civil_time
- contrib/restricted/abseil-cpp-tstring/y_absl/time/time_zone
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_cord_internal
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle
+ contrib/restricted/abseil-cpp-tstring/y_absl/synchronization
+ contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal
+ contrib/restricted/abseil-cpp-tstring/y_absl/time
+ contrib/restricted/abseil-cpp-tstring/y_absl/time/civil_time
+ contrib/restricted/abseil-cpp-tstring/y_absl/time/time_zone
)
ADDINCL(
@@ -42,10 +42,10 @@ ADDINCL(
NO_COMPILER_WARNINGS()
-SRCDIR(contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal)
+SRCDIR(contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal)
SRCS(
- cordz_info.cc
+ cordz_info.cc
)
END()
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token.cc
index f29678adb8..0f79f2dbd0 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token.cc
@@ -1,64 +1,64 @@
-// Copyright 2019 The Abseil Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "y_absl/strings/internal/cordz_sample_token.h"
-
-#include "y_absl/base/config.h"
-#include "y_absl/strings/internal/cordz_handle.h"
-#include "y_absl/strings/internal/cordz_info.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-CordzSampleToken::Iterator& CordzSampleToken::Iterator::operator++() {
- if (current_) {
- current_ = current_->Next(*token_);
- }
- return *this;
-}
-
-CordzSampleToken::Iterator CordzSampleToken::Iterator::operator++(int) {
- Iterator it(*this);
- operator++();
- return it;
-}
-
-bool operator==(const CordzSampleToken::Iterator& lhs,
- const CordzSampleToken::Iterator& rhs) {
- return lhs.current_ == rhs.current_ &&
- (lhs.current_ == nullptr || lhs.token_ == rhs.token_);
-}
-
-bool operator!=(const CordzSampleToken::Iterator& lhs,
- const CordzSampleToken::Iterator& rhs) {
- return !(lhs == rhs);
-}
-
-CordzSampleToken::Iterator::reference CordzSampleToken::Iterator::operator*()
- const {
- return *current_;
-}
-
-CordzSampleToken::Iterator::pointer CordzSampleToken::Iterator::operator->()
- const {
- return current_;
-}
-
-CordzSampleToken::Iterator::Iterator(const CordzSampleToken* token)
- : token_(token), current_(CordzInfo::Head(*token)) {}
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "y_absl/strings/internal/cordz_sample_token.h"
+
+#include "y_absl/base/config.h"
+#include "y_absl/strings/internal/cordz_handle.h"
+#include "y_absl/strings/internal/cordz_info.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+CordzSampleToken::Iterator& CordzSampleToken::Iterator::operator++() {
+ if (current_) {
+ current_ = current_->Next(*token_);
+ }
+ return *this;
+}
+
+CordzSampleToken::Iterator CordzSampleToken::Iterator::operator++(int) {
+ Iterator it(*this);
+ operator++();
+ return it;
+}
+
+bool operator==(const CordzSampleToken::Iterator& lhs,
+ const CordzSampleToken::Iterator& rhs) {
+ return lhs.current_ == rhs.current_ &&
+ (lhs.current_ == nullptr || lhs.token_ == rhs.token_);
+}
+
+bool operator!=(const CordzSampleToken::Iterator& lhs,
+ const CordzSampleToken::Iterator& rhs) {
+ return !(lhs == rhs);
+}
+
+CordzSampleToken::Iterator::reference CordzSampleToken::Iterator::operator*()
+ const {
+ return *current_;
+}
+
+CordzSampleToken::Iterator::pointer CordzSampleToken::Iterator::operator->()
+ const {
+ return current_;
+}
+
+CordzSampleToken::Iterator::Iterator(const CordzSampleToken* token)
+ : token_(token), current_(CordzInfo::Head(*token)) {}
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token.h
index 85bed6dae8..ae4aea50b6 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token.h
@@ -1,97 +1,97 @@
-// Copyright 2019 The Abseil Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "y_absl/base/config.h"
-#include "y_absl/strings/internal/cordz_handle.h"
-#include "y_absl/strings/internal/cordz_info.h"
-
-#ifndef ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_
-#define ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-// The existence of a CordzSampleToken guarantees that a reader can traverse the
-// global_cordz_infos_head linked-list without needing to hold a mutex. When a
-// CordzSampleToken exists, all CordzInfo objects that would be destroyed are
-// instead appended to a deletion queue. When the CordzSampleToken is destroyed,
-// it will also clean up any of these CordzInfo objects.
-//
-// E.g., ST are CordzSampleToken objects and CH are CordzHandle objects.
-// ST1 <- CH1 <- CH2 <- ST2 <- CH3 <- global_delete_queue_tail
-//
-// This list tracks that CH1 and CH2 were created after ST1, so the thread
-// holding ST1 might have a reference to CH1, CH2, ST2, and CH3. However, ST2 was
-// created later, so the thread holding the ST2 token cannot have a reference to
-// ST1, CH1, or CH2. If ST1 is cleaned up first, that thread will delete ST1,
-// CH1, and CH2. If instead ST2 is cleaned up first, that thread will only
-// delete ST2.
-//
-// If ST1 is cleaned up first, the new list will be:
-// ST2 <- CH3 <- global_delete_queue_tail
-//
-// If ST2 is cleaned up first, the new list will be:
-// ST1 <- CH1 <- CH2 <- CH3 <- global_delete_queue_tail
-//
-// All new CordzHandle objects are appended to the list, so if a new thread
-// comes along before either ST1 or ST2 are cleaned up, the new list will be:
-// ST1 <- CH1 <- CH2 <- ST2 <- CH3 <- ST3 <- global_delete_queue_tail
-//
-// A thread must hold the global_delete_queue_mu mutex whenever it's altering
-// this list.
-//
-// It is safe for a thread that holds a CordzSampleToken to read
-// global_cordz_infos at any time since the objects it is able to retrieve will
-// not be deleted while the CordzSampleToken exists.
-class CordzSampleToken : public CordzSnapshot {
- public:
- class Iterator {
- public:
- using iterator_category = std::input_iterator_tag;
- using value_type = const CordzInfo&;
- using difference_type = ptrdiff_t;
- using pointer = const CordzInfo*;
- using reference = value_type;
-
- Iterator() = default;
-
- Iterator& operator++();
- Iterator operator++(int);
- friend bool operator==(const Iterator& lhs, const Iterator& rhs);
- friend bool operator!=(const Iterator& lhs, const Iterator& rhs);
- reference operator*() const;
- pointer operator->() const;
-
- private:
- friend class CordzSampleToken;
- explicit Iterator(const CordzSampleToken* token);
-
- const CordzSampleToken* token_ = nullptr;
- pointer current_ = nullptr;
- };
-
- CordzSampleToken() = default;
- CordzSampleToken(const CordzSampleToken&) = delete;
- CordzSampleToken& operator=(const CordzSampleToken&) = delete;
-
- Iterator begin() { return Iterator(this); }
- Iterator end() { return Iterator(); }
-};
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
-
-#endif // ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "y_absl/base/config.h"
+#include "y_absl/strings/internal/cordz_handle.h"
+#include "y_absl/strings/internal/cordz_info.h"
+
+#ifndef ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_
+#define ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// The existence of a CordzSampleToken guarantees that a reader can traverse the
+// global_cordz_infos_head linked-list without needing to hold a mutex. When a
+// CordzSampleToken exists, all CordzInfo objects that would be destroyed are
+// instead appended to a deletion queue. When the CordzSampleToken is destroyed,
+// it will also clean up any of these CordzInfo objects.
+//
+// E.g., ST are CordzSampleToken objects and CH are CordzHandle objects.
+// ST1 <- CH1 <- CH2 <- ST2 <- CH3 <- global_delete_queue_tail
+//
+// This list tracks that CH1 and CH2 were created after ST1, so the thread
+// holding ST1 might have a reference to CH1, CH2, ST2, and CH3. However, ST2 was
+// created later, so the thread holding the ST2 token cannot have a reference to
+// ST1, CH1, or CH2. If ST1 is cleaned up first, that thread will delete ST1,
+// CH1, and CH2. If instead ST2 is cleaned up first, that thread will only
+// delete ST2.
+//
+// If ST1 is cleaned up first, the new list will be:
+// ST2 <- CH3 <- global_delete_queue_tail
+//
+// If ST2 is cleaned up first, the new list will be:
+// ST1 <- CH1 <- CH2 <- CH3 <- global_delete_queue_tail
+//
+// All new CordzHandle objects are appended to the list, so if a new thread
+// comes along before either ST1 or ST2 are cleaned up, the new list will be:
+// ST1 <- CH1 <- CH2 <- ST2 <- CH3 <- ST3 <- global_delete_queue_tail
+//
+// A thread must hold the global_delete_queue_mu mutex whenever it's altering
+// this list.
+//
+// It is safe for a thread that holds a CordzSampleToken to read
+// global_cordz_infos at any time since the objects it is able to retrieve will
+// not be deleted while the CordzSampleToken exists.
+class CordzSampleToken : public CordzSnapshot {
+ public:
+ class Iterator {
+ public:
+ using iterator_category = std::input_iterator_tag;
+ using value_type = const CordzInfo&;
+ using difference_type = ptrdiff_t;
+ using pointer = const CordzInfo*;
+ using reference = value_type;
+
+ Iterator() = default;
+
+ Iterator& operator++();
+ Iterator operator++(int);
+ friend bool operator==(const Iterator& lhs, const Iterator& rhs);
+ friend bool operator!=(const Iterator& lhs, const Iterator& rhs);
+ reference operator*() const;
+ pointer operator->() const;
+
+ private:
+ friend class CordzSampleToken;
+ explicit Iterator(const CordzSampleToken* token);
+
+ const CordzSampleToken* token_ = nullptr;
+ pointer current_ = nullptr;
+ };
+
+ CordzSampleToken() = default;
+ CordzSampleToken(const CordzSampleToken&) = delete;
+ CordzSampleToken& operator=(const CordzSampleToken&) = delete;
+
+ Iterator begin() { return Iterator(this); }
+ Iterator end() { return Iterator(); }
+};
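+
+// A minimal usage sketch (illustrative; the loop body is up to the caller):
+//
+//   CordzSampleToken token;
+//   for (const CordzInfo& info : token) {
+//     CordzStatistics stats = info.GetCordzStatistics();
+//   }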
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
+
+#endif // ABSL_STRINGS_CORDZ_SAMPLE_TOKEN_H_
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token/ya.make b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token/ya.make
index 4d46274f4e..6a14824712 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token/ya.make
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_sample_token/ya.make
@@ -1,52 +1,52 @@
-# Generated by devtools/yamaker.
-
-LIBRARY()
-
-WITHOUT_LICENSE_TEXTS()
-
-OWNER(
- somov
- g:cpp-contrib
-)
-
-LICENSE(Apache-2.0)
-
-PEERDIR(
- contrib/restricted/abseil-cpp-tstring/y_absl/base
- contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/low_level_alloc
- contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/raw_logging
- contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/spinlock_wait
- contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/throw_delegate
- contrib/restricted/abseil-cpp-tstring/y_absl/base/log_severity
- contrib/restricted/abseil-cpp-tstring/y_absl/debugging
- contrib/restricted/abseil-cpp-tstring/y_absl/debugging/stacktrace
- contrib/restricted/abseil-cpp-tstring/y_absl/debugging/symbolize
- contrib/restricted/abseil-cpp-tstring/y_absl/demangle
- contrib/restricted/abseil-cpp-tstring/y_absl/numeric
- contrib/restricted/abseil-cpp-tstring/y_absl/profiling/internal/exponential_biased
- contrib/restricted/abseil-cpp-tstring/y_absl/strings
- contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_cord_internal
- contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal
- contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions
- contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle
- contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info
- contrib/restricted/abseil-cpp-tstring/y_absl/synchronization
- contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal
- contrib/restricted/abseil-cpp-tstring/y_absl/time
- contrib/restricted/abseil-cpp-tstring/y_absl/time/civil_time
- contrib/restricted/abseil-cpp-tstring/y_absl/time/time_zone
-)
-
-ADDINCL(
- GLOBAL contrib/restricted/abseil-cpp-tstring
-)
-
-NO_COMPILER_WARNINGS()
-
-SRCDIR(contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal)
-
-SRCS(
- cordz_sample_token.cc
-)
-
-END()
+# Generated by devtools/yamaker.
+
+LIBRARY()
+
+WITHOUT_LICENSE_TEXTS()
+
+OWNER(
+ somov
+ g:cpp-contrib
+)
+
+LICENSE(Apache-2.0)
+
+PEERDIR(
+ contrib/restricted/abseil-cpp-tstring/y_absl/base
+ contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/low_level_alloc
+ contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/raw_logging
+ contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/spinlock_wait
+ contrib/restricted/abseil-cpp-tstring/y_absl/base/internal/throw_delegate
+ contrib/restricted/abseil-cpp-tstring/y_absl/base/log_severity
+ contrib/restricted/abseil-cpp-tstring/y_absl/debugging
+ contrib/restricted/abseil-cpp-tstring/y_absl/debugging/stacktrace
+ contrib/restricted/abseil-cpp-tstring/y_absl/debugging/symbolize
+ contrib/restricted/abseil-cpp-tstring/y_absl/demangle
+ contrib/restricted/abseil-cpp-tstring/y_absl/numeric
+ contrib/restricted/abseil-cpp-tstring/y_absl/profiling/internal/exponential_biased
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_cord_internal
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_functions
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_handle
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_info
+ contrib/restricted/abseil-cpp-tstring/y_absl/synchronization
+ contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/internal
+ contrib/restricted/abseil-cpp-tstring/y_absl/time
+ contrib/restricted/abseil-cpp-tstring/y_absl/time/civil_time
+ contrib/restricted/abseil-cpp-tstring/y_absl/time/time_zone
+)
+
+ADDINCL(
+ GLOBAL contrib/restricted/abseil-cpp-tstring
+)
+
+NO_COMPILER_WARNINGS()
+
+SRCDIR(contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal)
+
+SRCS(
+ cordz_sample_token.cc
+)
+
+END()
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_statistics.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_statistics.h
index 34e7c34bd8..fbee9dd1c2 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_statistics.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_statistics.h
@@ -1,87 +1,87 @@
-// Copyright 2019 The Abseil Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#ifndef ABSL_STRINGS_INTERNAL_CORDZ_STATISTICS_H_
-#define ABSL_STRINGS_INTERNAL_CORDZ_STATISTICS_H_
-
-#include <cstdint>
-
-#include "y_absl/base/config.h"
-#include "y_absl/strings/internal/cordz_update_tracker.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-// CordzStatistics captures some meta information about a Cord's shape.
-struct CordzStatistics {
- using MethodIdentifier = CordzUpdateTracker::MethodIdentifier;
-
-  // Node count information
- struct NodeCounts {
- size_t flat = 0; // #flats
- size_t flat_64 = 0; // #flats up to 64 bytes
- size_t flat_128 = 0; // #flats up to 128 bytes
- size_t flat_256 = 0; // #flats up to 256 bytes
- size_t flat_512 = 0; // #flats up to 512 bytes
- size_t flat_1k = 0; // #flats up to 1K bytes
- size_t external = 0; // #external reps
- size_t substring = 0; // #substring reps
- size_t concat = 0; // #concat reps
- size_t ring = 0; // #ring buffer reps
- size_t btree = 0; // #btree reps
- };
-
- // The size of the cord in bytes. This matches the result of Cord::size().
- int64_t size = 0;
-
- // The estimated memory used by the sampled cord. This value matches the
- // value as reported by Cord::EstimatedMemoryUsage().
- // A value of 0 implies the property has not been recorded.
- int64_t estimated_memory_usage = 0;
-
- // The effective memory used by the sampled cord, inversely weighted by the
- // effective indegree of each allocated node. This is a representation of the
- // fair share of memory usage that should be attributed to the sampled cord.
- // This value is more useful for cases where one or more nodes are referenced
- // by multiple Cord instances, and for cases where a Cord includes the same
- // node multiple times (either directly or indirectly).
- // A value of 0 implies the property has not been recorded.
- int64_t estimated_fair_share_memory_usage = 0;
-
- // The total number of nodes referenced by this cord.
- // For ring buffer Cords, this includes the 'ring buffer' node.
- // For btree Cords, this includes all 'CordRepBtree' tree nodes as well as all
- // the substring, flat and external nodes referenced by the tree.
- // A value of 0 implies the property has not been recorded.
- int64_t node_count = 0;
-
- // Detailed node counts per type
- NodeCounts node_counts;
-
- // The cord method responsible for sampling the cord.
- MethodIdentifier method = MethodIdentifier::kUnknown;
-
- // The cord method responsible for sampling the parent cord if applicable.
- MethodIdentifier parent_method = MethodIdentifier::kUnknown;
-
- // Update tracker tracking invocation count per cord method.
- CordzUpdateTracker update_tracker;
-};
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
-
-#endif // ABSL_STRINGS_INTERNAL_CORDZ_STATISTICS_H_
+// Copyright 2019 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORDZ_STATISTICS_H_
+#define ABSL_STRINGS_INTERNAL_CORDZ_STATISTICS_H_
+
+#include <cstdint>
+
+#include "y_absl/base/config.h"
+#include "y_absl/strings/internal/cordz_update_tracker.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordzStatistics captures some meta information about a Cord's shape.
+struct CordzStatistics {
+ using MethodIdentifier = CordzUpdateTracker::MethodIdentifier;
+
+  // Node count information
+ struct NodeCounts {
+ size_t flat = 0; // #flats
+ size_t flat_64 = 0; // #flats up to 64 bytes
+ size_t flat_128 = 0; // #flats up to 128 bytes
+ size_t flat_256 = 0; // #flats up to 256 bytes
+ size_t flat_512 = 0; // #flats up to 512 bytes
+ size_t flat_1k = 0; // #flats up to 1K bytes
+ size_t external = 0; // #external reps
+ size_t substring = 0; // #substring reps
+ size_t concat = 0; // #concat reps
+ size_t ring = 0; // #ring buffer reps
+ size_t btree = 0; // #btree reps
+ };
+
+ // The size of the cord in bytes. This matches the result of Cord::size().
+ int64_t size = 0;
+
+ // The estimated memory used by the sampled cord. This value matches the
+ // value as reported by Cord::EstimatedMemoryUsage().
+ // A value of 0 implies the property has not been recorded.
+ int64_t estimated_memory_usage = 0;
+
+ // The effective memory used by the sampled cord, inversely weighted by the
+ // effective indegree of each allocated node. This is a representation of the
+ // fair share of memory usage that should be attributed to the sampled cord.
+ // This value is more useful for cases where one or more nodes are referenced
+ // by multiple Cord instances, and for cases where a Cord includes the same
+ // node multiple times (either directly or indirectly).
+ // A value of 0 implies the property has not been recorded.
+ int64_t estimated_fair_share_memory_usage = 0;
+
+ // The total number of nodes referenced by this cord.
+ // For ring buffer Cords, this includes the 'ring buffer' node.
+ // For btree Cords, this includes all 'CordRepBtree' tree nodes as well as all
+ // the substring, flat and external nodes referenced by the tree.
+ // A value of 0 implies the property has not been recorded.
+ int64_t node_count = 0;
+
+ // Detailed node counts per type
+ NodeCounts node_counts;
+
+ // The cord method responsible for sampling the cord.
+ MethodIdentifier method = MethodIdentifier::kUnknown;
+
+ // The cord method responsible for sampling the parent cord if applicable.
+ MethodIdentifier parent_method = MethodIdentifier::kUnknown;
+
+ // Update tracker tracking invocation count per cord method.
+ CordzUpdateTracker update_tracker;
+};
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORDZ_STATISTICS_H_
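
Several fields above use 0 to mean "not recorded", so consumers must branch before reporting. A small standalone sketch of that convention follows; how the struct gets populated is out of scope here:

#include <cstdio>

#include "y_absl/strings/internal/cordz_statistics.h"

// Prints a one-line summary, honoring the "0 means not recorded" convention
// documented on estimated_memory_usage and node_count.
void PrintSummary(const y_absl::cord_internal::CordzStatistics& stats) {
  std::printf("size=%lld", static_cast<long long>(stats.size));
  if (stats.estimated_memory_usage != 0) {
    std::printf(" mem=%lld",
                static_cast<long long>(stats.estimated_memory_usage));
  }
  if (stats.node_count != 0) {
    std::printf(" nodes=%lld (flats=%zu, btree=%zu)",
                static_cast<long long>(stats.node_count),
                stats.node_counts.flat, stats.node_counts.btree);
  }
  std::printf("\n");
}
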
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_update_scope.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_update_scope.h
index 66e0e8f51b..a8495c0503 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_update_scope.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_update_scope.h
@@ -1,71 +1,71 @@
-// Copyright 2021 The Abseil Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#ifndef ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_SCOPE_H_
-#define ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_SCOPE_H_
-
-#include "y_absl/base/config.h"
-#include "y_absl/base/optimization.h"
-#include "y_absl/base/thread_annotations.h"
-#include "y_absl/strings/internal/cord_internal.h"
-#include "y_absl/strings/internal/cordz_info.h"
-#include "y_absl/strings/internal/cordz_update_tracker.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-// CordzUpdateScope scopes an update to the provided CordzInfo.
-// The class invokes `info->Lock(method)` and `info->Unlock()` to guard
-// cordrep updates. This class does nothing if `info` is null.
-// See also the `Lock`, `Unlock` and `SetCordRep` methods in `CordzInfo`.
-class ABSL_SCOPED_LOCKABLE CordzUpdateScope {
- public:
- CordzUpdateScope(CordzInfo* info, CordzUpdateTracker::MethodIdentifier method)
- ABSL_EXCLUSIVE_LOCK_FUNCTION(info)
- : info_(info) {
- if (ABSL_PREDICT_FALSE(info_)) {
- info->Lock(method);
- }
- }
-
-  // CordzUpdateScope cannot be copied or assigned to.
- CordzUpdateScope(CordzUpdateScope&& rhs) = delete;
- CordzUpdateScope(const CordzUpdateScope&) = delete;
- CordzUpdateScope& operator=(CordzUpdateScope&& rhs) = delete;
- CordzUpdateScope& operator=(const CordzUpdateScope&) = delete;
-
- ~CordzUpdateScope() ABSL_UNLOCK_FUNCTION() {
- if (ABSL_PREDICT_FALSE(info_)) {
- info_->Unlock();
- }
- }
-
- void SetCordRep(CordRep* rep) const {
- if (ABSL_PREDICT_FALSE(info_)) {
- info_->SetCordRep(rep);
- }
- }
-
- CordzInfo* info() const { return info_; }
-
- private:
- CordzInfo* info_;
-};
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
-
-#endif // ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_SCOPE_H_
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_SCOPE_H_
+#define ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_SCOPE_H_
+
+#include "y_absl/base/config.h"
+#include "y_absl/base/optimization.h"
+#include "y_absl/base/thread_annotations.h"
+#include "y_absl/strings/internal/cord_internal.h"
+#include "y_absl/strings/internal/cordz_info.h"
+#include "y_absl/strings/internal/cordz_update_tracker.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordzUpdateScope scopes an update to the provided CordzInfo.
+// The class invokes `info->Lock(method)` and `info->Unlock()` to guard
+// cordrep updates. This class does nothing if `info` is null.
+// See also the `Lock`, `Unlock` and `SetCordRep` methods in `CordzInfo`.
+class ABSL_SCOPED_LOCKABLE CordzUpdateScope {
+ public:
+ CordzUpdateScope(CordzInfo* info, CordzUpdateTracker::MethodIdentifier method)
+ ABSL_EXCLUSIVE_LOCK_FUNCTION(info)
+ : info_(info) {
+ if (ABSL_PREDICT_FALSE(info_)) {
+ info->Lock(method);
+ }
+ }
+
+  // CordzUpdateScope cannot be copied or assigned to.
+ CordzUpdateScope(CordzUpdateScope&& rhs) = delete;
+ CordzUpdateScope(const CordzUpdateScope&) = delete;
+ CordzUpdateScope& operator=(CordzUpdateScope&& rhs) = delete;
+ CordzUpdateScope& operator=(const CordzUpdateScope&) = delete;
+
+ ~CordzUpdateScope() ABSL_UNLOCK_FUNCTION() {
+ if (ABSL_PREDICT_FALSE(info_)) {
+ info_->Unlock();
+ }
+ }
+
+ void SetCordRep(CordRep* rep) const {
+ if (ABSL_PREDICT_FALSE(info_)) {
+ info_->SetCordRep(rep);
+ }
+ }
+
+ CordzInfo* info() const { return info_; }
+
+ private:
+ CordzInfo* info_;
+};
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_SCOPE_H_
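
Because the scope is a no-op when `info` is null, sampled and unsampled cords share one call pattern. A sketch of that pattern, assuming a caller that already produced a replacement rep (the function and rep handling are illustrative; only the scope usage follows the header):

#include "y_absl/strings/internal/cord_internal.h"
#include "y_absl/strings/internal/cordz_info.h"
#include "y_absl/strings/internal/cordz_update_scope.h"
#include "y_absl/strings/internal/cordz_update_tracker.h"

namespace ci = y_absl::cord_internal;

// `info` is null for unsampled cords, making the scope a no-op. For sampled
// cords the constructor calls info->Lock(kAppendString), the destructor
// calls info->Unlock(), and SetCordRep() records the new root in between.
void InstallNewRoot(ci::CordzInfo* info, ci::CordRep* new_rep) {
  ci::CordzUpdateScope scope(info, ci::CordzUpdateTracker::kAppendString);
  // ... mutate the tree / produce `new_rep` here ...
  scope.SetCordRep(new_rep);
}
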
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_update_tracker.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_update_tracker.h
index 48a449b4bf..670ecffcec 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_update_tracker.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/cordz_update_tracker.h
@@ -1,121 +1,121 @@
-// Copyright 2021 The Abseil Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#ifndef ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_TRACKER_H_
-#define ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_TRACKER_H_
-
-#include <atomic>
-#include <cstdint>
-
-#include "y_absl/base/config.h"
-
-namespace y_absl {
-ABSL_NAMESPACE_BEGIN
-namespace cord_internal {
-
-// CordzUpdateTracker tracks counters for Cord update methods.
-//
-// The purpose of CordzUpdateTracker is to track the number of calls to methods
-// updating Cord data for sampled cords. The class internally uses 'lossy'
-// atomic operations: Cord is thread-compatible, so there is no need to
-// synchronize updates. However, Cordz collection threads may call `Value()` at
-// any point, so the class needs to provide thread-safe access.
-//
-// This class is thread-safe. However, as noted above, all non-const methods
-// should be used single-threaded only: updates are thread-safe but lossy.
-class CordzUpdateTracker {
- public:
- // Tracked update methods.
- enum MethodIdentifier {
- kUnknown,
- kAppendBuffer,
- kAppendCord,
- kAppendExternalMemory,
- kAppendString,
- kAssignCord,
- kAssignString,
- kClear,
- kConstructorCord,
- kConstructorString,
- kCordReader,
- kFlatten,
- kGetAppendRegion,
- kMakeCordFromExternal,
- kMoveAppendCord,
- kMoveAssignCord,
- kMovePrependCord,
- kPrependBuffer,
- kPrependCord,
- kPrependString,
- kRemovePrefix,
- kRemoveSuffix,
- kSubCord,
-
- // kNumMethods defines the number of entries: must be the last entry.
- kNumMethods,
- };
-
- // Constructs a new instance. All counters are zero-initialized.
- constexpr CordzUpdateTracker() noexcept : values_{} {}
-
- // Copy constructs a new instance.
- CordzUpdateTracker(const CordzUpdateTracker& rhs) noexcept { *this = rhs; }
-
- // Assigns the provided value to this instance.
- CordzUpdateTracker& operator=(const CordzUpdateTracker& rhs) noexcept {
- for (int i = 0; i < kNumMethods; ++i) {
- values_[i].store(rhs.values_[i].load(std::memory_order_relaxed),
- std::memory_order_relaxed);
- }
- return *this;
- }
-
- // Returns the value for the specified method.
- int64_t Value(MethodIdentifier method) const {
- return values_[method].load(std::memory_order_relaxed);
- }
-
- // Increases the value for the specified method by `n`
- void LossyAdd(MethodIdentifier method, int64_t n = 1) {
- auto& value = values_[method];
- value.store(value.load(std::memory_order_relaxed) + n,
- std::memory_order_relaxed);
- }
-
- // Adds all the values from `src` to this instance
- void LossyAdd(const CordzUpdateTracker& src) {
- for (int i = 0; i < kNumMethods; ++i) {
- MethodIdentifier method = static_cast<MethodIdentifier>(i);
- if (int64_t value = src.Value(method)) {
- LossyAdd(method, value);
- }
- }
- }
-
- private:
- // Until C++20 std::atomic is not constexpr default-constructible, so we need
- // a wrapper for this class to be constexpr constructible.
- class Counter : public std::atomic<int64_t> {
- public:
- constexpr Counter() noexcept : std::atomic<int64_t>(0) {}
- };
-
- Counter values_[kNumMethods];
-};
-
-} // namespace cord_internal
-ABSL_NAMESPACE_END
-} // namespace y_absl
-
-#endif // ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_TRACKER_H_
+// Copyright 2021 The Abseil Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_TRACKER_H_
+#define ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_TRACKER_H_
+
+#include <atomic>
+#include <cstdint>
+
+#include "y_absl/base/config.h"
+
+namespace y_absl {
+ABSL_NAMESPACE_BEGIN
+namespace cord_internal {
+
+// CordzUpdateTracker tracks counters for Cord update methods.
+//
+// The purpose of CordzUpdateTracker is to track the number of calls to methods
+// updating Cord data for sampled cords. The class internally uses 'lossy'
+// atomic operations: Cord is thread-compatible, so there is no need to
+// synchronize updates. However, Cordz collection threads may call `Value()` at
+// any point, so the class needs to provide thread-safe access.
+//
+// This class is thread-safe. However, as noted above, all non-const methods
+// should be used single-threaded only: updates are thread-safe but lossy.
+class CordzUpdateTracker {
+ public:
+ // Tracked update methods.
+ enum MethodIdentifier {
+ kUnknown,
+ kAppendBuffer,
+ kAppendCord,
+ kAppendExternalMemory,
+ kAppendString,
+ kAssignCord,
+ kAssignString,
+ kClear,
+ kConstructorCord,
+ kConstructorString,
+ kCordReader,
+ kFlatten,
+ kGetAppendRegion,
+ kMakeCordFromExternal,
+ kMoveAppendCord,
+ kMoveAssignCord,
+ kMovePrependCord,
+ kPrependBuffer,
+ kPrependCord,
+ kPrependString,
+ kRemovePrefix,
+ kRemoveSuffix,
+ kSubCord,
+
+ // kNumMethods defines the number of entries: must be the last entry.
+ kNumMethods,
+ };
+
+ // Constructs a new instance. All counters are zero-initialized.
+ constexpr CordzUpdateTracker() noexcept : values_{} {}
+
+ // Copy constructs a new instance.
+ CordzUpdateTracker(const CordzUpdateTracker& rhs) noexcept { *this = rhs; }
+
+ // Assigns the provided value to this instance.
+ CordzUpdateTracker& operator=(const CordzUpdateTracker& rhs) noexcept {
+ for (int i = 0; i < kNumMethods; ++i) {
+ values_[i].store(rhs.values_[i].load(std::memory_order_relaxed),
+ std::memory_order_relaxed);
+ }
+ return *this;
+ }
+
+ // Returns the value for the specified method.
+ int64_t Value(MethodIdentifier method) const {
+ return values_[method].load(std::memory_order_relaxed);
+ }
+
+ // Increases the value for the specified method by `n`
+ void LossyAdd(MethodIdentifier method, int64_t n = 1) {
+ auto& value = values_[method];
+ value.store(value.load(std::memory_order_relaxed) + n,
+ std::memory_order_relaxed);
+ }
+
+ // Adds all the values from `src` to this instance
+ void LossyAdd(const CordzUpdateTracker& src) {
+ for (int i = 0; i < kNumMethods; ++i) {
+ MethodIdentifier method = static_cast<MethodIdentifier>(i);
+ if (int64_t value = src.Value(method)) {
+ LossyAdd(method, value);
+ }
+ }
+ }
+
+ private:
+ // Until C++20 std::atomic is not constexpr default-constructible, so we need
+ // a wrapper for this class to be constexpr constructible.
+ class Counter : public std::atomic<int64_t> {
+ public:
+ constexpr Counter() noexcept : std::atomic<int64_t>(0) {}
+ };
+
+ Counter values_[kNumMethods];
+};
+
+} // namespace cord_internal
+ABSL_NAMESPACE_END
+} // namespace y_absl
+
+#endif // ABSL_STRINGS_INTERNAL_CORDZ_UPDATE_TRACKER_H_
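
The "lossy" load-then-store counters can be exercised directly with the methods declared above. A minimal single-threaded sketch; under concurrent writers some increments may be dropped, by design:

#include <cstdio>

#include "y_absl/strings/internal/cordz_update_tracker.h"

using y_absl::cord_internal::CordzUpdateTracker;

int main() {
  CordzUpdateTracker tracker;
  tracker.LossyAdd(CordzUpdateTracker::kAppendString);
  tracker.LossyAdd(CordzUpdateTracker::kAppendString);
  tracker.LossyAdd(CordzUpdateTracker::kSubCord, 3);

  // Value() uses relaxed loads, so a collector thread may read these
  // concurrently with the (single) writer thread.
  std::printf("appends=%lld subcords=%lld\n",
              static_cast<long long>(
                  tracker.Value(CordzUpdateTracker::kAppendString)),
              static_cast<long long>(
                  tracker.Value(CordzUpdateTracker::kSubCord)));
  // Prints: appends=2 subcords=3
  return 0;
}
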
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/resize_uninitialized.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/resize_uninitialized.h
index 14860bb237..70ad53c714 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/resize_uninitialized.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/resize_uninitialized.h
@@ -17,7 +17,7 @@
#ifndef ABSL_STRINGS_INTERNAL_RESIZE_UNINITIALIZED_H_
#define ABSL_STRINGS_INTERNAL_RESIZE_UNINITIALIZED_H_
-#include <algorithm>
+#include <algorithm>
#include <util/generic/string.h>
#include <type_traits>
#include <utility>
@@ -29,9 +29,9 @@ namespace y_absl {
ABSL_NAMESPACE_BEGIN
namespace strings_internal {
-// In this type trait, we look for a __resize_default_init member function and
-// use it if available; otherwise, we use resize. We provide HasMember to
-// indicate whether __resize_default_init is present.
+// In this type trait, we look for a __resize_default_init member function and
+// use it if available; otherwise, we use resize. We provide HasMember to
+// indicate whether __resize_default_init is present.
template <typename string_type, typename = void>
struct ResizeUninitializedTraits {
using HasMember = std::false_type;
@@ -68,50 +68,50 @@ inline void STLStringResizeUninitialized(string_type* s, size_t new_size) {
ResizeUninitializedTraits<string_type>::Resize(s, new_size);
}
-// Used to ensure exponential growth so that the amortized complexity of
-// increasing the string size by a small amount is O(1), in contrast to
-// O(str->size()) in the case of precise growth.
-template <typename string_type>
-void STLStringReserveAmortized(string_type* s, size_t new_size) {
- const size_t cap = s->capacity();
- if (new_size > cap) {
- // Make sure to always grow by at least a factor of 2x.
- s->reserve((std::max)(new_size, 2 * cap));
- }
-}
-
-// In this type trait, we look for an __append_default_init member function and
-// use it if available; otherwise, we use append.
-template <typename string_type, typename = void>
-struct AppendUninitializedTraits {
- static void Append(string_type* s, size_t n) {
- s->append(n, typename string_type::value_type());
- }
-};
-
-template <typename string_type>
-struct AppendUninitializedTraits<
- string_type, y_absl::void_t<decltype(std::declval<string_type&>()
- .__append_default_init(237))> > {
- static void Append(string_type* s, size_t n) {
- s->__append_default_init(n);
- }
-};
-
-// Like STLStringResizeUninitialized(str, new_size), except guaranteed to use
-// exponential growth so that the amortized complexity of increasing the string
-// size by a small amount is O(1), in contrast to O(str->size()) in the case of
-// precise growth.
-template <typename string_type>
-void STLStringResizeUninitializedAmortized(string_type* s, size_t new_size) {
- const size_t size = s->size();
- if (new_size > size) {
- AppendUninitializedTraits<string_type>::Append(s, new_size - size);
- } else {
- s->erase(new_size);
- }
-}
-
+// Used to ensure exponential growth so that the amortized complexity of
+// increasing the string size by a small amount is O(1), in contrast to
+// O(str->size()) in the case of precise growth.
+template <typename string_type>
+void STLStringReserveAmortized(string_type* s, size_t new_size) {
+ const size_t cap = s->capacity();
+ if (new_size > cap) {
+ // Make sure to always grow by at least a factor of 2x.
+ s->reserve((std::max)(new_size, 2 * cap));
+ }
+}
+
+// In this type trait, we look for an __append_default_init member function and
+// use it if available; otherwise, we use append.
+template <typename string_type, typename = void>
+struct AppendUninitializedTraits {
+ static void Append(string_type* s, size_t n) {
+ s->append(n, typename string_type::value_type());
+ }
+};
+
+template <typename string_type>
+struct AppendUninitializedTraits<
+ string_type, y_absl::void_t<decltype(std::declval<string_type&>()
+ .__append_default_init(237))> > {
+ static void Append(string_type* s, size_t n) {
+ s->__append_default_init(n);
+ }
+};
+
+// Like STLStringResizeUninitialized(str, new_size), except guaranteed to use
+// exponential growth so that the amortized complexity of increasing the string
+// size by a small amount is O(1), in contrast to O(str->size()) in the case of
+// precise growth.
+template <typename string_type>
+void STLStringResizeUninitializedAmortized(string_type* s, size_t new_size) {
+ const size_t size = s->size();
+ if (new_size > size) {
+ AppendUninitializedTraits<string_type>::Append(s, new_size - size);
+ } else {
+ s->erase(new_size);
+ }
+}
+
} // namespace strings_internal
ABSL_NAMESPACE_END
} // namespace y_absl
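
The amortized-growth guarantee is observable by counting reallocations. A rough standalone check, assuming the templates also accept std::string (data() pointer identity is only a proxy for reallocation, and small-string storage masks the first few growths):

#include <cstdio>
#include <string>

#include "y_absl/strings/internal/resize_uninitialized.h"

int main() {
  std::string s;
  size_t reallocs = 0;
  const char* prev = s.data();
  // Grow one byte at a time. The amortized variant appends rather than
  // resizing precisely, so capacity grows geometrically and the number of
  // reallocations stays logarithmic in the final size.
  for (size_t i = 1; i <= 4096; ++i) {
    y_absl::strings_internal::STLStringResizeUninitializedAmortized(&s, i);
    if (s.data() != prev) {
      ++reallocs;
      prev = s.data();
    }
  }
  std::printf("size=%zu reallocations=%zu\n", s.size(), reallocs);
  return 0;
}
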
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/arg.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/arg.h
index 59b7bcc727..fef5fa1c1f 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/arg.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/arg.h
@@ -123,14 +123,14 @@ StringConvertResult FormatConvertImpl(const TString& v,
StringConvertResult FormatConvertImpl(string_view v,
FormatConversionSpecImpl conv,
FormatSinkImpl* sink);
-#if defined(ABSL_HAVE_STD_STRING_VIEW) && !defined(ABSL_USES_STD_STRING_VIEW)
-inline StringConvertResult FormatConvertImpl(std::string_view v,
- FormatConversionSpecImpl conv,
- FormatSinkImpl* sink) {
- return FormatConvertImpl(y_absl::string_view(v.data(), v.size()), conv, sink);
-}
-#endif // ABSL_HAVE_STD_STRING_VIEW && !ABSL_USES_STD_STRING_VIEW
-
+#if defined(ABSL_HAVE_STD_STRING_VIEW) && !defined(ABSL_USES_STD_STRING_VIEW)
+inline StringConvertResult FormatConvertImpl(std::string_view v,
+ FormatConversionSpecImpl conv,
+ FormatSinkImpl* sink) {
+ return FormatConvertImpl(y_absl::string_view(v.data(), v.size()), conv, sink);
+}
+#endif // ABSL_HAVE_STD_STRING_VIEW && !ABSL_USES_STD_STRING_VIEW
+
ArgConvertResult<FormatConversionCharSetUnion(
FormatConversionCharSetInternal::s, FormatConversionCharSetInternal::p)>
FormatConvertImpl(const char* v, const FormatConversionSpecImpl conv,
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/bind.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/bind.cc
index 211ce25dea..534a739440 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/bind.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/bind.cc
@@ -58,7 +58,7 @@ inline bool ArgContext::Bind(const UnboundConversion* unbound,
if (static_cast<size_t>(arg_position - 1) >= pack_.size()) return false;
arg = &pack_[arg_position - 1]; // 1-based
- if (unbound->flags != Flags::kBasic) {
+ if (unbound->flags != Flags::kBasic) {
int width = unbound->width.value();
bool force_left = false;
if (unbound->width.is_from_arg()) {
@@ -84,8 +84,8 @@ inline bool ArgContext::Bind(const UnboundConversion* unbound,
FormatConversionSpecImplFriend::SetPrecision(precision, bound);
if (force_left) {
- FormatConversionSpecImplFriend::SetFlags(unbound->flags | Flags::kLeft,
- bound);
+ FormatConversionSpecImplFriend::SetFlags(unbound->flags | Flags::kLeft,
+ bound);
} else {
FormatConversionSpecImplFriend::SetFlags(unbound->flags, bound);
}
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/bind.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/bind.h
index 3966610710..547cabcb90 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/bind.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/bind.h
@@ -100,7 +100,7 @@ class FormatSpecTemplate
// We use the 'unavailable' attribute to give a better compiler error than
// just 'method is deleted'.
// To avoid checking the format twice, we just check that the format is
- // constexpr. If it is valid, then the overload below will kick in.
+ // constexpr. If it is valid, then the overload below will kick in.
// We add the template here to make this overload have lower priority.
template <typename = void>
FormatSpecTemplate(const char* s) // NOLINT
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/extension.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/extension.cc
index f2a4169ae7..dd72c071f6 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/extension.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/extension.cc
@@ -23,13 +23,13 @@ namespace y_absl {
ABSL_NAMESPACE_BEGIN
namespace str_format_internal {
-TString FlagsToString(Flags v) {
+TString FlagsToString(Flags v) {
TString s;
- s.append(FlagsContains(v, Flags::kLeft) ? "-" : "");
- s.append(FlagsContains(v, Flags::kShowPos) ? "+" : "");
- s.append(FlagsContains(v, Flags::kSignCol) ? " " : "");
- s.append(FlagsContains(v, Flags::kAlt) ? "#" : "");
- s.append(FlagsContains(v, Flags::kZero) ? "0" : "");
+ s.append(FlagsContains(v, Flags::kLeft) ? "-" : "");
+ s.append(FlagsContains(v, Flags::kShowPos) ? "+" : "");
+ s.append(FlagsContains(v, Flags::kSignCol) ? " " : "");
+ s.append(FlagsContains(v, Flags::kAlt) ? "#" : "");
+ s.append(FlagsContains(v, Flags::kZero) ? "0" : "");
return s;
}
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/extension.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/extension.h
index e5de5cb6a1..70a35342c4 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/extension.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/extension.h
@@ -128,33 +128,33 @@ class FormatSinkImpl {
char buf_[1024];
};
-enum class Flags : uint8_t {
- kBasic = 0,
- kLeft = 1 << 0,
- kShowPos = 1 << 1,
- kSignCol = 1 << 2,
- kAlt = 1 << 3,
- kZero = 1 << 4,
- // This is not a real flag. It just exists to turn off kBasic when no other
- // flags are set. This is for when width/precision are specified.
- kNonBasic = 1 << 5,
+enum class Flags : uint8_t {
+ kBasic = 0,
+ kLeft = 1 << 0,
+ kShowPos = 1 << 1,
+ kSignCol = 1 << 2,
+ kAlt = 1 << 3,
+ kZero = 1 << 4,
+ // This is not a real flag. It just exists to turn off kBasic when no other
+ // flags are set. This is for when width/precision are specified.
+ kNonBasic = 1 << 5,
};
-constexpr Flags operator|(Flags a, Flags b) {
- return static_cast<Flags>(static_cast<uint8_t>(a) | static_cast<uint8_t>(b));
-}
-
-constexpr bool FlagsContains(Flags haystack, Flags needle) {
- return (static_cast<uint8_t>(haystack) & static_cast<uint8_t>(needle)) ==
- static_cast<uint8_t>(needle);
-}
-
-TString FlagsToString(Flags v);
-
-inline std::ostream& operator<<(std::ostream& os, Flags v) {
- return os << FlagsToString(v);
-}
-
+constexpr Flags operator|(Flags a, Flags b) {
+ return static_cast<Flags>(static_cast<uint8_t>(a) | static_cast<uint8_t>(b));
+}
+
+constexpr bool FlagsContains(Flags haystack, Flags needle) {
+ return (static_cast<uint8_t>(haystack) & static_cast<uint8_t>(needle)) ==
+ static_cast<uint8_t>(needle);
+}
+
+TString FlagsToString(Flags v);
+
+inline std::ostream& operator<<(std::ostream& os, Flags v) {
+ return os << FlagsToString(v);
+}
+
// clang-format off
#define ABSL_INTERNAL_CONVERSION_CHARS_EXPAND_(X_VAL, X_SEP) \
/* text */ \
@@ -271,16 +271,16 @@ struct FormatConversionSpecImplFriend;
class FormatConversionSpecImpl {
public:
   // Width and precision are not specified; no flags are set.
- bool is_basic() const { return flags_ == Flags::kBasic; }
- bool has_left_flag() const { return FlagsContains(flags_, Flags::kLeft); }
- bool has_show_pos_flag() const {
- return FlagsContains(flags_, Flags::kShowPos);
- }
- bool has_sign_col_flag() const {
- return FlagsContains(flags_, Flags::kSignCol);
- }
- bool has_alt_flag() const { return FlagsContains(flags_, Flags::kAlt); }
- bool has_zero_flag() const { return FlagsContains(flags_, Flags::kZero); }
+ bool is_basic() const { return flags_ == Flags::kBasic; }
+ bool has_left_flag() const { return FlagsContains(flags_, Flags::kLeft); }
+ bool has_show_pos_flag() const {
+ return FlagsContains(flags_, Flags::kShowPos);
+ }
+ bool has_sign_col_flag() const {
+ return FlagsContains(flags_, Flags::kSignCol);
+ }
+ bool has_alt_flag() const { return FlagsContains(flags_, Flags::kAlt); }
+ bool has_zero_flag() const { return FlagsContains(flags_, Flags::kZero); }
FormatConversionChar conversion_char() const {
     // Keep this field first in the struct. It generates better code when
@@ -324,7 +324,7 @@ struct FormatConversionSpecImplFriend final {
conv->precision_ = p;
}
static TString FlagsToString(const FormatConversionSpecImpl& spec) {
- return str_format_internal::FlagsToString(spec.flags_);
+ return str_format_internal::FlagsToString(spec.flags_);
}
};
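
The Flags enum above is a standard enum-class-as-bitmask pattern: kBasic is 0, each real flag is a distinct bit, and kNonBasic exists only to force the non-basic path when width or precision is present. A self-contained sketch of the same technique, independent of the internal headers:

#include <cstdint>
#include <cstdio>

// Mirrors str_format_internal::Flags: an enum class used as a bitmask, with
// a user-defined operator| since enum classes have no implicit conversions.
enum class Flags : uint8_t {
  kBasic = 0,
  kLeft = 1 << 0,
  kZero = 1 << 4,
  kNonBasic = 1 << 5,  // Not a real flag: marks "not basic" for width/precision.
};

constexpr Flags operator|(Flags a, Flags b) {
  return static_cast<Flags>(static_cast<uint8_t>(a) | static_cast<uint8_t>(b));
}

constexpr bool FlagsContains(Flags haystack, Flags needle) {
  return (static_cast<uint8_t>(haystack) & static_cast<uint8_t>(needle)) ==
         static_cast<uint8_t>(needle);
}

int main() {
  Flags f = Flags::kLeft | Flags::kZero;
  std::printf("left=%d zero=%d basic=%d\n", FlagsContains(f, Flags::kLeft),
              FlagsContains(f, Flags::kZero), f == Flags::kBasic);
  // Prints: left=1 zero=1 basic=0
  return 0;
}
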
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/parser.cc b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/parser.cc
index af07e32fe5..594b943151 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/parser.cc
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/parser.cc
@@ -34,67 +34,67 @@ namespace str_format_internal {
using CC = FormatConversionCharInternal;
using LM = LengthMod;
-// Abbreviations to fit in the table below.
-constexpr auto f_sign = Flags::kSignCol;
-constexpr auto f_alt = Flags::kAlt;
-constexpr auto f_pos = Flags::kShowPos;
-constexpr auto f_left = Flags::kLeft;
-constexpr auto f_zero = Flags::kZero;
-
+// Abbreviations to fit in the table below.
+constexpr auto f_sign = Flags::kSignCol;
+constexpr auto f_alt = Flags::kAlt;
+constexpr auto f_pos = Flags::kShowPos;
+constexpr auto f_left = Flags::kLeft;
+constexpr auto f_zero = Flags::kZero;
+
ABSL_CONST_INIT const ConvTag kTags[256] = {
- {}, {}, {}, {}, {}, {}, {}, {}, // 00-07
- {}, {}, {}, {}, {}, {}, {}, {}, // 08-0f
- {}, {}, {}, {}, {}, {}, {}, {}, // 10-17
- {}, {}, {}, {}, {}, {}, {}, {}, // 18-1f
- f_sign, {}, {}, f_alt, {}, {}, {}, {}, // !"#$%&'
- {}, {}, {}, f_pos, {}, f_left, {}, {}, // ()*+,-./
- f_zero, {}, {}, {}, {}, {}, {}, {}, // 01234567
- {}, {}, {}, {}, {}, {}, {}, {}, // 89:;<=>?
- {}, CC::A, {}, {}, {}, CC::E, CC::F, CC::G, // @ABCDEFG
- {}, {}, {}, {}, LM::L, {}, {}, {}, // HIJKLMNO
- {}, {}, {}, {}, {}, {}, {}, {}, // PQRSTUVW
- CC::X, {}, {}, {}, {}, {}, {}, {}, // XYZ[\]^_
- {}, CC::a, {}, CC::c, CC::d, CC::e, CC::f, CC::g, // `abcdefg
- LM::h, CC::i, LM::j, {}, LM::l, {}, CC::n, CC::o, // hijklmno
- CC::p, LM::q, {}, CC::s, LM::t, CC::u, {}, {}, // pqrstuvw
- CC::x, {}, LM::z, {}, {}, {}, {}, {}, // xyz{|}!
- {}, {}, {}, {}, {}, {}, {}, {}, // 80-87
- {}, {}, {}, {}, {}, {}, {}, {}, // 88-8f
- {}, {}, {}, {}, {}, {}, {}, {}, // 90-97
- {}, {}, {}, {}, {}, {}, {}, {}, // 98-9f
- {}, {}, {}, {}, {}, {}, {}, {}, // a0-a7
- {}, {}, {}, {}, {}, {}, {}, {}, // a8-af
- {}, {}, {}, {}, {}, {}, {}, {}, // b0-b7
- {}, {}, {}, {}, {}, {}, {}, {}, // b8-bf
- {}, {}, {}, {}, {}, {}, {}, {}, // c0-c7
- {}, {}, {}, {}, {}, {}, {}, {}, // c8-cf
- {}, {}, {}, {}, {}, {}, {}, {}, // d0-d7
- {}, {}, {}, {}, {}, {}, {}, {}, // d8-df
- {}, {}, {}, {}, {}, {}, {}, {}, // e0-e7
- {}, {}, {}, {}, {}, {}, {}, {}, // e8-ef
- {}, {}, {}, {}, {}, {}, {}, {}, // f0-f7
- {}, {}, {}, {}, {}, {}, {}, {}, // f8-ff
+ {}, {}, {}, {}, {}, {}, {}, {}, // 00-07
+ {}, {}, {}, {}, {}, {}, {}, {}, // 08-0f
+ {}, {}, {}, {}, {}, {}, {}, {}, // 10-17
+ {}, {}, {}, {}, {}, {}, {}, {}, // 18-1f
+ f_sign, {}, {}, f_alt, {}, {}, {}, {}, // !"#$%&'
+ {}, {}, {}, f_pos, {}, f_left, {}, {}, // ()*+,-./
+ f_zero, {}, {}, {}, {}, {}, {}, {}, // 01234567
+ {}, {}, {}, {}, {}, {}, {}, {}, // 89:;<=>?
+ {}, CC::A, {}, {}, {}, CC::E, CC::F, CC::G, // @ABCDEFG
+ {}, {}, {}, {}, LM::L, {}, {}, {}, // HIJKLMNO
+ {}, {}, {}, {}, {}, {}, {}, {}, // PQRSTUVW
+ CC::X, {}, {}, {}, {}, {}, {}, {}, // XYZ[\]^_
+ {}, CC::a, {}, CC::c, CC::d, CC::e, CC::f, CC::g, // `abcdefg
+ LM::h, CC::i, LM::j, {}, LM::l, {}, CC::n, CC::o, // hijklmno
+ CC::p, LM::q, {}, CC::s, LM::t, CC::u, {}, {}, // pqrstuvw
+ CC::x, {}, LM::z, {}, {}, {}, {}, {}, // xyz{|}!
+ {}, {}, {}, {}, {}, {}, {}, {}, // 80-87
+ {}, {}, {}, {}, {}, {}, {}, {}, // 88-8f
+ {}, {}, {}, {}, {}, {}, {}, {}, // 90-97
+ {}, {}, {}, {}, {}, {}, {}, {}, // 98-9f
+ {}, {}, {}, {}, {}, {}, {}, {}, // a0-a7
+ {}, {}, {}, {}, {}, {}, {}, {}, // a8-af
+ {}, {}, {}, {}, {}, {}, {}, {}, // b0-b7
+ {}, {}, {}, {}, {}, {}, {}, {}, // b8-bf
+ {}, {}, {}, {}, {}, {}, {}, {}, // c0-c7
+ {}, {}, {}, {}, {}, {}, {}, {}, // c8-cf
+ {}, {}, {}, {}, {}, {}, {}, {}, // d0-d7
+ {}, {}, {}, {}, {}, {}, {}, {}, // d8-df
+ {}, {}, {}, {}, {}, {}, {}, {}, // e0-e7
+ {}, {}, {}, {}, {}, {}, {}, {}, // e8-ef
+ {}, {}, {}, {}, {}, {}, {}, {}, // f0-f7
+ {}, {}, {}, {}, {}, {}, {}, {}, // f8-ff
};
namespace {
bool CheckFastPathSetting(const UnboundConversion& conv) {
- bool width_precision_needed =
- conv.width.value() >= 0 || conv.precision.value() >= 0;
- if (width_precision_needed && conv.flags == Flags::kBasic) {
+ bool width_precision_needed =
+ conv.width.value() >= 0 || conv.precision.value() >= 0;
+ if (width_precision_needed && conv.flags == Flags::kBasic) {
fprintf(stderr,
"basic=%d left=%d show_pos=%d sign_col=%d alt=%d zero=%d "
"width=%d precision=%d\n",
- conv.flags == Flags::kBasic ? 1 : 0,
- FlagsContains(conv.flags, Flags::kLeft) ? 1 : 0,
- FlagsContains(conv.flags, Flags::kShowPos) ? 1 : 0,
- FlagsContains(conv.flags, Flags::kSignCol) ? 1 : 0,
- FlagsContains(conv.flags, Flags::kAlt) ? 1 : 0,
- FlagsContains(conv.flags, Flags::kZero) ? 1 : 0, conv.width.value(),
- conv.precision.value());
- return false;
+ conv.flags == Flags::kBasic ? 1 : 0,
+ FlagsContains(conv.flags, Flags::kLeft) ? 1 : 0,
+ FlagsContains(conv.flags, Flags::kShowPos) ? 1 : 0,
+ FlagsContains(conv.flags, Flags::kSignCol) ? 1 : 0,
+ FlagsContains(conv.flags, Flags::kAlt) ? 1 : 0,
+ FlagsContains(conv.flags, Flags::kZero) ? 1 : 0, conv.width.value(),
+ conv.precision.value());
+ return false;
}
- return true;
+ return true;
}
template <bool is_positional>
@@ -138,19 +138,19 @@ const char *ConsumeConversion(const char *pos, const char *const end,
ABSL_FORMAT_PARSER_INTERNAL_GET_CHAR();
// We should start with the basic flag on.
- assert(conv->flags == Flags::kBasic);
+ assert(conv->flags == Flags::kBasic);
// Any non alpha character makes this conversion not basic.
// This includes flags (-+ #0), width (1-9, *) or precision (.).
// All conversion characters and length modifiers are alpha characters.
if (c < 'A') {
- while (c <= '0') {
- auto tag = GetTagForChar(c);
- if (tag.is_flags()) {
- conv->flags = conv->flags | tag.as_flags();
- ABSL_FORMAT_PARSER_INTERNAL_GET_CHAR();
- } else {
- break;
+ while (c <= '0') {
+ auto tag = GetTagForChar(c);
+ if (tag.is_flags()) {
+ conv->flags = conv->flags | tag.as_flags();
+ ABSL_FORMAT_PARSER_INTERNAL_GET_CHAR();
+ } else {
+ break;
}
}
@@ -163,10 +163,10 @@ const char *ConsumeConversion(const char *pos, const char *const end,
*next_arg = -1;
return ConsumeConversion<true>(original_pos, end, conv, next_arg);
}
- conv->flags = conv->flags | Flags::kNonBasic;
+ conv->flags = conv->flags | Flags::kNonBasic;
conv->width.set_value(maybe_width);
} else if (c == '*') {
- conv->flags = conv->flags | Flags::kNonBasic;
+ conv->flags = conv->flags | Flags::kNonBasic;
ABSL_FORMAT_PARSER_INTERNAL_GET_CHAR();
if (is_positional) {
if (ABSL_PREDICT_FALSE(c < '1' || c > '9')) return nullptr;
@@ -180,7 +180,7 @@ const char *ConsumeConversion(const char *pos, const char *const end,
}
if (c == '.') {
- conv->flags = conv->flags | Flags::kNonBasic;
+ conv->flags = conv->flags | Flags::kNonBasic;
ABSL_FORMAT_PARSER_INTERNAL_GET_CHAR();
if (std::isdigit(c)) {
conv->precision.set_value(parse_digits());
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/parser.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/parser.h
index ba614bb8b4..c8d38e5278 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/parser.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/parser.h
@@ -41,7 +41,7 @@ TString LengthModToString(LengthMod v);
// The analyzed properties of a single specified conversion.
struct UnboundConversion {
- UnboundConversion() {}
+ UnboundConversion() {}
class InputValue {
public:
@@ -76,7 +76,7 @@ struct UnboundConversion {
InputValue width;
InputValue precision;
- Flags flags = Flags::kBasic;
+ Flags flags = Flags::kBasic;
LengthMod length_mod = LengthMod::none;
FormatConversionChar conv = FormatConversionCharInternal::kNone;
};
@@ -90,43 +90,43 @@ const char* ConsumeUnboundConversion(const char* p, const char* end,
UnboundConversion* conv, int* next_arg);
// Helper tag class for the table below.
-// It allows fast `char -> ConversionChar/LengthMod/Flags` checking and
+// It allows fast `char -> ConversionChar/LengthMod/Flags` checking and
// conversions.
class ConvTag {
public:
constexpr ConvTag(FormatConversionChar conversion_char) // NOLINT
- : tag_(static_cast<uint8_t>(conversion_char)) {}
+ : tag_(static_cast<uint8_t>(conversion_char)) {}
constexpr ConvTag(LengthMod length_mod) // NOLINT
- : tag_(0x80 | static_cast<uint8_t>(length_mod)) {}
- constexpr ConvTag(Flags flags) // NOLINT
- : tag_(0xc0 | static_cast<uint8_t>(flags)) {}
- constexpr ConvTag() : tag_(0xFF) {}
-
- bool is_conv() const { return (tag_ & 0x80) == 0; }
- bool is_length() const { return (tag_ & 0xC0) == 0x80; }
- bool is_flags() const { return (tag_ & 0xE0) == 0xC0; }
-
+ : tag_(0x80 | static_cast<uint8_t>(length_mod)) {}
+ constexpr ConvTag(Flags flags) // NOLINT
+ : tag_(0xc0 | static_cast<uint8_t>(flags)) {}
+ constexpr ConvTag() : tag_(0xFF) {}
+
+ bool is_conv() const { return (tag_ & 0x80) == 0; }
+ bool is_length() const { return (tag_ & 0xC0) == 0x80; }
+ bool is_flags() const { return (tag_ & 0xE0) == 0xC0; }
+
FormatConversionChar as_conv() const {
assert(is_conv());
- assert(!is_length());
- assert(!is_flags());
+ assert(!is_length());
+ assert(!is_flags());
return static_cast<FormatConversionChar>(tag_);
}
LengthMod as_length() const {
- assert(!is_conv());
+ assert(!is_conv());
assert(is_length());
- assert(!is_flags());
- return static_cast<LengthMod>(tag_ & 0x3F);
- }
- Flags as_flags() const {
- assert(!is_conv());
- assert(!is_length());
- assert(is_flags());
- return static_cast<Flags>(tag_ & 0x1F);
+ assert(!is_flags());
+ return static_cast<LengthMod>(tag_ & 0x3F);
}
+ Flags as_flags() const {
+ assert(!is_conv());
+ assert(!is_length());
+ assert(is_flags());
+ return static_cast<Flags>(tag_ & 0x1F);
+ }
private:
- uint8_t tag_;
+ uint8_t tag_;
};
extern const ConvTag kTags[256];
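
ConvTag packs three categories plus "empty" into one byte: conversion characters occupy 0x00-0x7F, length modifiers 0x80-0xBF, flags 0xC0-0xDF, and 0xFF means none, which is exactly what the is_conv/is_length/is_flags masks test. A standalone sketch of just those predicates:

#include <cassert>
#include <cstdint>
#include <cstdio>

// Mirrors the ConvTag byte layout described above.
constexpr bool IsConv(uint8_t tag)   { return (tag & 0x80) == 0; }
constexpr bool IsLength(uint8_t tag) { return (tag & 0xC0) == 0x80; }
constexpr bool IsFlags(uint8_t tag)  { return (tag & 0xE0) == 0xC0; }

int main() {
  uint8_t conv = 0x13;         // some conversion-char id, high bit clear
  uint8_t len  = 0x80 | 0x05;  // length modifier id 5
  uint8_t flg  = 0xC0 | 0x02;  // flag bit pattern 2
  uint8_t none = 0xFF;         // default-constructed (empty) tag
  assert(IsConv(conv) && !IsLength(conv) && !IsFlags(conv));
  assert(!IsConv(len) && IsLength(len) && !IsFlags(len));
  assert(!IsConv(flg) && !IsLength(flg) && IsFlags(flg));
  assert(!IsConv(none) && !IsLength(none) && !IsFlags(none));
  std::printf("tag layout checks passed\n");
  return 0;
}
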
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/ya.make b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/ya.make
index ff8069cd0f..ecc59f2d1c 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/ya.make
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_format/ya.make
@@ -19,7 +19,7 @@ PEERDIR(
contrib/restricted/abseil-cpp-tstring/y_absl/base/log_severity
contrib/restricted/abseil-cpp-tstring/y_absl/numeric
contrib/restricted/abseil-cpp-tstring/y_absl/strings
- contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/absl_strings_internal
)
ADDINCL(
diff --git a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_split_internal.h b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_split_internal.h
index 237864c0ed..31699a12bb 100644
--- a/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_split_internal.h
+++ b/contrib/restricted/abseil-cpp-tstring/y_absl/strings/internal/str_split_internal.h
@@ -32,7 +32,7 @@
#include <array>
#include <initializer_list>
#include <iterator>
-#include <tuple>
+#include <tuple>
#include <type_traits>
#include <utility>
#include <vector>
@@ -64,7 +64,7 @@ class ConvertibleToStringView {
ConvertibleToStringView(const TString& s) // NOLINT(runtime/explicit)
: value_(s) {}
- // Disable conversion from rvalue strings.
+ // Disable conversion from rvalue strings.
ConvertibleToStringView(TString&& s) = delete;
ConvertibleToStringView(const TString&& s) = delete;
@@ -182,13 +182,13 @@ template <typename T>
struct HasConstIterator<T, y_absl::void_t<typename T::const_iterator>>
: std::true_type {};
-// HasEmplace<T>::value is true iff there exists a method T::emplace().
-template <typename T, typename = void>
-struct HasEmplace : std::false_type {};
-template <typename T>
-struct HasEmplace<T, y_absl::void_t<decltype(std::declval<T>().emplace())>>
- : std::true_type {};
-
+// HasEmplace<T>::value is true iff there exists a method T::emplace().
+template <typename T, typename = void>
+struct HasEmplace : std::false_type {};
+template <typename T>
+struct HasEmplace<T, y_absl::void_t<decltype(std::declval<T>().emplace())>>
+ : std::true_type {};
+
// IsInitializerList<T>::value is true iff T is an std::initializer_list. More
// details below in Splitter<> where this is used.
std::false_type IsInitializerListDispatch(...); // default: No
@@ -379,43 +379,43 @@ class Splitter {
// value.
template <typename Container, typename First, typename Second>
struct ConvertToContainer<Container, std::pair<const First, Second>, true> {
- using iterator = typename Container::iterator;
-
+ using iterator = typename Container::iterator;
+
Container operator()(const Splitter& splitter) const {
Container m;
- iterator it;
+ iterator it;
bool insert = true;
- for (const y_absl::string_view sv : splitter) {
+ for (const y_absl::string_view sv : splitter) {
if (insert) {
- it = InsertOrEmplace(&m, sv);
+ it = InsertOrEmplace(&m, sv);
} else {
- it->second = Second(sv);
+ it->second = Second(sv);
}
insert = !insert;
}
return m;
}
- // Inserts the key and an empty value into the map, returning an iterator to
- // the inserted item. We use emplace() if available, otherwise insert().
- template <typename M>
- static y_absl::enable_if_t<HasEmplace<M>::value, iterator> InsertOrEmplace(
- M* m, y_absl::string_view key) {
- // Use piecewise_construct to support old versions of gcc in which pair
- // constructor can't otherwise construct string from string_view.
- return ToIter(m->emplace(std::piecewise_construct, std::make_tuple(key),
- std::tuple<>()));
- }
- template <typename M>
- static y_absl::enable_if_t<!HasEmplace<M>::value, iterator> InsertOrEmplace(
- M* m, y_absl::string_view key) {
- return ToIter(m->insert(std::make_pair(First(key), Second(""))));
- }
-
- static iterator ToIter(std::pair<iterator, bool> pair) {
- return pair.first;
- }
- static iterator ToIter(iterator iter) { return iter; }
+ // Inserts the key and an empty value into the map, returning an iterator to
+ // the inserted item. We use emplace() if available, otherwise insert().
+ template <typename M>
+ static y_absl::enable_if_t<HasEmplace<M>::value, iterator> InsertOrEmplace(
+ M* m, y_absl::string_view key) {
+ // Use piecewise_construct to support old versions of gcc in which pair
+ // constructor can't otherwise construct string from string_view.
+ return ToIter(m->emplace(std::piecewise_construct, std::make_tuple(key),
+ std::tuple<>()));
+ }
+ template <typename M>
+ static y_absl::enable_if_t<!HasEmplace<M>::value, iterator> InsertOrEmplace(
+ M* m, y_absl::string_view key) {
+ return ToIter(m->insert(std::make_pair(First(key), Second(""))));
+ }
+
+ static iterator ToIter(std::pair<iterator, bool> pair) {
+ return pair.first;
+ }
+ static iterator ToIter(iterator iter) { return iter; }
};
StringType text_;
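
The HasEmplace trait above is the classic void_t detection idiom, which InsertOrEmplace then uses to prefer emplace() over insert(). A standalone sketch of the same trait, with a local void_t standing in for y_absl::void_t:

#include <map>
#include <type_traits>
#include <utility>

// Mirrors HasEmplace above: the partial specialization is selected only
// when T::emplace() with zero arguments is well-formed.
template <typename...>
using void_t = void;

template <typename T, typename = void>
struct HasEmplace : std::false_type {};
template <typename T>
struct HasEmplace<T, void_t<decltype(std::declval<T>().emplace())>>
    : std::true_type {};

// std::map<K, V>::emplace() default-constructs a value_type, so it is
// well-formed with no arguments; int has no emplace at all.
static_assert(HasEmplace<std::map<int, int>>::value, "map has emplace()");
static_assert(!HasEmplace<int>::value, "int has no emplace()");
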