aboutsummaryrefslogtreecommitdiffstats
path: root/library/cpp/yt/memory
diff options
context:
space:
mode:
authorDevtools Arcadia <arcadia-devtools@yandex-team.ru>2022-02-07 18:08:42 +0300
committerDevtools Arcadia <arcadia-devtools@mous.vla.yp-c.yandex.net>2022-02-07 18:08:42 +0300
commit1110808a9d39d4b808aef724c861a2e1a38d2a69 (patch)
treee26c9fed0de5d9873cce7e00bc214573dc2195b7 /library/cpp/yt/memory
downloadydb-1110808a9d39d4b808aef724c861a2e1a38d2a69.tar.gz
intermediate changes
ref:cde9a383711a11544ce7e107a78147fb96cc4029
Diffstat (limited to 'library/cpp/yt/memory')
-rw-r--r--library/cpp/yt/memory/blob.cpp224
-rw-r--r--library/cpp/yt/memory/blob.h221
-rw-r--r--library/cpp/yt/memory/intrusive_ptr.h360
-rw-r--r--library/cpp/yt/memory/leaky_ref_counted_singleton-inl.h43
-rw-r--r--library/cpp/yt/memory/leaky_ref_counted_singleton.h18
-rw-r--r--library/cpp/yt/memory/leaky_singleton-inl.h34
-rw-r--r--library/cpp/yt/memory/leaky_singleton.h34
-rw-r--r--library/cpp/yt/memory/new-inl.h310
-rw-r--r--library/cpp/yt/memory/new.h127
-rw-r--r--library/cpp/yt/memory/range.h556
-rw-r--r--library/cpp/yt/memory/ref-inl.h517
-rw-r--r--library/cpp/yt/memory/ref.cpp378
-rw-r--r--library/cpp/yt/memory/ref.h384
-rw-r--r--library/cpp/yt/memory/ref_counted-inl.h278
-rw-r--r--library/cpp/yt/memory/ref_counted.h190
-rw-r--r--library/cpp/yt/memory/ref_tracked-inl.h49
-rw-r--r--library/cpp/yt/memory/ref_tracked.cpp38
-rw-r--r--library/cpp/yt/memory/ref_tracked.h111
-rw-r--r--library/cpp/yt/memory/shared_range.h297
-rw-r--r--library/cpp/yt/memory/unittests/intrusive_ptr_ut.cpp562
-rw-r--r--library/cpp/yt/memory/unittests/weak_ptr_ut.cpp433
-rw-r--r--library/cpp/yt/memory/unittests/ya.make19
-rw-r--r--library/cpp/yt/memory/weak_ptr.h314
-rw-r--r--library/cpp/yt/memory/ya.make31
24 files changed, 5528 insertions, 0 deletions
diff --git a/library/cpp/yt/memory/blob.cpp b/library/cpp/yt/memory/blob.cpp
new file mode 100644
index 0000000000..86000b033b
--- /dev/null
+++ b/library/cpp/yt/memory/blob.cpp
@@ -0,0 +1,224 @@
+#include "blob.h"
+#include "ref.h"
+
+#include <library/cpp/ytalloc/api/ytalloc.h>
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+static constexpr size_t InitialBlobCapacity = 16;
+static constexpr double BlobCapacityMultiplier = 1.5;
+
+TBlob::TBlob(
+ TRefCountedTypeCookie tagCookie,
+ size_t size,
+ bool initiailizeStorage,
+ bool pageAligned)
+ : PageAligned_(pageAligned)
+{
+ SetTagCookie(tagCookie);
+ if (size == 0) {
+ Reset();
+ } else {
+ Allocate(std::max(size, InitialBlobCapacity));
+ Size_ = size;
+ if (initiailizeStorage) {
+ ::memset(Begin_, 0, Size_);
+ }
+ }
+}
+
+TBlob::TBlob(
+ TRefCountedTypeCookie tagCookie,
+ TRef data,
+ bool pageAligned)
+ : PageAligned_(pageAligned)
+{
+ SetTagCookie(tagCookie);
+ Reset();
+ Append(data);
+}
+
+TBlob::TBlob(const TBlob& other)
+ : PageAligned_(other.PageAligned_)
+{
+ SetTagCookie(other);
+ if (other.Size_ == 0) {
+ Reset();
+ } else {
+ Allocate(std::max(InitialBlobCapacity, other.Size_));
+ ::memcpy(Begin_, other.Begin_, other.Size_);
+ Size_ = other.Size_;
+ }
+}
+
+TBlob::TBlob(TBlob&& other) noexcept
+ : Begin_(other.Begin_)
+ , Size_(other.Size_)
+ , Capacity_(other.Capacity_)
+ , PageAligned_(other.PageAligned_)
+{
+ SetTagCookie(other);
+ other.Reset();
+}
+
+TBlob::~TBlob()
+{
+ Free();
+}
+
+void TBlob::Reserve(size_t newCapacity)
+{
+ if (newCapacity > Capacity_) {
+ Reallocate(newCapacity);
+ }
+}
+
+void TBlob::Resize(size_t newSize, bool initializeStorage /*= true*/)
+{
+ if (newSize > Size_) {
+ if (newSize > Capacity_) {
+ size_t newCapacity;
+ if (Capacity_ == 0) {
+ newCapacity = std::max(InitialBlobCapacity, newSize);
+ } else {
+ newCapacity = std::max(static_cast<size_t>(Capacity_ * BlobCapacityMultiplier), newSize);
+ }
+ Reallocate(newCapacity);
+ }
+ if (initializeStorage) {
+ ::memset(Begin_ + Size_, 0, newSize - Size_);
+ }
+ }
+ Size_ = newSize;
+}
+
+TBlob& TBlob::operator = (const TBlob& rhs)
+{
+ if (this != &rhs) {
+ this->~TBlob();
+ new(this) TBlob(rhs);
+ }
+ return *this;
+}
+
+TBlob& TBlob::operator = (TBlob&& rhs) noexcept
+{
+ if (this != &rhs) {
+ this->~TBlob();
+ new(this) TBlob(std::move(rhs));
+ }
+ return *this;
+}
+
+void TBlob::Append(const void* data, size_t size)
+{
+ if (Size_ + size > Capacity_) {
+ Resize(Size_ + size, false);
+ ::memcpy(Begin_ + Size_ - size, data, size);
+ } else {
+ ::memcpy(Begin_ + Size_, data, size);
+ Size_ += size;
+ }
+}
+
+void TBlob::Append(TRef ref)
+{
+ Append(ref.Begin(), ref.Size());
+}
+
+void TBlob::Append(char ch)
+{
+ if (Size_ + 1 > Capacity_) {
+ Resize(Size_ + 1, false);
+ Begin_[Size_ - 1] = ch;
+ } else {
+ Begin_[Size_++] = ch;
+ }
+}
+
+void TBlob::Reset()
+{
+ Begin_ = nullptr;
+ Size_ = Capacity_ = 0;
+}
+
+char* TBlob::DoAllocate(size_t size)
+{
+ return static_cast<char*>(PageAligned_
+ ? NYTAlloc::AllocatePageAligned(size)
+ : NYTAlloc::Allocate(size));
+}
+
+void TBlob::Allocate(size_t newCapacity)
+{
+ YT_VERIFY(!Begin_);
+ Begin_ = DoAllocate(newCapacity);
+ Capacity_ = newCapacity;
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+ TRefCountedTrackerFacade::AllocateTagInstance(TagCookie_);
+ TRefCountedTrackerFacade::AllocateSpace(TagCookie_, newCapacity);
+#endif
+}
+
+void TBlob::Reallocate(size_t newCapacity)
+{
+ if (!Begin_) {
+ Allocate(newCapacity);
+ return;
+ }
+ char* newBegin = DoAllocate(newCapacity);
+ ::memcpy(newBegin, Begin_, Size_);
+ NYTAlloc::FreeNonNull(Begin_);
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+ TRefCountedTrackerFacade::AllocateSpace(TagCookie_, newCapacity);
+ TRefCountedTrackerFacade::FreeSpace(TagCookie_, Capacity_);
+#endif
+ Begin_ = newBegin;
+ Capacity_ = newCapacity;
+}
+
+void TBlob::Free()
+{
+ if (!Begin_) {
+ return;
+ }
+ NYTAlloc::FreeNonNull(Begin_);
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+ TRefCountedTrackerFacade::FreeTagInstance(TagCookie_);
+ TRefCountedTrackerFacade::FreeSpace(TagCookie_, Capacity_);
+#endif
+ Reset();
+}
+
+void TBlob::SetTagCookie(TRefCountedTypeCookie tagCookie)
+{
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+ TagCookie_ = tagCookie;
+#endif
+}
+
+void TBlob::SetTagCookie(const TBlob& other)
+{
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+ TagCookie_ = other.TagCookie_;
+#endif
+}
+
+void swap(TBlob& left, TBlob& right)
+{
+ if (&left != &right) {
+ std::swap(left.Begin_, right.Begin_);
+ std::swap(left.Size_, right.Size_);
+ std::swap(left.Capacity_, right.Capacity_);
+ std::swap(left.PageAligned_, right.PageAligned_);
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+ std::swap(left.TagCookie_, right.TagCookie_);
+#endif
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
diff --git a/library/cpp/yt/memory/blob.h b/library/cpp/yt/memory/blob.h
new file mode 100644
index 0000000000..99441fb8c9
--- /dev/null
+++ b/library/cpp/yt/memory/blob.h
@@ -0,0 +1,221 @@
+#pragma once
+
+#include "ref.h"
+#include "ref_counted.h"
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Default memory tag for TBlob.
+struct TDefaultBlobTag
+{ };
+
+//! A home-grown optimized replacement for |std::vector<char>| suitable for carrying
+//! large chunks of data.
+/*!
+ * Compared to |std::vector<char>|, this class supports uninitialized allocations
+ * when explicitly requested to.
+ */
+class TBlob
+{
+public:
+ //! Constructs a blob with a given size.
+ TBlob(
+ TRefCountedTypeCookie tagCookie,
+ size_t size,
+ bool initiailizeStorage = true,
+ bool pageAligned = false);
+
+ //! Copies a chunk of memory into a new instance.
+ TBlob(
+ TRefCountedTypeCookie tagCookie,
+ TRef data,
+ bool pageAligned = false);
+
+ //! Constructs an empty blob.
+ template <class TTag = TDefaultBlobTag>
+ explicit TBlob(TTag tag = {})
+ : TBlob(tag, 0, true, false)
+ { }
+
+ //! Constructs a blob with a given size.
+ template <class TTag>
+ explicit TBlob(
+ TTag,
+ size_t size,
+ bool initiailizeStorage = true,
+ bool pageAligned = false)
+ : TBlob(
+ GetRefCountedTypeCookie<TTag>(),
+ size,
+ initiailizeStorage,
+ pageAligned)
+ { }
+
+ //! Copies a chunk of memory into a new instance.
+ template <class TTag>
+ TBlob(
+ TTag,
+ TRef data,
+ bool pageAligned = false)
+ : TBlob(
+ GetRefCountedTypeCookie<TTag>(),
+ data,
+ pageAligned)
+ { }
+
+ //! Remind user about the tag argument.
+ TBlob(i32 size, bool initiailizeStorage = true) = delete;
+ TBlob(i64 size, bool initiailizeStorage = true) = delete;
+ TBlob(ui32 size, bool initiailizeStorage = true) = delete;
+ TBlob(ui64 size, bool initiailizeStorage = true) = delete;
+ template <typename T, typename U>
+ TBlob(const T*, U) = delete;
+
+ //! Copies the data.
+ TBlob(const TBlob& other);
+
+ //! Moves the data (takes the ownership).
+ TBlob(TBlob&& other) noexcept;
+
+ //! Reclaims the memory.
+ ~TBlob();
+
+ //! Ensures that capacity is at least #capacity.
+ void Reserve(size_t newCapacity);
+
+ //! Changes the size to #newSize.
+ /*!
+ * If #size exceeds the current capacity,
+ * we make sure the new capacity grows exponentially.
+ * Hence calling #Resize N times to increase the size by N only
+ * takes amortized O(1) time per call.
+ */
+ void Resize(size_t newSize, bool initializeStorage = true);
+
+ //! Returns the start pointer.
+ Y_FORCE_INLINE const char* Begin() const
+ {
+ return Begin_;
+ }
+
+ //! Returns the start pointer.
+ Y_FORCE_INLINE char* Begin()
+ {
+ return Begin_;
+ }
+
+ //! Returns the end pointer.
+ Y_FORCE_INLINE const char* End() const
+ {
+ return Begin_ + Size_;
+ }
+
+ //! Returns the end pointer.
+ Y_FORCE_INLINE char* End()
+ {
+ return Begin_ + Size_;
+ }
+
+ //! Returns the size.
+ Y_FORCE_INLINE size_t size() const
+ {
+ return Size_;
+ }
+
+ //! Returns the size.
+ Y_FORCE_INLINE size_t Size() const
+ {
+ return Size_;
+ }
+
+ //! Returns the capacity.
+ Y_FORCE_INLINE size_t Capacity() const
+ {
+ return Capacity_;
+ }
+
+ //! Returns the TStringBuf instance for the occupied part of the blob.
+ Y_FORCE_INLINE TStringBuf ToStringBuf() const
+ {
+ return TStringBuf(Begin_, Size_);
+ }
+
+ //! Returns the TRef instance for the occupied part of the blob.
+ Y_FORCE_INLINE TRef ToRef() const
+ {
+ return TRef(Begin_, Size_);
+ }
+
+ //! Provides by-value access to the underlying storage.
+ Y_FORCE_INLINE char operator [] (size_t index) const
+ {
+ return Begin_[index];
+ }
+
+ //! Provides by-ref access to the underlying storage.
+ Y_FORCE_INLINE char& operator [] (size_t index)
+ {
+ return Begin_[index];
+ }
+
+ //! Clears the instance but does not reclaim the memory.
+ Y_FORCE_INLINE void Clear()
+ {
+ Size_ = 0;
+ }
+
+ //! Returns |true| if size is zero.
+ Y_FORCE_INLINE bool IsEmpty() const
+ {
+ return Size_ == 0;
+ }
+
+ //! Overwrites the current instance.
+ TBlob& operator = (const TBlob& rhs);
+
+ //! Takes the ownership.
+ TBlob& operator = (TBlob&& rhs) noexcept;
+
+ //! Appends a chunk of memory to the end.
+ void Append(const void* data, size_t size);
+
+ //! Appends a chunk of memory to the end.
+ void Append(TRef ref);
+
+ //! Appends a single char to the end.
+ void Append(char ch);
+
+ //! Swaps the current and other instances
+ void Swap(TBlob& other);
+
+ friend void swap(TBlob& left, TBlob& right);
+
+private:
+ char* Begin_ = nullptr;
+ size_t Size_ = 0;
+ size_t Capacity_ = 0;
+ bool PageAligned_ = false;
+
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+ TRefCountedTypeCookie TagCookie_ = NullRefCountedTypeCookie;
+#endif
+
+ char* DoAllocate(size_t newCapacity);
+ void Allocate(size_t newCapacity);
+ void Reallocate(size_t newCapacity);
+ void Free();
+
+ void Reset();
+
+ void SetTagCookie(TRefCountedTypeCookie tagCookie);
+ void SetTagCookie(const TBlob& other);
+};
+
+void swap(TBlob& left, TBlob& right);
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
+
diff --git a/library/cpp/yt/memory/intrusive_ptr.h b/library/cpp/yt/memory/intrusive_ptr.h
new file mode 100644
index 0000000000..3dead7db1d
--- /dev/null
+++ b/library/cpp/yt/memory/intrusive_ptr.h
@@ -0,0 +1,360 @@
+#pragma once
+
+#include "ref_counted.h"
+
+#include <util/generic/hash.h>
+#include <util/generic/utility.h>
+
+#include <utility>
+#include <type_traits>
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T>
+class TIntrusivePtr
+{
+public:
+ typedef T TUnderlying;
+
+ constexpr TIntrusivePtr() noexcept
+ { }
+
+ constexpr TIntrusivePtr(std::nullptr_t) noexcept
+ { }
+
+ //! Constructor from an unqualified reference.
+ /*!
+ * Note that this constructor could be racy due to unsynchronized operations
+ * on the object and on the counter.
+ *
+ * Note that it notoriously hard to make this constructor explicit
+ * given the current amount of code written.
+ */
+ TIntrusivePtr(T* obj, bool addReference = true) noexcept
+ : T_(obj)
+ {
+ if (T_ && addReference) {
+ Ref(T_);
+ }
+ }
+
+ //! Copy constructor.
+ TIntrusivePtr(const TIntrusivePtr& other) noexcept
+ : T_(other.Get())
+ {
+ if (T_) {
+ Ref(T_);
+ }
+ }
+
+ //! Copy constructor with an upcast.
+ template <class U, class = typename std::enable_if_t<std::is_convertible_v<U*, T*>>>
+ TIntrusivePtr(const TIntrusivePtr<U>& other) noexcept
+ : T_(other.Get())
+ {
+ static_assert(
+ std::is_base_of_v<TRefCountedBase, T>,
+ "Cast allowed only for types derived from TRefCountedBase");
+ if (T_) {
+ Ref(T_);
+ }
+ }
+
+ //! Move constructor.
+ TIntrusivePtr(TIntrusivePtr&& other) noexcept
+ : T_(other.Get())
+ {
+ other.T_ = nullptr;
+ }
+
+ //! Move constructor with an upcast.
+ template <class U, class = typename std::enable_if_t<std::is_convertible_v<U*, T*>>>
+ TIntrusivePtr(TIntrusivePtr<U>&& other) noexcept
+ : T_(other.Get())
+ {
+ static_assert(
+ std::is_base_of_v<TRefCountedBase, T>,
+ "Cast allowed only for types derived from TRefCountedBase");
+ other.T_ = nullptr;
+ }
+
+ //! Destructor.
+ ~TIntrusivePtr()
+ {
+ if (T_) {
+ Unref(T_);
+ }
+ }
+
+ //! Copy assignment operator.
+ TIntrusivePtr& operator=(const TIntrusivePtr& other) noexcept
+ {
+ TIntrusivePtr(other).Swap(*this);
+ return *this;
+ }
+
+ //! Copy assignment operator with an upcast.
+ template <class U>
+ TIntrusivePtr& operator=(const TIntrusivePtr<U>& other) noexcept
+ {
+ static_assert(
+ std::is_convertible_v<U*, T*>,
+ "U* must be convertible to T*");
+ static_assert(
+ std::is_base_of_v<TRefCountedBase, T>,
+ "Cast allowed only for types derived from TRefCountedBase");
+ TIntrusivePtr(other).Swap(*this);
+ return *this;
+ }
+
+ //! Move assignment operator.
+ TIntrusivePtr& operator=(TIntrusivePtr&& other) noexcept
+ {
+ TIntrusivePtr(std::move(other)).Swap(*this);
+ return *this;
+ }
+
+ //! Move assignment operator with an upcast.
+ template <class U>
+ TIntrusivePtr& operator=(TIntrusivePtr<U>&& other) noexcept
+ {
+ static_assert(
+ std::is_convertible_v<U*, T*>,
+ "U* must be convertible to T*");
+ static_assert(
+ std::is_base_of_v<TRefCountedBase, T>,
+ "Cast allowed only for types derived from TRefCountedBase");
+ TIntrusivePtr(std::move(other)).Swap(*this);
+ return *this;
+ }
+
+ //! Drop the pointer.
+ void Reset() // noexcept
+ {
+ TIntrusivePtr().Swap(*this);
+ }
+
+ //! Replace the pointer with a specified one.
+ void Reset(T* p) // noexcept
+ {
+ TIntrusivePtr(p).Swap(*this);
+ }
+
+ //! Returns the pointer.
+ T* Get() const noexcept
+ {
+ return T_;
+ }
+
+ //! Returns the pointer and releases the ownership.
+ T* Release() noexcept
+ {
+ auto* p = T_;
+ T_ = nullptr;
+ return p;
+ }
+
+ T& operator*() const noexcept
+ {
+ YT_ASSERT(T_);
+ return *T_;
+ }
+
+ T* operator->() const noexcept
+ {
+ YT_ASSERT(T_);
+ return T_;
+ }
+
+ explicit operator bool() const noexcept
+ {
+ return T_ != nullptr;
+ }
+
+ //! Swap the pointer with the other one.
+ void Swap(TIntrusivePtr& r) noexcept
+ {
+ DoSwap(T_, r.T_);
+ }
+
+private:
+ template <class U>
+ friend class TIntrusivePtr;
+
+ T* T_ = nullptr;
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Creates a strong pointer wrapper for a given raw pointer.
+//! Compared to |TIntrusivePtr<T>::ctor|, type inference enables omitting |T|.
+template <class T>
+TIntrusivePtr<T> MakeStrong(T* p)
+{
+ return TIntrusivePtr<T>(p);
+}
+
+//! Tries to obtain an intrusive pointer for an object that may had
+//! already lost all of its references and, thus, is about to be deleted.
+/*!
+ * You may call this method at any time provided that you have a valid
+ * raw pointer to an object. The call either returns an intrusive pointer
+ * for the object (thus ensuring that the object won't be destroyed until
+ * you're holding this pointer) or NULL indicating that the last reference
+ * had already been lost and the object is on its way to heavens.
+ * All these steps happen atomically.
+ *
+ * Under all circumstances it is caller's responsibility the make sure that
+ * the object is not destroyed during the call to #DangerousGetPtr.
+ * Typically this is achieved by keeping a (lock-protected) collection of
+ * raw pointers, taking a lock in object's destructor, and unregistering
+ * its raw pointer from the collection there.
+ */
+
+template <class T>
+Y_FORCE_INLINE TIntrusivePtr<T> DangerousGetPtr(T* object)
+{
+ return object->TryRef()
+ ? TIntrusivePtr<T>(object, false)
+ : TIntrusivePtr<T>();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T, class U>
+TIntrusivePtr<T> StaticPointerCast(const TIntrusivePtr<U>& ptr)
+{
+ return {static_cast<T*>(ptr.Get())};
+}
+
+template <class T, class U>
+TIntrusivePtr<T> StaticPointerCast(TIntrusivePtr<U>&& ptr)
+{
+ return {static_cast<T*>(ptr.Release()), false};
+}
+
+template <class T, class U>
+TIntrusivePtr<T> ConstPointerCast(const TIntrusivePtr<U>& ptr)
+{
+ return {const_cast<T*>(ptr.Get())};
+}
+
+template <class T, class U>
+TIntrusivePtr<T> ConstPointerCast(TIntrusivePtr<U>&& ptr)
+{
+ return {const_cast<T*>(ptr.Release()), false};
+}
+
+template <class T, class U>
+TIntrusivePtr<T> DynamicPointerCast(const TIntrusivePtr<U>& ptr)
+{
+ return {dynamic_cast<T*>(ptr.Get())};
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T>
+bool operator<(const TIntrusivePtr<T>& lhs, const TIntrusivePtr<T>& rhs)
+{
+ return lhs.Get() < rhs.Get();
+}
+
+template <class T>
+bool operator>(const TIntrusivePtr<T>& lhs, const TIntrusivePtr<T>& rhs)
+{
+ return lhs.Get() > rhs.Get();
+}
+
+template <class T, class U>
+bool operator==(const TIntrusivePtr<T>& lhs, const TIntrusivePtr<U>& rhs)
+{
+ static_assert(
+ std::is_convertible_v<U*, T*>,
+ "U* must be convertible to T*");
+ return lhs.Get() == rhs.Get();
+}
+
+template <class T, class U>
+bool operator!=(const TIntrusivePtr<T>& lhs, const TIntrusivePtr<U>& rhs)
+{
+ static_assert(
+ std::is_convertible_v<U*, T*>,
+ "U* must be convertible to T*");
+ return lhs.Get() != rhs.Get();
+}
+
+template <class T, class U>
+bool operator==(const TIntrusivePtr<T>& lhs, U* rhs)
+{
+ static_assert(
+ std::is_convertible_v<U*, T*>,
+ "U* must be convertible to T*");
+ return lhs.Get() == rhs;
+}
+
+template <class T, class U>
+bool operator!=(const TIntrusivePtr<T>& lhs, U* rhs)
+{
+ static_assert(
+ std::is_convertible_v<U*, T*>,
+ "U* must be convertible to T*");
+ return lhs.Get() != rhs;
+}
+
+template <class T, class U>
+bool operator==(T* lhs, const TIntrusivePtr<U>& rhs)
+{
+ static_assert(
+ std::is_convertible_v<U*, T*>,
+ "U* must be convertible to T*");
+ return lhs == rhs.Get();
+}
+
+template <class T, class U>
+bool operator!=(T* lhs, const TIntrusivePtr<U>& rhs)
+{
+ static_assert(
+ std::is_convertible_v<U*, T*>,
+ "U* must be convertible to T*");
+ return lhs != rhs.Get();
+}
+
+template <class T>
+bool operator==(std::nullptr_t, const TIntrusivePtr<T>& rhs)
+{
+ return nullptr == rhs.Get();
+}
+
+template <class T>
+bool operator!=(std::nullptr_t, const TIntrusivePtr<T>& rhs)
+{
+ return nullptr != rhs.Get();
+}
+
+template <class T>
+bool operator==(const TIntrusivePtr<T>& lhs, std::nullptr_t)
+{
+ return nullptr == lhs.Get();
+}
+
+template <class T>
+bool operator!=(const TIntrusivePtr<T>& lhs, std::nullptr_t)
+{
+ return nullptr != lhs.Get();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+} //namespace NYT
+
+//! A hasher for TIntrusivePtr.
+template <class T>
+struct THash<NYT::TIntrusivePtr<T>>
+{
+ Y_FORCE_INLINE size_t operator () (const NYT::TIntrusivePtr<T>& ptr) const
+ {
+ return THash<T*>()(ptr.Get());
+ }
+};
diff --git a/library/cpp/yt/memory/leaky_ref_counted_singleton-inl.h b/library/cpp/yt/memory/leaky_ref_counted_singleton-inl.h
new file mode 100644
index 0000000000..a68ec5ed6a
--- /dev/null
+++ b/library/cpp/yt/memory/leaky_ref_counted_singleton-inl.h
@@ -0,0 +1,43 @@
+#ifndef LEAKY_REF_COUNTED_SINGLETON_INL_H_
+#error "Direct inclusion of this file is not allowed, include leaky_ref_counted_singleton.h"
+// For the sake of sane code completion.
+#include "leaky_ref_counted_singleton.h"
+#endif
+
+#include "new.h"
+
+#include <atomic>
+#include <mutex>
+
+#include <util/system/compiler.h>
+#include <util/system/sanitizers.h>
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T>
+TIntrusivePtr<T> LeakyRefCountedSingleton()
+{
+ static std::atomic<T*> Ptr;
+ auto* ptr = Ptr.load(std::memory_order_acquire);
+ if (Y_LIKELY(ptr)) {
+ return ptr;
+ }
+
+ static std::once_flag Initialized;
+ std::call_once(Initialized, [] {
+ auto ptr = New<T>();
+ Ref(ptr.Get());
+ Ptr.store(ptr.Get());
+#if defined(_asan_enabled_)
+ NSan::MarkAsIntentionallyLeaked(ptr.Get());
+#endif
+ });
+
+ return Ptr.load();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
diff --git a/library/cpp/yt/memory/leaky_ref_counted_singleton.h b/library/cpp/yt/memory/leaky_ref_counted_singleton.h
new file mode 100644
index 0000000000..1d5761bd9d
--- /dev/null
+++ b/library/cpp/yt/memory/leaky_ref_counted_singleton.h
@@ -0,0 +1,18 @@
+#pragma once
+
+#include "intrusive_ptr.h"
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T>
+TIntrusivePtr<T> LeakyRefCountedSingleton();
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
+
+#define LEAKY_REF_COUNTED_SINGLETON_INL_H_
+#include "leaky_ref_counted_singleton-inl.h"
+#undef LEAKY_REF_COUNTED_SINGLETON_INL_H_
diff --git a/library/cpp/yt/memory/leaky_singleton-inl.h b/library/cpp/yt/memory/leaky_singleton-inl.h
new file mode 100644
index 0000000000..932747c921
--- /dev/null
+++ b/library/cpp/yt/memory/leaky_singleton-inl.h
@@ -0,0 +1,34 @@
+#ifndef LEAKY_SINGLETON_INL_H_
+#error "Direct inclusion of this file is not allowed, include leaky_singleton.h"
+// For the sake of sane code completion.
+#include "leaky_singleton.h"
+#endif
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T>
+TLeakyStorage<T>::TLeakyStorage()
+{
+ new (Get()) T();
+}
+
+template <class T>
+T* TLeakyStorage<T>::Get()
+{
+ return reinterpret_cast<T*>(Buffer_);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T>
+T* LeakySingleton()
+{
+ static TLeakyStorage<T> Storage;
+ return Storage.Get();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
diff --git a/library/cpp/yt/memory/leaky_singleton.h b/library/cpp/yt/memory/leaky_singleton.h
new file mode 100644
index 0000000000..03b5e51d78
--- /dev/null
+++ b/library/cpp/yt/memory/leaky_singleton.h
@@ -0,0 +1,34 @@
+#pragma once
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T>
+class TLeakyStorage
+{
+public:
+ TLeakyStorage();
+
+ T* Get();
+
+private:
+ alignas(T) char Buffer_[sizeof(T)];
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+#define DECLARE_LEAKY_SINGLETON_FRIEND() \
+ template <class T> \
+ friend class ::NYT::TLeakyStorage;
+
+template <class T>
+T* LeakySingleton();
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
+
+#define LEAKY_SINGLETON_INL_H_
+#include "leaky_singleton-inl.h"
+#undef LEAKY_SINGLETON_INL_H_
diff --git a/library/cpp/yt/memory/new-inl.h b/library/cpp/yt/memory/new-inl.h
new file mode 100644
index 0000000000..0a84818516
--- /dev/null
+++ b/library/cpp/yt/memory/new-inl.h
@@ -0,0 +1,310 @@
+#ifndef NEW_INL_H_
+#error "Direct inclusion of this file is not allowed, include new.h"
+// For the sake of sane code completion.
+#include "new.h"
+#endif
+
+#include <library/cpp/ytalloc/api/ytalloc.h>
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct TRefCountedCookieHolder
+{
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+ TRefCountedTypeCookie Cookie = NullRefCountedTypeCookie;
+
+ void InitializeTracking(TRefCountedTypeCookie cookie)
+ {
+ YT_ASSERT(Cookie == NullRefCountedTypeCookie);
+ Cookie = cookie;
+ TRefCountedTrackerFacade::AllocateInstance(Cookie);
+ }
+
+ ~TRefCountedCookieHolder()
+ {
+ if (Cookie != NullRefCountedTypeCookie) {
+ TRefCountedTrackerFacade::FreeInstance(Cookie);
+ }
+ }
+#endif
+};
+
+template <class T>
+struct TRefCountedWrapper final
+ : public T
+ , public TRefTracked<T>
+{
+ template <class... TArgs>
+ explicit TRefCountedWrapper(TArgs&&... args)
+ : T(std::forward<TArgs>(args)...)
+ { }
+
+ ~TRefCountedWrapper() = default;
+
+ void DestroyRefCounted() override
+ {
+ T::DestroyRefCountedImpl(this);
+ }
+};
+
+template <class T, class TDeleter>
+class TRefCountedWrapperWithDeleter final
+ : public T
+ , public TRefTracked<T>
+{
+public:
+ template <class... TArgs>
+ explicit TRefCountedWrapperWithDeleter(const TDeleter& deleter, TArgs&&... args)
+ : T(std::forward<TArgs>(args)...)
+ , Deleter_(deleter)
+ { }
+
+ ~TRefCountedWrapperWithDeleter() = default;
+
+ void DestroyRefCounted() override
+ {
+ Deleter_(this);
+ }
+
+private:
+ TDeleter Deleter_;
+};
+
+template <class T>
+struct TRefCountedWrapperWithCookie final
+ : public T
+ , public TRefCountedCookieHolder
+{
+ template <class... TArgs>
+ explicit TRefCountedWrapperWithCookie(TArgs&&... args)
+ : T(std::forward<TArgs>(args)...)
+ { }
+
+ ~TRefCountedWrapperWithCookie() = default;
+
+ void DestroyRefCounted() override
+ {
+ T::DestroyRefCountedImpl(this);
+ }
+};
+
+namespace NDetail {
+
+Y_FORCE_INLINE void* AllignedMalloc(size_t size, size_t allignment)
+{
+#ifdef _win_
+ return ::_aligned_malloc(size, allignment);
+#else
+ void* ptr = nullptr;
+ ::posix_memalign(&ptr, allignment, size);
+ return ptr;
+#endif
+}
+
+template <class... Args>
+Y_FORCE_INLINE void CustomInitialize(Args... args)
+{
+ Y_UNUSED(args...);
+}
+
+template <class T>
+Y_FORCE_INLINE auto CustomInitialize(T* ptr) -> decltype(&T::InitializeRefCounted, void())
+{
+ ptr->InitializeRefCounted();
+}
+
+template <class T, class... As>
+Y_FORCE_INLINE T* NewEpilogue(void* ptr, As&& ... args)
+{
+ try {
+ auto* instance = static_cast<T*>(ptr);
+ new (instance) T(std::forward<As>(args)...);
+ CustomInitialize(instance);
+ return instance;
+ } catch (const std::exception& ex) {
+ // Do not forget to free the memory.
+ TFreeMemory<T>::Do(ptr);
+ throw;
+ }
+}
+
+template <class T, bool = std::is_base_of_v<TRefCountedBase, T>>
+struct TConstructHelper
+{
+ static constexpr size_t RefCounterSpace = (sizeof(TRefCounter) + alignof(T) - 1) & ~(alignof(T) - 1);
+ static constexpr size_t RefCounterOffset = RefCounterSpace - sizeof(TRefCounter);
+ static constexpr size_t Size = RefCounterSpace + sizeof(T);
+ static constexpr size_t Alignment = alignof(T);
+
+ template <class... As>
+ Y_FORCE_INLINE static T* Construct(void* ptr, As&&... args)
+ {
+ auto* refCounter = reinterpret_cast<TRefCounter*>(static_cast<char*>(ptr) + RefCounterOffset);
+ new (refCounter) TRefCounter();
+ auto* object = reinterpret_cast<T*>(refCounter + 1);
+ if constexpr (std::is_constructible_v<T, As...>) {
+ new(object) T(std::forward<As>(args)...);
+ } else {
+ new(object) T{std::forward<As>(args)...};
+ }
+ CustomInitialize(object);
+ return object;
+ }
+};
+
+template <class T>
+struct TConstructHelper<T, true>
+{
+ static constexpr size_t Size = sizeof(TRefCountedWrapper<T>);
+ static constexpr size_t Alignment = alignof(TRefCountedWrapper<T>);
+
+ template <class... As>
+ Y_FORCE_INLINE static TRefCountedWrapper<T>* Construct(void* ptr, As&&... args)
+ {
+ using TDerived = TRefCountedWrapper<T>;
+ auto* object = new(static_cast<TDerived*>(ptr)) TDerived(std::forward<As>(args)...);
+ CustomInitialize(object);
+ return object;
+ }
+};
+
+template <class T, class... As>
+Y_FORCE_INLINE TIntrusivePtr<T> SafeConstruct(void* ptr, As&&... args)
+{
+ try {
+ auto* instance = TConstructHelper<T>::Construct(ptr, std::forward<As>(args)...);
+ return TIntrusivePtr<T>(instance, false);
+ } catch (const std::exception& ex) {
+ // Do not forget to free the memory.
+ TFreeMemory<T>::Do(ptr);
+ throw;
+ }
+}
+
+template <size_t Size, size_t Alignment>
+void* AllocateConstSizeAligned()
+{
+ if (Alignment <= 16) {
+ return NYTAlloc::AllocateConstSize<Size>();
+ } else {
+ return AllignedMalloc(Size, Alignment);
+ }
+}
+
+} // namespace NDetail
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T, class... As, class>
+Y_FORCE_INLINE TIntrusivePtr<T> New(
+ As&&... args)
+{
+ void* ptr = NDetail::AllocateConstSizeAligned<
+ NDetail::TConstructHelper<T>::Size,
+ NDetail::TConstructHelper<T>::Alignment>();
+
+ return NDetail::SafeConstruct<T>(ptr, std::forward<As>(args)...);
+}
+
+template <class T, class... As, class>
+Y_FORCE_INLINE TIntrusivePtr<T> New(
+ typename T::TAllocator* allocator,
+ As&&... args)
+{
+ auto* ptr = allocator->Allocate(NDetail::TConstructHelper<T>::Size);
+ if (!ptr) {
+ return nullptr;
+ }
+ return NDetail::SafeConstruct<T>(ptr, std::forward<As>(args)...);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T, class... As, class>
+Y_FORCE_INLINE TIntrusivePtr<T> NewWithExtraSpace(
+ size_t extraSpaceSize,
+ As&&... args)
+{
+ auto totalSize = NYT::NDetail::TConstructHelper<T>::Size + extraSpaceSize;
+ void* ptr = nullptr;
+
+ if (NYT::NDetail::TConstructHelper<T>::Alignment <= 16) {
+ ptr = NYTAlloc::Allocate(totalSize);
+ } else {
+ ptr = NYT::NDetail::AllignedMalloc(totalSize, NYT::NDetail::TConstructHelper<T>::Alignment);
+ }
+
+ return NYT::NDetail::SafeConstruct<T>(ptr, std::forward<As>(args)...);
+}
+
+template <class T, class... As, class>
+Y_FORCE_INLINE TIntrusivePtr<T> NewWithExtraSpace(
+ typename T::TAllocator* allocator,
+ size_t extraSpaceSize,
+ As&&... args)
+{
+ auto totalSize = NYT::NDetail::TConstructHelper<T>::Size + extraSpaceSize;
+ auto* ptr = allocator->Allocate(totalSize);
+ if (!ptr) {
+ return nullptr;
+ }
+ return NYT::NDetail::SafeConstruct<T>(ptr, std::forward<As>(args)...);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Support for polymorphic only
+template <class T, class TDeleter, class... As>
+Y_FORCE_INLINE TIntrusivePtr<T> NewWithDelete(const TDeleter& deleter, As&&... args)
+{
+ using TWrapper = TRefCountedWrapperWithDeleter<T, TDeleter>;
+ void* ptr = NDetail::AllocateConstSizeAligned<sizeof(TWrapper), alignof(TWrapper)>();
+
+ auto* instance = NDetail::NewEpilogue<TWrapper>(
+ ptr,
+ deleter,
+ std::forward<As>(args)...);
+
+ return TIntrusivePtr<T>(instance, false);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T, class TTag, int Counter, class... As>
+Y_FORCE_INLINE TIntrusivePtr<T> NewWithLocation(
+ const TSourceLocation& location,
+ As&&... args)
+{
+ using TWrapper = TRefCountedWrapperWithCookie<T>;
+ void* ptr = NDetail::AllocateConstSizeAligned<sizeof(TWrapper), alignof(TWrapper)>();
+
+ auto* instance = NDetail::NewEpilogue<TWrapper>(ptr, std::forward<As>(args)...);
+
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+ instance->InitializeTracking(GetRefCountedTypeCookieWithLocation<T, TTag, Counter>(location));
+#else
+ Y_UNUSED(location);
+#endif
+
+ return TIntrusivePtr<T>(instance, false);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T>
+const void* TWithExtraSpace<T>::GetExtraSpacePtr() const
+{
+ return static_cast<const T*>(this) + 1;
+}
+
+template <class T>
+void* TWithExtraSpace<T>::GetExtraSpacePtr()
+{
+ return static_cast<T*>(this) + 1;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
diff --git a/library/cpp/yt/memory/new.h b/library/cpp/yt/memory/new.h
new file mode 100644
index 0000000000..2db45e0465
--- /dev/null
+++ b/library/cpp/yt/memory/new.h
@@ -0,0 +1,127 @@
+#pragma once
+
+#include "intrusive_ptr.h"
+#include "ref_tracked.h"
+
+#include <library/cpp/yt/misc/source_location.h>
+
+#include <util/system/defaults.h>
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+/*!
+ * \defgroup yt_new New<T> safe smart pointer constructors
+ * \ingroup yt_new
+ *
+ * This is collection of safe smart pointer constructors.
+ *
+ * \page yt_new_rationale Rationale
+ * New<T> function family was designed to prevent the following problem.
+ * Consider the following piece of code.
+ *
+ * \code
+ * class TFoo
+ * : public virtual TRefCounted
+ * {
+ * public:
+ * TFoo();
+ * };
+ *
+ * typedef TIntrusivePtr<TFoo> TFooPtr;
+ *
+ * void RegisterObject(TFooPtr foo)
+ * {
+ * ...
+ * }
+ *
+ * TFoo::TFoo()
+ * {
+ * // ... do something before
+ * RegisterObject(this);
+ * // ... do something after
+ * }
+ * \endcode
+ *
+ * What will happen on <tt>new TFoo()</tt> construction? After memory allocation
+ * the reference counter for newly created instance would be initialized to zero.
+ * Afterwards, the control goes to TFoo constructor. To invoke
+ * <tt>RegisterObject</tt> a new temporary smart pointer to the current instance
+ * have to be created effectively incrementing the reference counter (now one).
+ * After <tt>RegisterObject</tt> returns the control to the constructor
+ * the temporary pointer is destroyed effectively decrementing the reference
+ * counter to zero hence triggering object destruction during its initialization.
+ *
+ * To avoid this undefined behavior <tt>New<T></tt> was introduced.
+ * <tt>New<T></tt> holds a fake
+ * reference to the object during its construction effectively preventing
+ * premature destruction.
+ *
+ * \note An initialization like <tt>TIntrusivePtr&lt;T&gt; p = new T()</tt>
+ * would result in a dangling reference due to internals of #New<T> and
+ * #TRefCountedBase.
+ */
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Detection idiom: dispatches the New overloads below on whether T declares
+//! a nested TAllocator type.
+//! Primary template: no T::TAllocator; exposes TFalse only.
+template <class T, class = void>
+struct THasAllocator
+{
+    using TFalse = void;
+};
+
+//! Specialization chosen when T::TAllocator exists; exposes TTrue only.
+template <class T>
+struct THasAllocator<T, std::void_t<typename T::TAllocator>>
+{
+    using TTrue = void;
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Allocates a new instance of |T|.
+template <class T, class... As, class = typename THasAllocator<T>::TFalse>
+TIntrusivePtr<T> New(As&&... args);
+
+template <class T, class... As, class = typename THasAllocator<T>::TTrue>
+TIntrusivePtr<T> New(typename T::TAllocator* allocator, As&&... args);
+
+//! Allocates an instance of |T| with additional storage of #extraSpaceSize bytes.
+template <class T, class... As, class = typename THasAllocator<T>::TFalse>
+TIntrusivePtr<T> NewWithExtraSpace(size_t extraSpaceSize, As&&... args);
+
+template <class T, class... As, class = typename THasAllocator<T>::TTrue>
+TIntrusivePtr<T> NewWithExtraSpace(typename T::TAllocator* allocator, size_t extraSpaceSize, As&&... args);
+
+//! Allocates a new instance of |T| with user deleter.
+template <class T, class TDeleter, class... As>
+TIntrusivePtr<T> NewWithDelete(const TDeleter& deleter, As&&... args);
+
+//! Allocates a new instance of |T|.
+//! The allocation is additionally marked with #location.
+template <class T, class TTag, int Counter, class... As>
+TIntrusivePtr<T> NewWithLocation(const TSourceLocation& location, As&&... args);
+
+//! Enables calling #New and co for types with private ctors.
+#define DECLARE_NEW_FRIEND() \
+ template <class DECLARE_NEW_FRIEND_T> \
+ friend struct NYT::TRefCountedWrapper;
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! CRTP mixin enabling access to instance's extra space.
+template <class T>
+class TWithExtraSpace
+{
+protected:
+ const void* GetExtraSpacePtr() const;
+ void* GetExtraSpacePtr();
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
+
+#define NEW_INL_H_
+#include "new-inl.h"
+#undef NEW_INL_H_
diff --git a/library/cpp/yt/memory/range.h b/library/cpp/yt/memory/range.h
new file mode 100644
index 0000000000..6c71aa9496
--- /dev/null
+++ b/library/cpp/yt/memory/range.h
@@ -0,0 +1,556 @@
+#pragma once
+
+#include <library/cpp/yt/assert/assert.h>
+
+#include <library/cpp/yt/misc/hash.h>
+
+#include <vector>
+#include <array>
+#include <optional>
+#include <initializer_list>
+
+// For size_t.
+#include <stddef.h>
+
+namespace google::protobuf {
+
+////////////////////////////////////////////////////////////////////////////////
+// Forward declarations
+
+template <class T>
+class RepeatedField;
+
+template <class T>
+class RepeatedPtrField;
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace google::protobuf
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+// Forward declarations
+
+template <class T, size_t N>
+class TCompactVector;
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! TRange (inspired by TArrayRef from LLVM)
+/*!
+ * Represents a constant reference to an array (zero or more elements
+ * consecutively in memory), i. e. a start pointer and a length. It allows
+ * various APIs to take consecutive elements easily and conveniently.
+ *
+ * This class does not own the underlying data, it is expected to be used in
+ * situations where the data resides in some other buffer, whose lifetime
+ * extends past that of the TRange. For this reason, it is not in general
+ * safe to store an TRange.
+ *
+ * This is intended to be trivially copyable, so it should be passed by
+ * value.
+ */
+//! NB: trivially copyable and non-owning; pass by value.
+template <class T>
+class TRange
+{
+public:
+    typedef const T* iterator;
+    typedef const T* const_iterator;
+    typedef size_t size_type;
+
+    //! Constructs a null TRange.
+    TRange()
+        : Data_(nullptr)
+        , Length_(0)
+    { }
+
+    //! Constructs a TRange from a pointer and length.
+    TRange(const T* data, size_t length)
+        : Data_(data)
+        , Length_(length)
+    { }
+
+    //! Constructs a TRange from a range.
+    TRange(const T* begin, const T* end)
+        : Data_(begin)
+        , Length_(end - begin)
+    { }
+
+    //! Constructs a TRange from a TCompactVector.
+    template <size_t N>
+    TRange(const TCompactVector<T, N>& elements)
+        : Data_(elements.data())
+        , Length_(elements.size())
+    { }
+
+    //! Constructs a TRange from an std::vector.
+    //! NB: an empty vector maps to a null range (Data_ == nullptr).
+    template <class A>
+    TRange(const std::vector<T, A>& elements)
+        : Data_(elements.empty() ? nullptr : elements.data())
+        , Length_(elements.size())
+    { }
+
+    //! Constructs a TRange from a C array.
+    template <size_t N>
+    TRange(const T (&elements)[N])
+        : Data_(elements)
+        , Length_(N)
+    { }
+
+    //! Constructs a TRange from std::initializer_list.
+    //! NOTE(review): the backing array of an initializer_list dies at the end
+    //! of the full-expression; such a range must not outlive its initializer.
+    TRange(std::initializer_list<T> elements)
+        : Data_(elements.begin())
+        , Length_(elements.size())
+    { }
+
+    //! Constructs a TRange from std::array.
+    template <size_t N>
+    TRange(const std::array<T, N>& elements)
+        : Data_(elements.data())
+        , Length_(N)
+    { }
+
+    //! Constructs a TRange from std::optional.
+    //! Range will contain 0-1 elements.
+    explicit TRange(const std::optional<T>& element)
+        : Data_(element ? &*element : nullptr)
+        , Length_(element ? 1 : 0)
+    { }
+
+    const_iterator Begin() const
+    {
+        return Data_;
+    }
+
+    // STL interop, for gcc.
+    const_iterator begin() const
+    {
+        return Begin();
+    }
+
+    const_iterator End() const
+    {
+        return Data_ + Length_;
+    }
+
+    // STL interop, for gcc.
+    const_iterator end() const
+    {
+        return End();
+    }
+
+    bool Empty() const
+    {
+        return Length_ == 0;
+    }
+
+    bool empty() const
+    {
+        return Empty();
+    }
+
+    //! Distinguishes null from non-null ranges: an empty-but-non-null range
+    //! still converts to true.
+    explicit operator bool() const
+    {
+        return Data_ != nullptr;
+    }
+
+    size_t Size() const
+    {
+        return Length_;
+    }
+
+    size_t size() const
+    {
+        return Size();
+    }
+
+    const T& operator[](size_t index) const
+    {
+        YT_ASSERT(index < Size());
+        return Data_[index];
+    }
+
+
+    const T& Front() const
+    {
+        YT_ASSERT(Length_ > 0);
+        return Data_[0];
+    }
+
+    const T& Back() const
+    {
+        YT_ASSERT(Length_ > 0);
+        return Data_[Length_ - 1];
+    }
+
+
+    //! Returns the half-open subrange [startOffset, endOffset).
+    TRange<T> Slice(size_t startOffset, size_t endOffset) const
+    {
+        YT_ASSERT(startOffset <= endOffset && endOffset <= Size());
+        return TRange<T>(Begin() + startOffset, endOffset - startOffset);
+    }
+
+    //! Copies the elements into a freshly allocated std::vector.
+    std::vector<T> ToVector() const
+    {
+        return std::vector<T>(Data_, Data_ + Length_);
+    }
+
+protected:
+    //! The start of the array, in an external buffer.
+    const T* Data_;
+
+    //! The number of elements.
+    size_t Length_;
+
+};
+
+// STL interop.
+template <class T>
+typename TRange<T>::const_iterator begin(TRange<T> ref)
+{
+ return ref.Begin();
+}
+
+template <class T>
+typename TRange<T>::const_iterator end(TRange<T> ref)
+{
+ return ref.End();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Constructs a TRange from a pointer and length.
+template <class T>
+TRange<T> MakeRange(const T* data, size_t length)
+{
+ return TRange<T>(data, length);
+}
+
+//! Constructs a TRange from a native range.
+template <class T>
+TRange<T> MakeRange(const T* begin, const T* end)
+{
+ return TRange<T>(begin, end);
+}
+
+//! Constructs a TRange from a TCompactVector.
+template <class T, size_t N>
+TRange<T> MakeRange(const TCompactVector<T, N>& elements)
+{
+ return elements;
+}
+
+//! "Copy-constructor".
+template <class T>
+TRange<T> MakeRange(TRange<T> range)
+{
+ return range;
+}
+
+//! Constructs a TRange from an std::vector.
+template <class T>
+TRange<T> MakeRange(const std::vector<T>& elements)
+{
+ return elements;
+}
+
+//! Constructs a TRange from an std::array.
+template <class T, size_t N>
+TRange<T> MakeRange(const std::array<T, N>& elements)
+{
+ return elements;
+}
+
+//! Constructs a TRange from a C array.
+template <class T, size_t N>
+TRange<T> MakeRange(const T (& elements)[N])
+{
+ return TRange<T>(elements);
+}
+
+//! Constructs a TRange from RepeatedField.
+template <class T>
+TRange<T> MakeRange(const google::protobuf::RepeatedField<T>& elements)
+{
+ return TRange<T>(elements.data(), elements.size());
+}
+
+//! Constructs a TRange from RepeatedPtrField.
+template <class T>
+TRange<const T*> MakeRange(const google::protobuf::RepeatedPtrField<T>& elements)
+{
+ return TRange<const T*>(elements.data(), elements.size());
+}
+
+//! Reinterprets a TRange<T> as a TRange<U>; T and U must have equal sizes.
+//! NB: subject to the usual strict-aliasing caveats of reinterpret_cast.
+template <class U, class T>
+TRange<U> ReinterpretCastRange(TRange<T> range)
+{
+    static_assert(sizeof(T) == sizeof(U), "T and U must have equal sizes.");
+    // Dropped the stray semicolon that used to follow the closing brace
+    // (an empty declaration flagged by -Wextra-semi).
+    return TRange<U>(reinterpret_cast<const U*>(range.Begin()), range.Size());
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+// TMutableRange (inspired by TMutableArrayRef from LLVM)
+/*
+ * Represents a mutable reference to an array (zero or more elements
+ * consecutively in memory), i. e. a start pointer and a length.
+ * It allows various APIs to take and modify consecutive elements easily and
+ * conveniently.
+ *
+ * This class does not own the underlying data, it is expected to be used in
+ * situations where the data resides in some other buffer, whose lifetime
+ * extends past that of the TMutableRange. For this reason, it is not in
+ * general safe to store a TMutableRange.
+ *
+ * This is intended to be trivially copyable, so it should be passed by value.
+ */
+template <class T>
+class TMutableRange
+    : public TRange<T>
+{
+public:
+    typedef T* iterator;
+
+    //! Constructs a null TMutableRange.
+    TMutableRange()
+    { }
+
+    //! Constructs a TMutableRange from a pointer and length.
+    TMutableRange(T* data, size_t length)
+        : TRange<T>(data, length)
+    { }
+
+    //! Constructs a TMutableRange from a range.
+    TMutableRange(T* begin, T* end)
+        : TRange<T>(begin, end)
+    { }
+
+    //! Constructs a TMutableRange from a TCompactVector.
+    template <size_t N>
+    TMutableRange(TCompactVector<T, N>& elements)
+        : TRange<T>(elements)
+    { }
+
+    //! Constructs a TMutableRange from an std::vector.
+    TMutableRange(std::vector<T>& elements)
+        : TRange<T>(elements)
+    { }
+
+    //! Constructs a TMutableRange from std::array.
+    template <size_t N>
+    TMutableRange(std::array<T, N>& elements)
+        : TRange<T>(elements.data(), N)
+    { }
+
+    //! Constructs a TMutableRange from an std::optional.
+    //! Range will contain 0-1 elements.
+    explicit TMutableRange(std::optional<T>& optional)
+        : TRange<T>(optional)
+    { }
+
+    //! Constructs a TMutableRange from a C array.
+    template <size_t N>
+    TMutableRange(T (& elements)[N])
+        : TRange<T>(elements)
+    { }
+
+    using TRange<T>::Begin;
+    using TRange<T>::End;
+    using TRange<T>::Front;
+    using TRange<T>::Back;
+    using TRange<T>::operator[];
+
+    //! The base class stores const T*; the mutable accessors cast constness
+    //! away, which is safe because every TMutableRange constructor starts
+    //! from a mutable buffer.
+    iterator Begin() const
+    {
+        return const_cast<T*>(this->Data_);
+    }
+
+    // STL interop, for gcc.
+    iterator begin() const
+    {
+        return Begin();
+    }
+
+    iterator End() const
+    {
+        return this->Begin() + this->Size();
+    }
+
+    // STL interop, for gcc.
+    iterator end() const
+    {
+        return End();
+    }
+
+    T& operator[](size_t index)
+    {
+        // Fixed an off-by-one: the bound must be strict; |index == Size()|
+        // would address one past the end (cf. the const overload in TRange).
+        YT_ASSERT(index < this->Size());
+        return Begin()[index];
+    }
+
+    T& Front()
+    {
+        YT_ASSERT(this->Length_ > 0);
+        return Begin()[0];
+    }
+
+    T& Back()
+    {
+        YT_ASSERT(this->Length_ > 0);
+        return Begin()[this->Length_ - 1];
+    }
+
+    //! Returns the half-open mutable subrange [startOffset, endOffset).
+    TMutableRange<T> Slice(size_t startOffset, size_t endOffset) const
+    {
+        YT_ASSERT(startOffset <= endOffset && endOffset <= this->Size());
+        return TMutableRange<T>(Begin() + startOffset, endOffset - startOffset);
+    }
+
+    //! Returns the mutable subrange [begin, end); both pointers must lie
+    //! within this range.
+    TMutableRange<T> Slice(T* begin, T* end) const
+    {
+        YT_ASSERT(begin >= Begin());
+        YT_ASSERT(end <= End());
+        return TMutableRange<T>(begin, end);
+    }
+};
+
+// STL interop.
+template <class T>
+typename TMutableRange<T>::iterator begin(TMutableRange<T> ref)
+{
+ return ref.Begin();
+}
+
+template <class T>
+typename TMutableRange<T>::iterator end(TMutableRange<T> ref)
+{
+ return ref.End();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Constructs a TMutableRange from a pointer and length.
+template <class T>
+TMutableRange<T> MakeMutableRange(T* data, size_t length)
+{
+ return TMutableRange<T>(data, length);
+}
+
+//! Constructs a TMutableRange from a native range.
+template <class T>
+TMutableRange<T> MakeMutableRange(T* begin, T* end)
+{
+ return TMutableRange<T>(begin, end);
+}
+
+//! Constructs a TMutableRange from a TCompactVector.
+template <class T, size_t N>
+TMutableRange<T> MakeMutableRange(TCompactVector<T, N>& elements)
+{
+ return elements;
+}
+
+//! "Copy-constructor".
+template <class T>
+TMutableRange<T> MakeMutableRange(TMutableRange<T> range)
+{
+ return range;
+}
+
+//! Constructs a TMutableRange from an std::vector.
+template <class T>
+TMutableRange<T> MakeMutableRange(std::vector<T>& elements)
+{
+ return elements;
+}
+
+//! Constructs a TMutableRange from an std::array.
+template <class T, size_t N>
+TMutableRange<T> MakeMutableRange(std::array<T, N>& elements)
+{
+ return elements;
+}
+
+//! Constructs a TMutableRange from a C array.
+template <class T, size_t N>
+TMutableRange<T> MakeMutableRange(T (& elements)[N])
+{
+ return TMutableRange<T>(elements);
+}
+
+//! Constructs a TMutableRange from RepeatedField.
+template <class T>
+TMutableRange<T> MakeMutableRange(google::protobuf::RepeatedField<T>& elements)
+{
+ return TMutableRange<T>(elements.data(), elements.size());
+}
+
+//! Constructs a TMutableRange from RepeatedPtrField.
+//! NB: a mutable range of mutable pointers requires mutable_data();
+//! RepeatedPtrField::data() yields pointers-to-const only.
+template <class T>
+TMutableRange<T*> MakeMutableRange(google::protobuf::RepeatedPtrField<T>& elements)
+{
+    // The previous implementation constructed TMutableRange<const T*>, which
+    // does not convert to the declared return type TMutableRange<T*> and
+    // failed to compile on instantiation.
+    return TMutableRange<T*>(elements.mutable_data(), elements.size());
+}
+
+//! Reinterprets a TMutableRange<T> as a TMutableRange<U> of equal-sized elements.
+template <class U, class T>
+TMutableRange<U> ReinterpretCastMutableRange(TMutableRange<T> range)
+{
+    static_assert(sizeof(T) == sizeof(U), "T and U must have equal sizes.");
+    auto* castedBegin = reinterpret_cast<U*>(range.Begin());
+    return TMutableRange<U>(castedBegin, range.Size());
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Mark TRange and TMutableRange as PODs.
+namespace NMpl {
+
+template <class T>
+struct TIsPod;
+
+template <class T>
+struct TIsPod<TRange<T>>
+{
+ static const bool Value = true;
+};
+
+template <class T>
+struct TIsPod<TMutableRange<T>>
+{
+ static const bool Value = true;
+};
+
+} // namespace NMpl
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
+
+//! Hashes a TRange by folding the hashes of its elements together.
+template <class T>
+struct hash<NYT::TRange<T>>
+{
+    size_t operator()(const NYT::TRange<T>& range) const
+    {
+        size_t seed = 0;
+        for (const auto& item : range) {
+            NYT::HashCombine(seed, item);
+        }
+        return seed;
+    }
+};
+
+//! Hashes a TMutableRange by folding the hashes of its elements together.
+template <class T>
+struct hash<NYT::TMutableRange<T>>
+{
+    size_t operator()(const NYT::TMutableRange<T>& range) const
+    {
+        size_t seed = 0;
+        for (const auto& item : range) {
+            NYT::HashCombine(seed, item);
+        }
+        return seed;
+    }
+};
+
+
diff --git a/library/cpp/yt/memory/ref-inl.h b/library/cpp/yt/memory/ref-inl.h
new file mode 100644
index 0000000000..79be8356c5
--- /dev/null
+++ b/library/cpp/yt/memory/ref-inl.h
@@ -0,0 +1,517 @@
+#ifndef REF_INL_H_
+#error "Direct inclusion of this file is not allowed, include ref.h"
+// For the sake of sane code completion.
+#include "ref.h"
+#endif
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+namespace NDetail {
+
+extern const char EmptyRefData[];
+extern char MutableEmptyRefData[];
+
+} // namespace NDetail
+
+////////////////////////////////////////////////////////////////////////////////
+
+Y_FORCE_INLINE TRef::TRef(const void* data, size_t size)
+ : TRange<char>(static_cast<const char*>(data), size)
+{ }
+
+Y_FORCE_INLINE TRef::TRef(const void* begin, const void* end)
+ : TRange<char>(static_cast<const char*>(begin), static_cast<const char*>(end))
+{ }
+
+Y_FORCE_INLINE TRef TRef::MakeEmpty()
+{
+ return TRef(NDetail::EmptyRefData, NDetail::EmptyRefData);
+}
+
+Y_FORCE_INLINE TRef TRef::FromString(const TString& str)
+{
+ return FromStringBuf(str);
+}
+
+Y_FORCE_INLINE TRef TRef::FromStringBuf(TStringBuf strBuf)
+{
+ return TRef(strBuf.data(), strBuf.length());
+}
+
+template <class T>
+Y_FORCE_INLINE TRef TRef::FromPod(const T& data)
+{
+ static_assert(TTypeTraits<T>::IsPod || std::is_pod<T>::value, "T must be a pod-type.");
+ return TRef(&data, sizeof (data));
+}
+
+Y_FORCE_INLINE TRef TRef::Slice(size_t startOffset, size_t endOffset) const
+{
+ YT_ASSERT(endOffset >= startOffset && endOffset <= Size());
+ return TRef(Begin() + startOffset, endOffset - startOffset);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+Y_FORCE_INLINE TMutableRef::TMutableRef(void* data, size_t size)
+ : TMutableRange<char>(static_cast<char*>(data), size)
+{ }
+
+Y_FORCE_INLINE TMutableRef::TMutableRef(void* begin, void* end)
+ : TMutableRange<char>(static_cast<char*>(begin), static_cast<char*>(end))
+{ }
+
+Y_FORCE_INLINE TMutableRef TMutableRef::MakeEmpty()
+{
+ return TMutableRef(NDetail::MutableEmptyRefData, NDetail::MutableEmptyRefData);
+}
+
+Y_FORCE_INLINE TMutableRef::operator TRef() const
+{
+ return TRef(Begin(), Size());
+}
+
+template <class T>
+Y_FORCE_INLINE TMutableRef TMutableRef::FromPod(T& data)
+{
+ static_assert(TTypeTraits<T>::IsPod || std::is_pod<T>::value, "T must be a pod-type.");
+ return TMutableRef(&data, sizeof (data));
+}
+
+Y_FORCE_INLINE TMutableRef TMutableRef::FromString(TString& str)
+{
+ // NB: begin() invokes CloneIfShared().
+ return TMutableRef(str.begin(), str.length());
+}
+
+Y_FORCE_INLINE TMutableRef TMutableRef::Slice(size_t startOffset, size_t endOffset) const
+{
+ YT_ASSERT(endOffset >= startOffset && endOffset <= Size());
+ return TMutableRef(Begin() + startOffset, endOffset - startOffset);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+Y_FORCE_INLINE TSharedRef::TSharedRef(TRef ref, TSharedRange<char>::THolderPtr holder)
+ : TSharedRange<char>(ref, std::move(holder))
+{ }
+
+Y_FORCE_INLINE TSharedRef::TSharedRef(const void* data, size_t length, TSharedRange<char>::THolderPtr holder)
+ : TSharedRange<char>(static_cast<const char*>(data), length, std::move(holder))
+{ }
+
+Y_FORCE_INLINE TSharedRef::TSharedRef(const void* begin, const void* end, TSharedRange<char>::THolderPtr holder)
+ : TSharedRange<char>(static_cast<const char*>(begin), static_cast<const char*>(end), std::move(holder))
+{ }
+
+Y_FORCE_INLINE TSharedRef TSharedRef::MakeEmpty()
+{
+ return TSharedRef(TRef::MakeEmpty(), nullptr);
+}
+
+Y_FORCE_INLINE TSharedRef::operator TRef() const
+{
+ return TRef(Begin(), Size());
+}
+
+template <class TTag>
+Y_FORCE_INLINE TSharedRef TSharedRef::FromString(TString str)
+{
+ return FromString(std::move(str), GetRefCountedTypeCookie<TTag>());
+}
+
+Y_FORCE_INLINE TSharedRef TSharedRef::FromString(TString str)
+{
+ return FromString<TDefaultSharedBlobTag>(std::move(str));
+}
+
+template <class TTag>
+Y_FORCE_INLINE TSharedRef TSharedRef::MakeCopy(TRef ref)
+{
+ return MakeCopy(ref, GetRefCountedTypeCookie<TTag>());
+}
+
+Y_FORCE_INLINE TSharedRef TSharedRef::Slice(size_t startOffset, size_t endOffset) const
+{
+ YT_ASSERT(endOffset >= startOffset && endOffset <= Size());
+ return TSharedRef(Begin() + startOffset, endOffset - startOffset, Holder_);
+}
+
+Y_FORCE_INLINE TSharedRef TSharedRef::Slice(const void* begin, const void* end) const
+{
+ YT_ASSERT(begin >= Begin());
+ YT_ASSERT(end <= End());
+ return TSharedRef(begin, end, Holder_);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+Y_FORCE_INLINE TSharedMutableRef::TSharedMutableRef(const TMutableRef& ref, TSharedMutableRange<char>::THolderPtr holder)
+ : TSharedMutableRange<char>(ref, std::move(holder))
+{ }
+
+Y_FORCE_INLINE TSharedMutableRef::TSharedMutableRef(void* data, size_t length, TSharedMutableRange<char>::THolderPtr holder)
+ : TSharedMutableRange<char>(static_cast<char*>(data), length, std::move(holder))
+{ }
+
+Y_FORCE_INLINE TSharedMutableRef::TSharedMutableRef(void* begin, void* end, TSharedMutableRange<char>::THolderPtr holder)
+ : TSharedMutableRange<char>(static_cast<char*>(begin), static_cast<char*>(end), std::move(holder))
+{ }
+
+Y_FORCE_INLINE TSharedMutableRef TSharedMutableRef::MakeEmpty()
+{
+ return TSharedMutableRef(TMutableRef::MakeEmpty(), nullptr);
+}
+
+Y_FORCE_INLINE TSharedMutableRef::operator TMutableRef() const
+{
+ return TMutableRef(Begin(), Size());
+}
+
+Y_FORCE_INLINE TSharedMutableRef::operator TSharedRef() const
+{
+ return TSharedRef(Begin(), Size(), Holder_);
+}
+
+Y_FORCE_INLINE TSharedMutableRef::operator TRef() const
+{
+ return TRef(Begin(), Size());
+}
+
+Y_FORCE_INLINE TSharedMutableRef TSharedMutableRef::Allocate(size_t size, bool initializeStorage)
+{
+ return Allocate<TDefaultSharedBlobTag>(size, initializeStorage);
+}
+
+Y_FORCE_INLINE TSharedMutableRef TSharedMutableRef::AllocatePageAligned(size_t size, bool initializeStorage)
+{
+ return AllocatePageAligned<TDefaultSharedBlobTag>(size, initializeStorage);
+}
+
+template <class TTag>
+Y_FORCE_INLINE TSharedMutableRef TSharedMutableRef::MakeCopy(TRef ref)
+{
+ return MakeCopy(ref, GetRefCountedTypeCookie<TTag>());
+}
+
+Y_FORCE_INLINE TSharedMutableRef TSharedMutableRef::Slice(size_t startOffset, size_t endOffset) const
+{
+ YT_ASSERT(endOffset >= startOffset && endOffset <= Size());
+ return TSharedMutableRef(Begin() + startOffset, endOffset - startOffset, Holder_);
+}
+
+Y_FORCE_INLINE TSharedMutableRef TSharedMutableRef::Slice(void* begin, void* end) const
+{
+ YT_ASSERT(begin >= Begin());
+ YT_ASSERT(end <= End());
+ return TSharedMutableRef(begin, end, Holder_);
+}
+
+template <class TTag>
+Y_FORCE_INLINE TSharedMutableRef TSharedMutableRef::Allocate(size_t size, bool initializeStorage)
+{
+ return Allocate(size, initializeStorage, GetRefCountedTypeCookie<TTag>());
+}
+
+template <class TTag>
+Y_FORCE_INLINE TSharedMutableRef TSharedMutableRef::AllocatePageAligned(size_t size, bool initializeStorage)
+{
+ return AllocatePageAligned(size, initializeStorage, GetRefCountedTypeCookie<TTag>());
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Returns the number of bytes in |ref| (zero for a null ref).
+Y_FORCE_INLINE size_t GetByteSize(TRef ref)
+{
+    if (!ref) {
+        return 0;
+    }
+    return ref.Size();
+}
+
+//! Returns the total number of bytes across all |parts|.
+template <class T>
+size_t GetByteSize(TRange<T> parts)
+{
+    size_t total = 0;
+    for (const auto& part : parts) {
+        total += part.Size();
+    }
+    return total;
+}
+
+//! Returns the total number of bytes across all |parts|.
+template <class T>
+size_t GetByteSize(const std::vector<T>& parts)
+{
+    return GetByteSize(MakeRange(parts));
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Ref-counted backing storage for TSharedRefArray.
+//! The TSharedRef parts (followed by an optional byte pool) are
+//! placement-constructed in the extra space allocated right past this object.
+class TSharedRefArrayImpl
+    : public TRefCounted
+    , public TWithExtraSpace<TSharedRefArrayImpl>
+{
+public:
+    //! Constructs an array of |size| empty parts.
+    TSharedRefArrayImpl(
+        size_t extraSpaceSize,
+        TRefCountedTypeCookie tagCookie,
+        size_t size)
+        : Size_(size)
+        , ExtraSpaceSize_(extraSpaceSize)
+        , TagCookie_(tagCookie)
+    {
+        for (size_t index = 0; index < Size_; ++index) {
+            new (MutableBegin() + index) TSharedRef();
+        }
+        RegisterWithRefCountedTracker();
+    }
+
+    //! Constructs a single-part array by copying |part|.
+    TSharedRefArrayImpl(
+        size_t extraSpaceSize,
+        TRefCountedTypeCookie tagCookie,
+        const TSharedRef& part)
+        : Size_(1)
+        , ExtraSpaceSize_(extraSpaceSize)
+        , TagCookie_(tagCookie)
+    {
+        new (MutableBegin()) TSharedRef(part);
+        RegisterWithRefCountedTracker();
+    }
+
+    //! Constructs a single-part array by moving |part|.
+    TSharedRefArrayImpl(
+        size_t extraSpaceSize,
+        TRefCountedTypeCookie tagCookie,
+        TSharedRef&& part)
+        : Size_(1)
+        , ExtraSpaceSize_(extraSpaceSize)
+        , TagCookie_(tagCookie)
+    {
+        new (MutableBegin()) TSharedRef(std::move(part));
+        RegisterWithRefCountedTracker();
+    }
+
+    //! Constructs an array by copying the given |parts|.
+    template <class TParts>
+    TSharedRefArrayImpl(
+        size_t extraSpaceSize,
+        TRefCountedTypeCookie tagCookie,
+        const TParts& parts,
+        TSharedRefArray::TCopyParts)
+        : Size_(parts.size())
+        , ExtraSpaceSize_(extraSpaceSize)
+        , TagCookie_(tagCookie)
+    {
+        for (size_t index = 0; index < Size_; ++index) {
+            new (MutableBegin() + index) TSharedRef(parts[index]);
+        }
+        RegisterWithRefCountedTracker();
+    }
+
+    //! Constructs an array by moving the given |parts|.
+    template <class TParts>
+    TSharedRefArrayImpl(
+        size_t extraSpaceSize,
+        TRefCountedTypeCookie tagCookie,
+        TParts&& parts,
+        TSharedRefArray::TMoveParts)
+        : Size_(parts.size())
+        , ExtraSpaceSize_(extraSpaceSize)
+        , TagCookie_(tagCookie)
+    {
+        for (size_t index = 0; index < Size_; ++index) {
+            new (MutableBegin() + index) TSharedRef(std::move(parts[index]));
+        }
+        RegisterWithRefCountedTracker();
+    }
+
+    ~TSharedRefArrayImpl()
+    {
+        for (size_t index = 0; index < Size_; ++index) {
+            auto& part = MutableBegin()[index];
+            if (part.GetHolder() == this) {
+                // NOTE(review): a part may use this very impl as its holder
+                // (a self-reference); Release() drops it without touching the
+                // ref counter, presumably to avoid re-entering destruction —
+                // confirm with TSharedRefArrayBuilder.
+                part.Holder_.Release();
+            }
+            // Parts were placement-constructed, so they are destroyed manually.
+            part.TSharedRef::~TSharedRef();
+        }
+        UnregisterFromRefCountedTracker();
+    }
+
+
+    size_t Size() const
+    {
+        return Size_;
+    }
+
+    bool Empty() const
+    {
+        return Size_ == 0;
+    }
+
+    const TSharedRef& operator [] (size_t index) const
+    {
+        YT_ASSERT(index < Size());
+        return Begin()[index];
+    }
+
+
+    //! Parts live at the very start of the extra space.
+    const TSharedRef* Begin() const
+    {
+        return static_cast<const TSharedRef*>(GetExtraSpacePtr());
+    }
+
+    const TSharedRef* End() const
+    {
+        return Begin() + Size_;
+    }
+
+private:
+    friend class TSharedRefArrayBuilder;
+
+    const size_t Size_;
+    const size_t ExtraSpaceSize_;
+    const TRefCountedTypeCookie TagCookie_;
+
+
+    //! Reports this instance (and its extra space) to the ref-counted tracker.
+    void RegisterWithRefCountedTracker()
+    {
+        TRefCountedTrackerFacade::AllocateTagInstance(TagCookie_);
+        TRefCountedTrackerFacade::AllocateSpace(TagCookie_, ExtraSpaceSize_);
+    }
+
+    //! Mirrors RegisterWithRefCountedTracker upon destruction.
+    void UnregisterFromRefCountedTracker()
+    {
+        TRefCountedTrackerFacade::FreeTagInstance(TagCookie_);
+        TRefCountedTrackerFacade::FreeSpace(TagCookie_, ExtraSpaceSize_);
+    }
+
+
+    TSharedRef* MutableBegin()
+    {
+        return static_cast<TSharedRef*>(GetExtraSpacePtr());
+    }
+
+    TSharedRef* MutableEnd()
+    {
+        return MutableBegin() + Size_;
+    }
+
+    //! Start of the byte pool that follows the TSharedRef parts.
+    char* GetBeginAllocationPtr()
+    {
+        return static_cast<char*>(static_cast<void*>(MutableEnd()));
+    }
+};
+
+DEFINE_REFCOUNTED_TYPE(TSharedRefArrayImpl)
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct TSharedRefArrayTag { };
+
+Y_FORCE_INLINE TSharedRefArray::TSharedRefArray(TIntrusivePtr<TSharedRefArrayImpl> impl)
+ : Impl_(std::move(impl))
+{ }
+
+Y_FORCE_INLINE TSharedRefArray::TSharedRefArray(const TSharedRefArray& other)
+ : Impl_(other.Impl_)
+{ }
+
+Y_FORCE_INLINE TSharedRefArray::TSharedRefArray(TSharedRefArray&& other) noexcept
+ : Impl_(std::move(other.Impl_))
+{ }
+
+Y_FORCE_INLINE TSharedRefArray::TSharedRefArray(const TSharedRef& part)
+ : Impl_(NewImpl(1, 0, GetRefCountedTypeCookie<TSharedRefArrayTag>(), part))
+{ }
+
+Y_FORCE_INLINE TSharedRefArray::TSharedRefArray(TSharedRef&& part)
+ : Impl_(NewImpl(1, 0, GetRefCountedTypeCookie<TSharedRefArrayTag>(), std::move(part)))
+{ }
+
+template <class TParts>
+Y_FORCE_INLINE TSharedRefArray::TSharedRefArray(const TParts& parts, TSharedRefArray::TCopyParts)
+ : Impl_(NewImpl(parts.size(), 0, GetRefCountedTypeCookie<TSharedRefArrayTag>(), parts, TSharedRefArray::TCopyParts{}))
+{ }
+
+template <class TParts>
+Y_FORCE_INLINE TSharedRefArray::TSharedRefArray(TParts&& parts, TSharedRefArray::TMoveParts)
+ : Impl_(NewImpl(parts.size(), 0, GetRefCountedTypeCookie<TSharedRefArrayTag>(), std::move(parts), TSharedRefArray::TMoveParts{}))
+{ }
+
+Y_FORCE_INLINE TSharedRefArray& TSharedRefArray::operator=(const TSharedRefArray& other)
+{
+ Impl_ = other.Impl_;
+ return *this;
+}
+
+Y_FORCE_INLINE TSharedRefArray& TSharedRefArray::operator=(TSharedRefArray&& other)
+{
+ Impl_ = std::move(other.Impl_);
+ return *this;
+}
+
+Y_FORCE_INLINE void TSharedRefArray::Reset()
+{
+ Impl_.Reset();
+}
+
+Y_FORCE_INLINE TSharedRefArray::operator bool() const
+{
+ return Impl_.operator bool();
+}
+
+Y_FORCE_INLINE size_t TSharedRefArray::Size() const
+{
+ return Impl_ ? Impl_->Size() : 0;
+}
+
+Y_FORCE_INLINE size_t TSharedRefArray::size() const
+{
+ return Impl_ ? Impl_->Size() : 0;
+}
+
+Y_FORCE_INLINE bool TSharedRefArray::Empty() const
+{
+ return Impl_ ? Impl_->Empty() : true;
+}
+
+Y_FORCE_INLINE const TSharedRef& TSharedRefArray::operator[](size_t index) const
+{
+ YT_ASSERT(Impl_);
+ return (*Impl_)[index];
+}
+
+Y_FORCE_INLINE const TSharedRef* TSharedRefArray::Begin() const
+{
+ return Impl_ ? Impl_->Begin() : nullptr;
+}
+
+Y_FORCE_INLINE const TSharedRef* TSharedRefArray::End() const
+{
+ return Impl_ ? Impl_->End() : nullptr;
+}
+
+template <class... As>
+TSharedRefArrayImplPtr TSharedRefArray::NewImpl(
+ size_t size,
+ size_t poolCapacity,
+ TRefCountedTypeCookie tagCookie,
+ As&&... args)
+{
+ auto extraSpaceSize = sizeof (TSharedRef) * size + poolCapacity;
+ return NewWithExtraSpace<TSharedRefArrayImpl>(
+ extraSpaceSize,
+ extraSpaceSize,
+ tagCookie,
+ std::forward<As>(args)...);
+}
+
+Y_FORCE_INLINE const TSharedRef* begin(const TSharedRefArray& array)
+{
+ return array.Begin();
+}
+
+Y_FORCE_INLINE const TSharedRef* end(const TSharedRefArray& array)
+{
+ return array.End();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
diff --git a/library/cpp/yt/memory/ref.cpp b/library/cpp/yt/memory/ref.cpp
new file mode 100644
index 0000000000..e8ff42e976
--- /dev/null
+++ b/library/cpp/yt/memory/ref.cpp
@@ -0,0 +1,378 @@
+#include "ref.h"
+#include "blob.h"
+
+#include <library/cpp/ytalloc/api/ytalloc.h>
+
+#include <util/system/info.h>
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+namespace NDetail {
+
+// N.B. We would prefer these arrays to be zero sized
+// but zero sized arrays are not supported in MSVC.
+// These back the distinguished "empty" (as opposed to null) refs.
+const char EmptyRefData[1] = {0};
+char MutableEmptyRefData[1] = {0};
+
+} // namespace NDetail
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Ref-counted holder that keeps a TBlob alive for as long as
+//! TSharedRef-s reference its memory.
+class TBlobHolder
+    : public TRefCounted
+{
+public:
+    explicit TBlobHolder(TBlob&& blob)
+        : Blob_(std::move(blob))
+    { }
+
+private:
+    // Owned storage; never mutated after construction.
+    const TBlob Blob_;
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Ref-counted holder that keeps a TString alive and, when tracking is
+//! enabled, reports its footprint under the supplied cookie.
+class TStringHolder
+    : public TRefCounted
+{
+public:
+    TStringHolder(TString&& string, TRefCountedTypeCookie cookie)
+        : String_(std::move(string))
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+        , Cookie_(cookie)
+#endif
+    {
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+        TRefCountedTrackerFacade::AllocateTagInstance(Cookie_);
+        TRefCountedTrackerFacade::AllocateSpace(Cookie_, String_.length());
+#endif
+    }
+    ~TStringHolder()
+    {
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+        // Mirrors the allocations recorded in the ctor.
+        TRefCountedTrackerFacade::FreeTagInstance(Cookie_);
+        TRefCountedTrackerFacade::FreeSpace(Cookie_, String_.length());
+#endif
+    }
+
+    const TString& String() const
+    {
+        return String_;
+    }
+
+private:
+    const TString String_;
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+    const TRefCountedTypeCookie Cookie_;
+#endif
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! CRTP base for holders owning a block of Size_ bytes; the derived class
+//! supplies GetBegin(). Handles optional zero-initialization and (when
+//! enabled) ref-counted allocation tracking.
+template <class TDerived>
+class TAllocationHolderBase
+    : public TRefCounted
+{
+public:
+    TAllocationHolderBase(size_t size, TRefCountedTypeCookie cookie)
+        : Size_(size)
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+        , Cookie_(cookie)
+#endif
+    { }
+
+    ~TAllocationHolderBase()
+    {
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+        // Mirrors the allocations recorded in Initialize().
+        TRefCountedTrackerFacade::FreeTagInstance(Cookie_);
+        TRefCountedTrackerFacade::FreeSpace(Cookie_, Size_);
+#endif
+    }
+
+    //! Returns a mutable view over the whole owned block.
+    TMutableRef GetRef()
+    {
+        return TMutableRef(static_cast<TDerived*>(this)->GetBegin(), Size_);
+    }
+
+protected:
+    const size_t Size_;
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+    const TRefCountedTypeCookie Cookie_;
+#endif
+
+    // Must be called from the derived ctor, once GetBegin() is valid.
+    void Initialize(bool initializeStorage)
+    {
+        if (initializeStorage) {
+            ::memset(static_cast<TDerived*>(this)->GetBegin(), 0, Size_);
+        }
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+        TRefCountedTrackerFacade::AllocateTagInstance(Cookie_);
+        TRefCountedTrackerFacade::AllocateSpace(Cookie_, Size_);
+#endif
+    }
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Holder whose payload lives in the same allocation, immediately past the
+//! holder object (TWithExtraSpace); used by TSharedMutableRef::Allocate.
+class TDefaultAllocationHolder
+    : public TAllocationHolderBase<TDefaultAllocationHolder>
+    , public TWithExtraSpace<TDefaultAllocationHolder>
+{
+public:
+    TDefaultAllocationHolder(size_t size, bool initializeStorage, TRefCountedTypeCookie cookie)
+        : TAllocationHolderBase(size, cookie)
+    {
+        Initialize(initializeStorage);
+    }
+
+    char* GetBegin()
+    {
+        return static_cast<char*>(GetExtraSpacePtr());
+    }
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Holder whose payload is a separate page-aligned allocation from NYTAlloc,
+//! released in the dtor; used by TSharedMutableRef::AllocatePageAligned.
+class TPageAlignedAllocationHolder
+    : public TAllocationHolderBase<TPageAlignedAllocationHolder>
+{
+public:
+    TPageAlignedAllocationHolder(size_t size, bool initializeStorage, TRefCountedTypeCookie cookie)
+        : TAllocationHolderBase(size, cookie)
+        , Begin_(static_cast<char*>(NYTAlloc::AllocatePageAligned(size)))
+    {
+        // Safe to call here: Begin_ is already initialized above.
+        Initialize(initializeStorage);
+    }
+
+    ~TPageAlignedAllocationHolder()
+    {
+        NYTAlloc::Free(Begin_);
+    }
+
+    char* GetBegin()
+    {
+        return Begin_;
+    }
+
+private:
+    char* const Begin_;
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Non-owning view over the blob; the caller must keep the blob alive.
+TRef TRef::FromBlob(const TBlob& blob)
+{
+    return TRef(blob.Begin(), blob.Size());
+}
+
+//! Content (not identity) comparison of two ranges.
+bool TRef::AreBitwiseEqual(TRef lhs, TRef rhs)
+{
+    if (lhs.Size() != rhs.Size()) {
+        return false;
+    }
+    if (lhs.Size() == 0) {
+        // Avoid passing (possibly null) pointers of empty refs to memcmp.
+        return true;
+    }
+    return ::memcmp(lhs.Begin(), rhs.Begin(), lhs.Size()) == 0;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Non-owning mutable view over the blob; the caller must keep the blob alive.
+TMutableRef TMutableRef::FromBlob(TBlob& blob)
+{
+    return TMutableRef(blob.Begin(), blob.Size());
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Moves the string into a ref-counted holder; no character data is copied.
+TSharedRef TSharedRef::FromString(TString str, TRefCountedTypeCookie tagCookie)
+{
+    auto holder = New<TStringHolder>(std::move(str), tagCookie);
+    auto ref = TRef::FromString(holder->String());
+    return TSharedRef(ref, std::move(holder));
+}
+
+//! Takes ownership of the blob's content.
+TSharedRef TSharedRef::FromBlob(TBlob&& blob)
+{
+    // The ref is captured before the move; this relies on TBlob's move ctor
+    // keeping the buffer address stable — presumably true, verify in blob.h.
+    auto ref = TRef::FromBlob(blob);
+    auto holder = New<TBlobHolder>(std::move(blob));
+    return TSharedRef(ref, std::move(holder));
+}
+
+//! Copies the bytes of |ref| into freshly allocated shared storage,
+//! preserving the null vs. empty distinction of the input.
+TSharedRef TSharedRef::MakeCopy(TRef ref, TRefCountedTypeCookie tagCookie)
+{
+    if (!ref) {
+        return {};
+    }
+    if (ref.Empty()) {
+        return TSharedRef::MakeEmpty();
+    }
+    // Zero-initialization is skipped: the buffer is fully overwritten below.
+    auto result = TSharedMutableRef::Allocate(ref.Size(), false, tagCookie);
+    ::memcpy(result.Begin(), ref.Begin(), ref.Size());
+    return result;
+}
+
+//! Splits the ref into consecutive slices of |partSize| bytes each;
+//! the final part may be shorter. All parts share this ref's holder.
+std::vector<TSharedRef> TSharedRef::Split(size_t partSize) const
+{
+    YT_VERIFY(partSize > 0);
+    std::vector<TSharedRef> result;
+    result.reserve(Size() / partSize + 1);
+    auto sliceBegin = Begin();
+    while (sliceBegin < End()) {
+        auto sliceEnd = sliceBegin + partSize;
+        // First comparison guards against pointer overflow for huge partSize;
+        // in either case the slice is clamped to End().
+        if (sliceEnd < sliceBegin || sliceEnd > End()) {
+            sliceEnd = End();
+        }
+        result.push_back(Slice(sliceBegin, sliceEnd));
+        sliceBegin = sliceEnd;
+    }
+    return result;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Single allocation: the holder and its payload are colocated
+//! (payload lives in the holder's extra space).
+TSharedMutableRef TSharedMutableRef::Allocate(size_t size, bool initializeStorage, TRefCountedTypeCookie tagCookie)
+{
+    auto holder = NewWithExtraSpace<TDefaultAllocationHolder>(size, size, initializeStorage, tagCookie);
+    auto ref = holder->GetRef();
+    return TSharedMutableRef(ref, std::move(holder));
+}
+
+//! Two allocations: the holder, plus a separately page-aligned payload.
+TSharedMutableRef TSharedMutableRef::AllocatePageAligned(size_t size, bool initializeStorage, TRefCountedTypeCookie tagCookie)
+{
+    auto holder = New<TPageAlignedAllocationHolder>(size, initializeStorage, tagCookie);
+    auto ref = holder->GetRef();
+    return TSharedMutableRef(ref, std::move(holder));
+}
+
+//! Takes ownership of the blob's content (mutable counterpart of
+//! TSharedRef::FromBlob; same buffer-address-stability assumption applies).
+TSharedMutableRef TSharedMutableRef::FromBlob(TBlob&& blob)
+{
+    auto ref = TMutableRef::FromBlob(blob);
+    auto holder = New<TBlobHolder>(std::move(blob));
+    return TSharedMutableRef(ref, std::move(holder));
+}
+
+//! Copies |ref|'s bytes into fresh shared mutable storage,
+//! preserving the null vs. empty distinction of the input.
+TSharedMutableRef TSharedMutableRef::MakeCopy(TRef ref, TRefCountedTypeCookie tagCookie)
+{
+    if (!ref) {
+        return {};
+    }
+    if (ref.Empty()) {
+        return TSharedMutableRef::MakeEmpty();
+    }
+    // Zero-initialization is skipped: the buffer is fully overwritten below.
+    auto result = Allocate(ref.Size(), false, tagCookie);
+    ::memcpy(result.Begin(), ref.Begin(), ref.Size());
+    return result;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Materializes the referenced bytes as a fresh TString copy.
+TString ToString(TRef ref)
+{
+    return TString(ref.Begin(), ref.End());
+}
+
+// The overloads below simply funnel through the TRef version.
+TString ToString(const TMutableRef& ref)
+{
+    return ToString(TRef(ref));
+}
+
+TString ToString(const TSharedRef& ref)
+{
+    return ToString(TRef(ref));
+}
+
+TString ToString(const TSharedMutableRef& ref)
+{
+    return ToString(TRef(ref));
+}
+
+//! Returns the system page size, queried once and cached
+//! (the value cannot change during process lifetime).
+size_t GetPageSize()
+{
+    static const size_t PageSize = NSystemInfo::GetPageSize();
+    return PageSize;
+}
+
+//! Rounds |bytes| up to the nearest multiple of the page size.
+size_t RoundUpToPage(size_t bytes)
+{
+    // Reuse the cached value from GetPageSize() instead of maintaining a
+    // second function-local static querying NSystemInfo again.
+    const size_t pageSize = GetPageSize();
+    // The mask trick below is only valid for power-of-two page sizes.
+    YT_ASSERT((pageSize & (pageSize - 1)) == 0);
+    return (bytes + pageSize - 1) & (~(pageSize - 1));
+}
+
+//! Total number of bytes across all parts; a null array contributes zero.
+size_t GetByteSize(const TSharedRefArray& array)
+{
+    size_t size = 0;
+    if (array) {
+        for (const auto& part : array) {
+            size += part.Size();
+        }
+    }
+    return size;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Total number of bytes across all parts (signed variant of the free
+//! GetByteSize(const TSharedRefArray&) above); zero for a null array.
+i64 TSharedRefArray::ByteSize() const
+{
+    i64 result = 0;
+    if (*this) {
+        for (const auto& part : *this) {
+            result += part.Size();
+        }
+    }
+    return result;
+}
+
+//! Copies the parts into a std::vector; a null array yields an empty vector.
+std::vector<TSharedRef> TSharedRefArray::ToVector() const
+{
+    if (!Impl_) {
+        return {};
+    }
+
+    return std::vector<TSharedRef>(Begin(), End());
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Allocates the impl with |size| part slots and |poolCapacity| pool bytes
+//! in one tagged allocation; the trailing |size| argument is forwarded by
+//! NewImpl to the impl's ctor.
+TSharedRefArrayBuilder::TSharedRefArrayBuilder(
+    size_t size,
+    size_t poolCapacity,
+    TRefCountedTypeCookie tagCookie)
+    : AllocationCapacity_(poolCapacity)
+    , Impl_(TSharedRefArray::NewImpl(
+        size,
+        poolCapacity,
+        tagCookie,
+        size))
+    , CurrentAllocationPtr_(Impl_->GetBeginAllocationPtr())
+{ }
+
+//! Stores an externally created part into the next free slot.
+void TSharedRefArrayBuilder::Add(TSharedRef part)
+{
+    YT_ASSERT(CurrentPartIndex_ < Impl_->Size());
+    Impl_->MutableBegin()[CurrentPartIndex_++] = std::move(part);
+}
+
+//! Carves |size| bytes out of the builder's pool and adds them as the next part.
+TMutableRef TSharedRefArrayBuilder::AllocateAndAdd(size_t size)
+{
+    YT_ASSERT(CurrentPartIndex_ < Impl_->Size());
+    YT_ASSERT(CurrentAllocationPtr_ + size <= Impl_->GetBeginAllocationPtr() + AllocationCapacity_);
+    TMutableRef ref(CurrentAllocationPtr_, size);
+    CurrentAllocationPtr_ += size;
+    // NOTE(review): the second ctor argument is false — presumably "do not add
+    // a reference", making the part's holder adopt (not share) a ref to the
+    // impl; verify the intrusive-ptr ctor semantics and the resulting
+    // ref-count balance against Finish().
+    TRefCountedPtr holder(Impl_.Get(), false);
+    TSharedRef sharedRef(ref, std::move(holder));
+    Add(std::move(sharedRef));
+    return ref;
+}
+
+//! Finalizes the construction; the builder must not be reused afterwards.
+TSharedRefArray TSharedRefArrayBuilder::Finish()
+{
+    return TSharedRefArray(std::move(Impl_));
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
diff --git a/library/cpp/yt/memory/ref.h b/library/cpp/yt/memory/ref.h
new file mode 100644
index 0000000000..73d19d9013
--- /dev/null
+++ b/library/cpp/yt/memory/ref.h
@@ -0,0 +1,384 @@
+#pragma once
+
+#include "new.h"
+#include "range.h"
+#include "shared_range.h"
+
+#include <type_traits>
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Forward declaration.
+class TBlob;
+
+//! A non-owning reference to a range of memory.
+//! A non-owning reference to a range of memory.
+//! The referenced memory must outlive every TRef pointing into it.
+class TRef
+    : public TRange<char>
+{
+public:
+    //! Creates a null TRef.
+    TRef() = default;
+
+    //! Creates a TRef for a given block of memory.
+    TRef(const void* data, size_t size);
+
+    //! Creates a TRef for a given range of memory.
+    TRef(const void* begin, const void* end);
+
+    //! Creates an empty TRef (distinct from a null one).
+    static TRef MakeEmpty();
+
+    //! Creates a non-owning TRef for a given blob.
+    static TRef FromBlob(const TBlob& blob);
+
+    //! Creates a non-owning TRef for a given string.
+    static TRef FromString(const TString& str);
+
+    //! Creates a non-owning TRef for a given stringbuf.
+    static TRef FromStringBuf(TStringBuf strBuf);
+
+    //! Creates a non-owning TRef for a given pod structure.
+    template <class T>
+    static TRef FromPod(const T& data);
+
+    //! Creates a TRef for a part of existing range.
+    TRef Slice(size_t startOffset, size_t endOffset) const;
+
+    //! Compares the content (not the pointers) for bitwise equality.
+    static bool AreBitwiseEqual(TRef lhs, TRef rhs);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! A non-owning reference to a mutable range of memory.
+//! Use with caution :)
+//! A non-owning reference to a mutable range of memory.
+//! Use with caution :)
+class TMutableRef
+    : public TMutableRange<char>
+{
+public:
+    //! Creates a null TMutableRef.
+    //! Note empty TMutableRef is not the same as null TMutableRef.
+    //! `operator bool` can be used to check if ref is nonnull.
+    TMutableRef() = default;
+
+    //! Creates a TMutableRef for a given block of memory.
+    TMutableRef(void* data, size_t size);
+
+    //! Creates a TMutableRef for a given range of memory.
+    TMutableRef(void* begin, void* end);
+
+    //! Creates an empty TMutableRef.
+    static TMutableRef MakeEmpty();
+
+    //! Converts a TMutableRef to TRef (implicit read-only widening).
+    operator TRef() const;
+
+    //! Creates a non-owning TMutableRef for a given blob.
+    static TMutableRef FromBlob(TBlob& blob);
+
+    //! Creates a non-owning TMutableRef for a given pod structure.
+    template <class T>
+    static TMutableRef FromPod(T& data);
+
+    //! Creates a non-owning TMutableRef for a given string.
+    //! Ensures that the string is not shared.
+    static TMutableRef FromString(TString& str);
+
+    //! Creates a TMutableRef for a part of existing range.
+    TMutableRef Slice(size_t startOffset, size_t endOffset) const;
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Default tag type for memory blocks allocated via TSharedRef.
+/*!
+ *  Each newly allocated TSharedRef blob is associated with a tag type
+ *  that appears in ref-counted statistics.
+ */
+struct TDefaultSharedBlobTag { };
+
+//! A reference to a range of memory with shared ownership.
+class TSharedRef
+ : public TSharedRange<char>
+{
+public:
+ //! Creates a null TSharedRef.
+ TSharedRef() = default;
+
+ //! Creates a TSharedRef with a given holder.
+ TSharedRef(TRef ref, THolderPtr holder);
+
+ //! Creates a TSharedRef from a pointer and length.
+ TSharedRef(const void* data, size_t length, THolderPtr holder);
+
+ //! Creates a TSharedRef from a range.
+ TSharedRef(const void* begin, const void* end, THolderPtr holder);
+
+ //! Creates an empty TSharedRef.
+ static TSharedRef MakeEmpty();
+
+ //! Converts a TSharedRef to TRef.
+ operator TRef() const;
+
+
+ //! Creates a TSharedRef from a string.
+ //! Since strings are ref-counted, no data is copied.
+ //! The memory is marked with a given tag.
+ template <class TTag>
+ static TSharedRef FromString(TString str);
+
+ //! Creates a TSharedRef from a string.
+ //! Since strings are ref-counted, no data is copied.
+ //! The memory is marked with TDefaultSharedBlobTag.
+ static TSharedRef FromString(TString str);
+
+ //! Creates a TSharedRef reference from a string.
+ //! Since strings are ref-counted, no data is copied.
+ //! The memory is marked with a given tag.
+ static TSharedRef FromString(TString str, TRefCountedTypeCookie tagCookie);
+
+ //! Creates a TSharedRef for a given blob taking ownership of its content.
+ static TSharedRef FromBlob(TBlob&& blob);
+
+ //! Creates a copy of a given TRef.
+ //! The memory is marked with a given tag.
+ static TSharedRef MakeCopy(TRef ref, TRefCountedTypeCookie tagCookie);
+
+ //! Creates a copy of a given TRef.
+ //! The memory is marked with a given tag.
+ template <class TTag>
+ static TSharedRef MakeCopy(TRef ref);
+
+ //! Creates a TSharedRef for a part of existing range.
+ TSharedRef Slice(size_t startOffset, size_t endOffset) const;
+
+ //! Creates a TSharedRef for a part of existing range.
+ TSharedRef Slice(const void* begin, const void* end) const;
+
+ //! Creates a vector of slices with specified size.
+ std::vector<TSharedRef> Split(size_t partSize) const;
+
+private:
+ friend class TSharedRefArrayImpl;
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! A reference to a mutable range of memory with shared ownership.
+//! Use with caution :)
+//! A reference to a mutable range of memory with shared ownership.
+//! Use with caution :)
+class TSharedMutableRef
+    : public TSharedMutableRange<char>
+{
+public:
+    //! Creates a null TSharedMutableRef.
+    TSharedMutableRef() = default;
+
+    //! Creates a TSharedMutableRef with a given holder.
+    TSharedMutableRef(const TMutableRef& ref, THolderPtr holder);
+
+    //! Creates a TSharedMutableRef from a pointer and length.
+    TSharedMutableRef(void* data, size_t length, THolderPtr holder);
+
+    //! Creates a TSharedMutableRef from a range.
+    TSharedMutableRef(void* begin, void* end, THolderPtr holder);
+
+    //! Creates an empty TSharedMutableRef.
+    static TSharedMutableRef MakeEmpty();
+
+    // Implicit read-only / non-owning widening conversions.
+    //! Converts a TSharedMutableRef to TMutableRef.
+    operator TMutableRef() const;
+
+    //! Converts a TSharedMutableRef to TSharedRef.
+    operator TSharedRef() const;
+
+    //! Converts a TSharedMutableRef to TRef.
+    operator TRef() const;
+
+
+    //! Allocates a new shared block of memory.
+    //! The memory is marked with a given tag.
+    template <class TTag>
+    static TSharedMutableRef Allocate(size_t size, bool initializeStorage = true);
+
+    //! Allocates a new shared block of memory.
+    //! The memory is marked with TDefaultSharedBlobTag.
+    static TSharedMutableRef Allocate(size_t size, bool initializeStorage = true);
+
+    //! Allocates a new shared block of memory.
+    //! The memory is marked with a given tag.
+    static TSharedMutableRef Allocate(size_t size, bool initializeStorage, TRefCountedTypeCookie tagCookie);
+
+    //! Allocates a new page aligned shared block of memory.
+    //! #size must be divisible by page size.
+    //! The memory is marked with a given tag.
+    template <class TTag>
+    static TSharedMutableRef AllocatePageAligned(size_t size, bool initializeStorage = true);
+
+    //! Allocates a new page aligned shared block of memory.
+    //! #size must be divisible by page size.
+    //! The memory is marked with TDefaultSharedBlobTag.
+    static TSharedMutableRef AllocatePageAligned(size_t size, bool initializeStorage = true);
+
+    //! Allocates a new page aligned shared block of memory.
+    //! #size must be divisible by page size.
+    //! The memory is marked with a given tag.
+    static TSharedMutableRef AllocatePageAligned(size_t size, bool initializeStorage, TRefCountedTypeCookie tagCookie);
+
+    //! Creates a TSharedMutableRef for the whole blob taking ownership of its content.
+    static TSharedMutableRef FromBlob(TBlob&& blob);
+
+    //! Creates a copy of a given TRef.
+    //! The memory is marked with a given tag.
+    static TSharedMutableRef MakeCopy(TRef ref, TRefCountedTypeCookie tagCookie);
+
+    //! Creates a copy of a given TRef.
+    //! The memory is marked with a given tag.
+    template <class TTag>
+    static TSharedMutableRef MakeCopy(TRef ref);
+
+    //! Creates a reference for a part of existing range.
+    TSharedMutableRef Slice(size_t startOffset, size_t endOffset) const;
+
+    //! Creates a reference for a part of existing range.
+    TSharedMutableRef Slice(void* begin, void* end) const;
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+DECLARE_REFCOUNTED_CLASS(TSharedRefArrayImpl)
+
+//! A smart-pointer to a ref-counted immutable sequence of TSharedRef-s.
+//! A smart-pointer to a ref-counted immutable sequence of TSharedRef-s.
+//! A default-constructed instance is null; null is distinct from empty
+//! (see the inline accessors for the exact conventions).
+class TSharedRefArray
+{
+public:
+    TSharedRefArray() = default;
+    TSharedRefArray(const TSharedRefArray& other);
+    TSharedRefArray(TSharedRefArray&& other) noexcept;
+
+    explicit TSharedRefArray(const TSharedRef& part);
+    explicit TSharedRefArray(TSharedRef&& part);
+
+    // Tag types selecting whether the parts container is copied or moved from.
+    struct TCopyParts
+    { };
+    struct TMoveParts
+    { };
+
+    template <class TParts>
+    TSharedRefArray(const TParts& parts, TCopyParts);
+    template <class TParts>
+    TSharedRefArray(TParts&& parts, TMoveParts);
+
+    TSharedRefArray& operator = (const TSharedRefArray& other);
+    TSharedRefArray& operator = (TSharedRefArray&& other);
+
+    //! True iff the array is non-null.
+    explicit operator bool() const;
+
+    void Reset();
+
+    size_t Size() const;
+    size_t size() const;
+    i64 ByteSize() const;
+    bool Empty() const;
+    const TSharedRef& operator [] (size_t index) const;
+
+    const TSharedRef* Begin() const;
+    const TSharedRef* End() const;
+
+    std::vector<TSharedRef> ToVector() const;
+
+private:
+    friend class TSharedRefArrayBuilder;
+
+    TSharedRefArrayImplPtr Impl_;
+
+    explicit TSharedRefArray(TSharedRefArrayImplPtr impl);
+
+    //! Creates the impl with part slots and pool space in a single
+    //! tagged allocation; see the definition in the -inl header.
+    template <class... As>
+    static TSharedRefArrayImplPtr NewImpl(
+        size_t size,
+        size_t poolCapacity,
+        TRefCountedTypeCookie cookie,
+        As&&... args);
+};
+
+// STL interop.
+const TSharedRef* begin(const TSharedRefArray& array);
+const TSharedRef* end(const TSharedRefArray& array);
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct TDefaultSharedRefArrayBuilderTag { };
+
+//! A helper for creating TSharedRefArray.
+class TSharedRefArrayBuilder
+{
+public:
+ //! Creates a builder instance.
+ /*
+ * The user must provide the total (resulting) part count in #size.
+ *
+ * Additionally, the user may request a certain memory pool of size #poolCapacity
+ * to be created. Parts occupiying space in the above pool are created with #AllocateAndAdd
+ * calls.
+ *
+ * The pool (if any) and the array are created within a single memory allocation tagged with
+ * #tagCookie.
+ *
+ * If less than #size parts are added, the trailing ones are null.
+ */
+ explicit TSharedRefArrayBuilder(
+ size_t size,
+ size_t poolCapacity = 0,
+ TRefCountedTypeCookie tagCookie = GetRefCountedTypeCookie<TDefaultSharedRefArrayBuilderTag>());
+
+ //! Adds an existing TSharedRef part to the constructed array.
+ void Add(TSharedRef part);
+
+ //! Allocates #size memory from the pool and adds a part to the constuctured array.
+ /*!
+ * The resulting TMutableRef enables the user to fill the just-created part appropriately.
+ * The total sum of #size during all #AllocateAndAll calls must now exceed #allocationCapacity
+ * passed to the ctor.
+ *
+ * The memory is being claimed from the pool contiguously; the user must
+ * take care of the alignment issues on its own.
+ */
+ TMutableRef AllocateAndAdd(size_t size);
+
+ //! Finalizes the construction; returns the constructed TSharedRefArray.
+ TSharedRefArray Finish();
+
+private:
+ const size_t AllocationCapacity_;
+ TSharedRefArrayImplPtr Impl_;
+ char* CurrentAllocationPtr_;
+ size_t CurrentPartIndex_ = 0;
+};
+
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Stringification helpers: each materializes the referenced bytes as a TString copy.
+TString ToString(TRef ref);
+TString ToString(const TMutableRef& ref);
+TString ToString(const TSharedRef& ref);
+TString ToString(const TSharedMutableRef& ref);
+
+// Page-size helpers; see ref.cpp for definitions.
+size_t GetPageSize();
+size_t RoundUpToPage(size_t bytes);
+
+// Byte-count helpers over single refs and collections of parts.
+size_t GetByteSize(TRef ref);
+size_t GetByteSize(const TSharedRefArray& array);
+template <class T>
+size_t GetByteSize(TRange<T> parts);
+template <class T>
+size_t GetByteSize(const std::vector<T>& parts);
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
+
+#define REF_INL_H_
+#include "ref-inl.h"
+#undef REF_INL_H_
diff --git a/library/cpp/yt/memory/ref_counted-inl.h b/library/cpp/yt/memory/ref_counted-inl.h
new file mode 100644
index 0000000000..e6d64fec18
--- /dev/null
+++ b/library/cpp/yt/memory/ref_counted-inl.h
@@ -0,0 +1,278 @@
+#ifndef REF_COUNTED_INL_H_
+#error "Direct inclusion of this file is not allowed, include ref_counted.h"
+// For the sake of sane code completion.
+#include "ref_counted.h"
+#endif
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+// x86-64 canonical addresses use only the low 48 bits, leaving the top
+// 16 bits free to smuggle a small payload alongside the pointer.
+constexpr uint16_t PtrBits = 48;
+constexpr uintptr_t PtrMask = (1ULL << PtrBits) - 1;
+
+//! Packs a 16-bit payload into the unused high bits of |ptr|.
+template <class T>
+Y_FORCE_INLINE char* PackPointer(T* ptr, uint16_t data)
+{
+    return reinterpret_cast<char*>((static_cast<uintptr_t>(data) << PtrBits) | reinterpret_cast<uintptr_t>(ptr));
+}
+
+//! Result of UnpackPointer: the 16-bit payload and the original pointer.
+template <class T>
+struct TPackedPointer
+{
+    uint16_t Data;
+    T* Ptr;
+};
+
+//! Inverse of PackPointer: splits a packed value back into payload and pointer.
+template <class T>
+Y_FORCE_INLINE TPackedPointer<T> UnpackPointer(void* packedPtr)
+{
+    auto castedPtr = reinterpret_cast<uintptr_t>(packedPtr);
+    return {static_cast<uint16_t>(castedPtr >> PtrBits), reinterpret_cast<T*>(castedPtr & PtrMask)};
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Releases an object's storage. Default: free immediately via TFreeMemory.
+//! A specialization below defers freeing for hazard-pointer-enabled types.
+template <class T, class = void>
+struct TMemoryReleaser
+{
+    static void Do(void* ptr, uint16_t /*offset*/)
+    {
+        TFreeMemory<T>::Do(ptr);
+    }
+};
+
+using TDeleter = void (*)(void*);
+
+// Defined elsewhere; defers |deleter(ptr)| until no hazard pointers reference |ptr|.
+void ScheduleObjectDeletion(void* ptr, TDeleter deleter);
+
+//! For types opting in via a static `EnableHazard` flag: freeing is postponed
+//! through the hazard-pointer machinery instead of happening immediately.
+template <class T>
+struct TMemoryReleaser<T, std::enable_if_t<T::EnableHazard>>
+{
+    static void Do(void* ptr, uint16_t offset)
+    {
+        // Base pointer is used in HazardPtr as the identity of object.
+        // The offset rides along in the pointer's high bits (PackPointer).
+        auto* basePtr = PackPointer(static_cast<char*>(ptr) + offset, offset);
+
+        ScheduleObjectDeletion(basePtr, [] (void* ptr) {
+            // Base ptr and the beginning of allocated memory region may differ.
+            auto [offset, basePtr] = UnpackPointer<char>(ptr);
+            TFreeMemory<T>::Do(basePtr - offset);
+        });
+    }
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Snapshot of the strong count; for debugging only (racy by nature).
+Y_FORCE_INLINE int TRefCounter::GetRefCount() const noexcept
+{
+    return StrongCount_.load(std::memory_order_relaxed);
+}
+
+Y_FORCE_INLINE void TRefCounter::Ref() const noexcept
+{
+    // It is safe to use relaxed here, since new reference is always created from another live reference.
+    StrongCount_.fetch_add(1, std::memory_order_relaxed);
+
+    YT_ASSERT(WeakCount_.load(std::memory_order_relaxed) > 0);
+}
+
+//! Attempts to acquire a strong ref; fails iff the count has already dropped
+//! to zero (used when promoting a weak ref).
+Y_FORCE_INLINE bool TRefCounter::TryRef() const noexcept
+{
+    auto value = StrongCount_.load(std::memory_order_relaxed);
+    YT_ASSERT(WeakCount_.load(std::memory_order_relaxed) > 0);
+
+    // CAS loop: compare_exchange_weak may fail spuriously and also reloads
+    // |value| on failure; never increments once zero has been observed.
+    while (value != 0 && !StrongCount_.compare_exchange_weak(value, value + 1));
+    return value != 0;
+}
+
+//! Drops a strong reference; returns true iff this was the last one
+//! (the caller is then responsible for destroying the object).
+Y_FORCE_INLINE bool TRefCounter::Unref() const
+{
+    // We must properly synchronize last access to object with it destruction.
+    // Otherwise compiler might reorder access to object past this decrement.
+    //
+    // See http://www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html#boost_atomic.usage_examples.example_reference_counters
+    //
+    auto oldStrongCount = StrongCount_.fetch_sub(1, std::memory_order_release);
+    YT_ASSERT(oldStrongCount > 0);
+    if (oldStrongCount == 1) {
+        // The acquire load pairs with the release decrements of other threads,
+        // making all their accesses visible before the destructor runs.
+        StrongCount_.load(std::memory_order_acquire);
+        return true;
+    } else {
+        return false;
+    }
+}
+
+Y_FORCE_INLINE int TRefCounter::GetWeakRefCount() const noexcept
+{
+    return WeakCount_.load(std::memory_order_acquire);
+}
+
+Y_FORCE_INLINE void TRefCounter::WeakRef() const noexcept
+{
+    // Relaxed is fine: a new weak ref is always created from a live one.
+    auto oldWeakCount = WeakCount_.fetch_add(1, std::memory_order_relaxed);
+    YT_ASSERT(oldWeakCount > 0);
+}
+
+//! Drops a weak reference; returns true iff this was the last one
+//! (the caller is then responsible for deallocating the storage).
+Y_FORCE_INLINE bool TRefCounter::WeakUnref() const
+{
+    // Release/acquire pairing mirrors Unref() above, here guarding deallocation.
+    auto oldWeakCount = WeakCount_.fetch_sub(1, std::memory_order_release);
+    YT_ASSERT(oldWeakCount > 0);
+    if (oldWeakCount == 1) {
+        WeakCount_.load(std::memory_order_acquire);
+        return true;
+    } else {
+        return false;
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Helper for types NOT derived from TRefCountedBase: the TRefCounter is
+//! stored immediately before the object within the same allocation
+//! (layout: [padding][TRefCounter][T], with T suitably aligned).
+template <class T, bool = std::is_base_of_v<TRefCountedBase, T>>
+struct TRefCountedHelper
+{
+    static_assert(
+        std::is_final_v<T>,
+        "Ref-counted objects must be derived from TRefCountedBase or to be final");
+
+    // Bytes reserved ahead of T: sizeof(TRefCounter) rounded up to alignof(T).
+    static constexpr size_t RefCounterSpace = (sizeof(TRefCounter) + alignof(T) - 1) & ~(alignof(T) - 1);
+    static constexpr size_t RefCounterOffset = RefCounterSpace - sizeof(TRefCounter);
+
+    Y_FORCE_INLINE static const TRefCounter* GetRefCounter(const T* obj)
+    {
+        // The counter sits directly before the object.
+        return reinterpret_cast<const TRefCounter*>(obj) - 1;
+    }
+
+    Y_FORCE_INLINE static void Destroy(const T* obj)
+    {
+        auto* refCounter = GetRefCounter(obj);
+
+        // No virtual call when T is final.
+        obj->~T();
+
+        char* ptr = reinterpret_cast<char*>(const_cast<TRefCounter*>(refCounter));
+
+        // Fast path. Weak refs cannot appear if there are neither strong nor weak refs.
+        if (refCounter->GetWeakRefCount() == 1) {
+            TMemoryReleaser<T>::Do(ptr - RefCounterOffset, RefCounterSpace);
+            return;
+        }
+
+        // Slow path: drop the strong refs' implicit weak ref; free only if last.
+        if (refCounter->WeakUnref()) {
+            TMemoryReleaser<T>::Do(ptr - RefCounterOffset, RefCounterSpace);
+        }
+    }
+
+    //! Frees the storage without running the destructor (object already destroyed).
+    Y_FORCE_INLINE static void Deallocate(const T* obj)
+    {
+        char* ptr = reinterpret_cast<char*>(const_cast<TRefCounter*>(GetRefCounter(obj)));
+        TMemoryReleaser<T>::Do(ptr - RefCounterOffset, RefCounterSpace);
+    }
+};
+
+//! Helper for types derived from TRefCountedBase: the object embeds its own
+//! counter, and destruction dispatches through the virtual DestroyRefCounted().
+template <class T>
+struct TRefCountedHelper<T, true>
+{
+    Y_FORCE_INLINE static const TRefCounter* GetRefCounter(const T* obj)
+    {
+        // T (via TRefCounted) is itself the counter; implicit upcast.
+        return obj;
+    }
+
+    Y_FORCE_INLINE static void Destroy(const TRefCountedBase* obj)
+    {
+        const_cast<TRefCountedBase*>(obj)->DestroyRefCounted();
+    }
+
+    Y_FORCE_INLINE static void Deallocate(const TRefCountedBase* obj)
+    {
+        // After destruction, DestroyRefCountedImpl has stashed a packed
+        // (deleter, offset) pair in the former vtable slot; recover and call it.
+        auto* ptr = reinterpret_cast<void**>(const_cast<TRefCountedBase*>(obj));
+        auto [offset, ptrToDeleter] = UnpackPointer<void(void*, uint16_t)>(*ptr);
+
+        // The most derived type is erased here. So we cannot call TMemoryReleaser with derived type.
+        ptrToDeleter(reinterpret_cast<char*>(ptr) - offset, offset);
+    }
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Thin free-function facade over TRefCountedHelper, picking the right
+// specialization based on whether T derives from TRefCountedBase.
+template <class T>
+Y_FORCE_INLINE const TRefCounter* GetRefCounter(const T* obj)
+{
+    return TRefCountedHelper<T>::GetRefCounter(obj);
+}
+
+template <class T>
+Y_FORCE_INLINE void DestroyRefCounted(const T* obj)
+{
+    TRefCountedHelper<T>::Destroy(obj);
+}
+
+template <class T>
+Y_FORCE_INLINE void DeallocateRefCounted(const T* obj)
+{
+    TRefCountedHelper<T>::Deallocate(obj);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Acquires a strong reference to |obj|.
+template <class T>
+Y_FORCE_INLINE void Ref(T* obj)
+{
+    GetRefCounter(obj)->Ref();
+}
+
+//! Releases a strong reference; destroys |obj| if it was the last one.
+template <class T>
+Y_FORCE_INLINE void Unref(T* obj)
+{
+    if (GetRefCounter(obj)->Unref()) {
+        DestroyRefCounted(obj);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+Y_FORCE_INLINE void TRefCounted::Unref() const
+{
+    // Delegate to the free function so the same destroy path is used.
+    ::NYT::Unref(this);
+}
+
+Y_FORCE_INLINE void TRefCounted::WeakUnref() const
+{
+    // When the last weak ref dies, the storage (destructor already ran) is freed.
+    if (TRefCounter::WeakUnref()) {
+        DeallocateRefCounted(this);
+    }
+}
+
+
+//! Destroys a TRefCountedBase-derived object. If weak refs still exist, the
+//! dead object's vtable slot is reused to stash a packed (deleter, offset)
+//! pair so that the last weak ref can free the storage without knowing T
+//! (see TRefCountedHelper<T, true>::Deallocate).
+template <class T>
+void TRefCounted::DestroyRefCountedImpl(T* ptr)
+{
+    // No standard way to statically calculate the base offset even if T is final.
+    // static_cast<TFinalDerived*>(virtualBasePtr) does not work.
+
+    auto* basePtr = static_cast<TRefCountedBase*>(ptr);
+    auto offset = reinterpret_cast<uintptr_t>(basePtr) - reinterpret_cast<uintptr_t>(ptr);
+    auto* refCounter = GetRefCounter(ptr);
+
+    // No virtual call when T is final.
+    ptr->~T();
+
+    // Fast path. Weak refs cannot appear if there are neither strong nor weak refs.
+    if (refCounter->GetWeakRefCount() == 1) {
+        TMemoryReleaser<T>::Do(ptr, offset);
+        return;
+    }
+
+    // The offset must fit in the 16 payload bits of PackPointer.
+    YT_ASSERT(offset < std::numeric_limits<uint16_t>::max());
+
+    // The object is dead; its vtable pointer slot is repurposed to carry the
+    // type-erased deleter plus the base offset for the final deallocation.
+    auto* vTablePtr = reinterpret_cast<char**>(basePtr);
+    *vTablePtr = PackPointer(&TMemoryReleaser<T>::Do, offset);
+
+    if (refCounter->WeakUnref()) {
+        TMemoryReleaser<T>::Do(ptr, offset);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
diff --git a/library/cpp/yt/memory/ref_counted.h b/library/cpp/yt/memory/ref_counted.h
new file mode 100644
index 0000000000..b683615b83
--- /dev/null
+++ b/library/cpp/yt/memory/ref_counted.h
@@ -0,0 +1,190 @@
+#pragma once
+
+#include <library/cpp/yt/misc/port.h>
+
+#include <library/cpp/yt/assert/assert.h>
+
+#include <library/cpp/ytalloc/api/ytalloc.h>
+
+#include <atomic>
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! A technical base class for ref-counted objects and promise states.
+//! Instances are pinned in memory: copying/moving is deleted because the
+//! object's address is what the reference counters are attached to.
+class TRefCountedBase
+{
+public:
+    TRefCountedBase() = default;
+
+    // Make destructor protected
+    // NOTE(review): the comment above says "protected" but the destructor is
+    // declared public here — confirm intent. Destruction normally goes through
+    // DestroyRefCounted() rather than delete-through-base.
+    virtual ~TRefCountedBase() noexcept = default;
+
+    //! Type-erased destruction hook; the most-derived override knows the
+    //! concrete type and can run its destructor and release the memory.
+    virtual void DestroyRefCounted() = 0;
+
+private:
+    TRefCountedBase(const TRefCountedBase&) = delete;
+    TRefCountedBase(TRefCountedBase&&) = delete;
+
+    TRefCountedBase& operator=(const TRefCountedBase&) = delete;
+    TRefCountedBase& operator=(TRefCountedBase&&) = delete;
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Frees storage of a ref-counted object. Primary template: used when T does
+//! not declare a nested TAllocator; falls back to the YTAlloc allocator.
+template <class T, class = void>
+struct TFreeMemory
+{
+    static void Do(void* ptr)
+    {
+        NYTAlloc::FreeNonNull(ptr);
+    }
+};
+
+//! Specialization selected (via detection of a nested T::TAllocator) when the
+//! type provides its own allocator; frees through that allocator instead.
+template <class T>
+struct TFreeMemory<T, std::void_t<typename T::TAllocator>>
+{
+    static void Do(void* ptr)
+    {
+        using TAllocator = typename T::TAllocator;
+        TAllocator::Free(ptr);
+    }
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Holds the strong and weak reference counters of a ref-counted object.
+//! Both counters start at 1: one strong ref for the creator and, presumably,
+//! one weak ref representing "strong refs exist" — TODO confirm against
+//! the counter manipulation in ref_counted-inl.h.
+class TRefCounter
+{
+public:
+    //! Returns current number of strong references to the object.
+    /*!
+     * Note that you should never ever use this method in production code.
+     * This method is mainly for debugging purposes.
+     */
+    int GetRefCount() const noexcept;
+
+    //! Increments the strong reference counter.
+    void Ref() const noexcept;
+
+    //! Increments the strong reference counter if it is not null.
+    bool TryRef() const noexcept;
+
+    //! Decrements the strong reference counter.
+    bool Unref() const;
+
+    //! Returns current number of weak references to the object.
+    int GetWeakRefCount() const noexcept;
+
+    //! Increments the weak reference counter.
+    void WeakRef() const noexcept;
+
+    //! Decrements the weak reference counter.
+    bool WeakUnref() const;
+
+private:
+    // Mutable so that const objects can still be ref-counted.
+    mutable std::atomic<int> StrongCount_ = 1;
+    mutable std::atomic<int> WeakCount_ = 1;
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T>
+const TRefCounter* GetRefCounter(const T* obj);
+
+template <class T>
+void DestroyRefCounted(const T* obj);
+
+template <class T>
+void DeallocateRefCounted(const T* obj);
+
+////////////////////////////////////////////////////////////////////////////////
+
+// API
+
+template <class T>
+void Ref(T* obj);
+
+template <class T>
+void Unref(T* obj);
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! The standard base for user-defined ref-counted types: combines the
+//! virtual destruction hook (TRefCountedBase) with the actual counters
+//! (TRefCounter) and shadows Unref/WeakUnref with overloads that destroy
+//! and/or deallocate the object when the counters drop to zero.
+struct TRefCounted
+    : public TRefCountedBase
+    , public TRefCounter
+{
+    void Unref() const;
+
+    void WeakUnref() const;
+
+    //! Destroys *ptr and schedules memory release; used as the
+    //! implementation of DestroyRefCounted() in derived types.
+    template <class T>
+    static void DestroyRefCountedImpl(T* ptr);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Forward declaration.
+template <class T>
+class TIntrusivePtr;
+
+using TRefCountedPtr = TIntrusivePtr<TRefCounted>;
+
+// A bunch of helpful macros that enable working with intrusive pointers to incomplete types.
+/*
+ * Typically when you have a forward-declared type |T| and an instance
+ * of |TIntrusivePtr<T>| you need the complete definition of |T| to work with
+ * the pointer even if you're not actually using the members of |T|.
+ * E.g. the dtor of |TIntrusivePtr<T>|, should you ever need it, must be able
+ * to unref an instance of |T| and eventually destroy it.
+ * This may force #inclusion of way more headers than really seems necessary.
+ *
+ * |DECLARE_REFCOUNTED_STRUCT|, |DECLARE_REFCOUNTED_CLASS|, and |DEFINE_REFCOUNTED_TYPE|
+ * alleviate this issue by forcing TIntrusivePtr to work with the free-standing overloads
+ * of |Ref| and |Unref| instead of their template version.
+ * These overloads are declared together with the forward declaration of |T| and
+ * are subsequently defined afterwards.
+ */
+
+//! Declares the |typePtr| intrusive-pointer alias for |type| together with
+//! the free-standing ref-counting helper overloads (defined later by
+//! DEFINE_REFCOUNTED_TYPE), enabling TIntrusivePtr on incomplete types.
+#define DECLARE_REFCOUNTED_TYPE(type) \
+    using type ## Ptr = ::NYT::TIntrusivePtr<type>;  \
+    \
+    [[maybe_unused]] ATTRIBUTE_USED const ::NYT::TRefCounter* GetRefCounter(const type* obj); \
+    [[maybe_unused]] ATTRIBUTE_USED void DestroyRefCounted(const type* obj); \
+    [[maybe_unused]] ATTRIBUTE_USED void DeallocateRefCounted(const type* obj);
+
+//! Forward-declares a class type, defines an intrusive pointer for it, and finally
+//! declares Ref/Unref overloads. Use this macro in |public.h|-like files.
+#define DECLARE_REFCOUNTED_CLASS(type) \
+ class type; \
+ DECLARE_REFCOUNTED_TYPE(type)
+
+//! Forward-declares a struct type, defines an intrusive pointer for it, and finally
+//! declares Ref/Unref overloads. Use this macro in |public.h|-like files.
+#define DECLARE_REFCOUNTED_STRUCT(type) \
+ struct type; \
+ DECLARE_REFCOUNTED_TYPE(type)
+
+//! Provides implementations for Ref/Unref overloads. Use this macro right
+//! after the type's full definition.
+//! Note: requires the complete definition of |type| (and TRefCountedHelper
+//! from ref_counted-inl.h) to be visible at the expansion point.
+#define DEFINE_REFCOUNTED_TYPE(type) \
+    [[maybe_unused]] ATTRIBUTE_USED Y_FORCE_INLINE const ::NYT::TRefCounter* GetRefCounter(const type* obj) \
+    { \
+        return ::NYT::TRefCountedHelper<type>::GetRefCounter(obj); \
+    } \
+    [[maybe_unused]] ATTRIBUTE_USED Y_FORCE_INLINE void DestroyRefCounted(const type* obj) \
+    { \
+        ::NYT::TRefCountedHelper<type>::Destroy(obj); \
+    } \
+    [[maybe_unused]] ATTRIBUTE_USED Y_FORCE_INLINE void DeallocateRefCounted(const type* obj) \
+    { \
+        ::NYT::TRefCountedHelper<type>::Deallocate(obj); \
+    }
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
+
+#define REF_COUNTED_INL_H_
+#include "ref_counted-inl.h"
+#undef REF_COUNTED_INL_H_
diff --git a/library/cpp/yt/memory/ref_tracked-inl.h b/library/cpp/yt/memory/ref_tracked-inl.h
new file mode 100644
index 0000000000..4bde72881d
--- /dev/null
+++ b/library/cpp/yt/memory/ref_tracked-inl.h
@@ -0,0 +1,49 @@
+#ifndef REF_TRACKED_INL_H_
+#error "Direct inclusion of this file is not allowed, include ref_tracked.h"
+// For the sake of sane code completion.
+#include "ref_tracked.h"
+#endif
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Returns a process-wide unique key identifying type T
+//! (the address of its type_info).
+template <class T>
+TRefCountedTypeKey GetRefCountedTypeKey()
+{
+    return &typeid(T);
+}
+
+//! Returns the (lazily initialized) ref-counted tracker cookie for type T.
+/*!
+ * The cookie is cached in a function-local atomic with relaxed ordering;
+ * the race on first use is benign — GetCookie may be invoked more than
+ * once, but all callers end up storing an equivalent cookie value.
+ */
+template <class T>
+Y_FORCE_INLINE TRefCountedTypeCookie GetRefCountedTypeCookie()
+{
+    static std::atomic<TRefCountedTypeCookie> cookie{NullRefCountedTypeCookie};
+    auto cookieValue = cookie.load(std::memory_order_relaxed);
+    if (Y_UNLIKELY(cookieValue == NullRefCountedTypeCookie)) {
+        cookieValue = TRefCountedTrackerFacade::GetCookie(
+            GetRefCountedTypeKey<T>(),
+            sizeof(T),
+            NYT::TSourceLocation());
+        cookie.store(cookieValue, std::memory_order_relaxed);
+    }
+    return cookieValue;
+}
+
+//! Same as GetRefCountedTypeCookie but attributes the allocations to a
+//! specific source location; the TTag/Counter parameters give each call
+//! site its own static cookie cache. Same benign first-use race as above.
+template <class T, class TTag, int Counter>
+Y_FORCE_INLINE TRefCountedTypeCookie GetRefCountedTypeCookieWithLocation(const TSourceLocation& location)
+{
+    static std::atomic<TRefCountedTypeCookie> cookie{NullRefCountedTypeCookie};
+    auto cookieValue = cookie.load(std::memory_order_relaxed);
+    if (Y_UNLIKELY(cookieValue == NullRefCountedTypeCookie)) {
+        cookieValue = TRefCountedTrackerFacade::GetCookie(
+            GetRefCountedTypeKey<T>(),
+            sizeof(T),
+            location);
+        cookie.store(cookieValue, std::memory_order_relaxed);
+    }
+    return cookieValue;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
diff --git a/library/cpp/yt/memory/ref_tracked.cpp b/library/cpp/yt/memory/ref_tracked.cpp
new file mode 100644
index 0000000000..4dafbc0849
--- /dev/null
+++ b/library/cpp/yt/memory/ref_tracked.cpp
@@ -0,0 +1,38 @@
+#include "ref_tracked.h"
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Weak no-op fallbacks for the ref-counted tracker facade. When the real
+// tracker implementation is linked in, its strong symbols override these;
+// otherwise tracking silently degrades to no-ops with a null cookie.
+Y_WEAK TRefCountedTypeCookie TRefCountedTrackerFacade::GetCookie(
+    TRefCountedTypeKey /*typeKey*/,
+    size_t /*instanceSize*/,
+    const TSourceLocation& /*location*/)
+{
+    return NullRefCountedTypeCookie;
+}
+
+Y_WEAK void TRefCountedTrackerFacade::AllocateInstance(TRefCountedTypeCookie /*cookie*/)
+{ }
+
+Y_WEAK void TRefCountedTrackerFacade::FreeInstance(TRefCountedTypeCookie /*cookie*/)
+{ }
+
+Y_WEAK void TRefCountedTrackerFacade::AllocateTagInstance(TRefCountedTypeCookie /*cookie*/)
+{ }
+
+Y_WEAK void TRefCountedTrackerFacade::FreeTagInstance(TRefCountedTypeCookie /*cookie*/)
+{ }
+
+Y_WEAK void TRefCountedTrackerFacade::AllocateSpace(TRefCountedTypeCookie /*cookie*/, size_t /*size*/)
+{ }
+
+Y_WEAK void TRefCountedTrackerFacade::FreeSpace(TRefCountedTypeCookie /*cookie*/, size_t /*size*/)
+{ }
+
+Y_WEAK void TRefCountedTrackerFacade::Dump()
+{ }
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
diff --git a/library/cpp/yt/memory/ref_tracked.h b/library/cpp/yt/memory/ref_tracked.h
new file mode 100644
index 0000000000..75c1eb5985
--- /dev/null
+++ b/library/cpp/yt/memory/ref_tracked.h
@@ -0,0 +1,111 @@
+#pragma once
+
+#include <library/cpp/yt/misc/port.h>
+#include <library/cpp/yt/misc/source_location.h>
+
+#include <util/system/defaults.h>
+
+#include <atomic>
+#include <typeinfo>
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+using TRefCountedTypeCookie = int;
+const int NullRefCountedTypeCookie = -1;
+
+using TRefCountedTypeKey = const void*;
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Used to avoid including heavy ref_counted_tracker.h.
+// All members have Y_WEAK no-op defaults (see ref_tracked.cpp) that the
+// real tracker overrides when linked in.
+class TRefCountedTrackerFacade
+{
+public:
+    //! Maps (type, size, location) to a tracker cookie used by the
+    //! Allocate*/Free* calls below.
+    static TRefCountedTypeCookie GetCookie(
+        TRefCountedTypeKey typeKey,
+        size_t instanceSize,
+        const NYT::TSourceLocation& location);
+
+    //! Instance-count accounting.
+    static void AllocateInstance(TRefCountedTypeCookie cookie);
+    static void FreeInstance(TRefCountedTypeCookie cookie);
+
+    //! Tag-instance accounting.
+    static void AllocateTagInstance(TRefCountedTypeCookie cookie);
+    static void FreeTagInstance(TRefCountedTypeCookie cookie);
+
+    //! Byte-count accounting.
+    static void AllocateSpace(TRefCountedTypeCookie cookie, size_t size);
+    static void FreeSpace(TRefCountedTypeCookie cookie, size_t size);
+
+    // Typically invoked from GDB console.
+    // Dumps the ref-counted statistics sorted by "bytes alive".
+    static void Dump();
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+namespace {
+
+//! A per-translation unit tag type.
+struct TCurrentTranslationUnitTag
+{ };
+
+} // namespace
+
+template <class T>
+TRefCountedTypeKey GetRefCountedTypeKey();
+
+template <class T>
+TRefCountedTypeCookie GetRefCountedTypeCookie();
+
+template <class T, class TTag, int Counter>
+TRefCountedTypeCookie GetRefCountedTypeCookieWithLocation(
+ const TSourceLocation& location);
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! A lightweight mix-in that integrates any class into TRefCountedTracker statistics.
+/*!
+ * |T| must be the actual derived type.
+ *
+ * This mix-in provides statistical tracking only, |T| is responsible for implementing
+ * lifetime management on its own.
+ */
+template <class T>
+class TRefTracked
+{
+public:
+#ifdef YT_ENABLE_REF_COUNTED_TRACKING
+    // Every construction path (default, copy, move) registers a new live
+    // instance of T with the tracker; the destructor unregisters it.
+    // With tracking disabled this mix-in is an empty class.
+    TRefTracked()
+    {
+        auto cookie = GetRefCountedTypeCookie<T>();
+        TRefCountedTrackerFacade::AllocateInstance(cookie);
+    }
+
+    TRefTracked(const TRefTracked&)
+    {
+        auto cookie = GetRefCountedTypeCookie<T>();
+        TRefCountedTrackerFacade::AllocateInstance(cookie);
+    }
+
+    TRefTracked(TRefTracked&&)
+    {
+        auto cookie = GetRefCountedTypeCookie<T>();
+        TRefCountedTrackerFacade::AllocateInstance(cookie);
+    }
+
+    ~TRefTracked()
+    {
+        auto cookie = GetRefCountedTypeCookie<T>();
+        TRefCountedTrackerFacade::FreeInstance(cookie);
+    }
+#endif
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
+
+#define REF_TRACKED_INL_H_
+#include "ref_tracked-inl.h"
+#undef REF_TRACKED_INL_H_
diff --git a/library/cpp/yt/memory/shared_range.h b/library/cpp/yt/memory/shared_range.h
new file mode 100644
index 0000000000..9841d7a0df
--- /dev/null
+++ b/library/cpp/yt/memory/shared_range.h
@@ -0,0 +1,297 @@
+#pragma once
+
+#include "intrusive_ptr.h"
+#include "range.h"
+#include "ref_counted.h"
+
+#include <library/cpp/yt/assert/assert.h>
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+template <class T, size_t N>
+class TCompactVector;
+
+//! TRange with ownership semantics.
+template <class T>
+class TSharedRange
+    : public TRange<T>
+{
+public:
+    //! A ref-counted holder keeping the underlying storage alive
+    //! for as long as any TSharedRange referencing it exists.
+    using THolderPtr = TRefCountedPtr;
+
+    //! Constructs a null TSharedRange.
+    TSharedRange()
+    { }
+
+    //! Constructs a TSharedRange from TRange.
+    TSharedRange(TRange<T> range, THolderPtr holder)
+        : TRange<T>(range)
+        , Holder_(std::move(holder))
+    { }
+
+    //! Constructs a TSharedRange from a pointer and length.
+    TSharedRange(const T* data, size_t length, THolderPtr holder)
+        : TRange<T>(data, length)
+        , Holder_(std::move(holder))
+    { }
+
+    //! Constructs a TSharedRange from a range.
+    TSharedRange(const T* begin, const T* end, THolderPtr holder)
+        : TRange<T>(begin, end)
+        , Holder_(std::move(holder))
+    { }
+
+    //! Constructs a TSharedRange from a TCompactVector.
+    template <size_t N>
+    TSharedRange(const TCompactVector<T, N>& elements, THolderPtr holder)
+        : TRange<T>(elements)
+        , Holder_(std::move(holder))
+    { }
+
+    //! Constructs a TSharedRange from an std::vector.
+    TSharedRange(const std::vector<T>& elements, THolderPtr holder)
+        : TRange<T>(elements)
+        , Holder_(std::move(holder))
+    { }
+
+    //! Constructs a TSharedRange from a C array.
+    template <size_t N>
+    TSharedRange(const T (& elements)[N], THolderPtr holder)
+        : TRange<T>(elements)
+        , Holder_(std::move(holder))
+    { }
+
+
+    //! Makes the range null and drops the holder (possibly releasing
+    //! the underlying storage).
+    void Reset()
+    {
+        TRange<T>::Data_ = nullptr;
+        TRange<T>::Length_ = 0;
+        Holder_.Reset();
+    }
+
+    //! Returns a subrange [startOffset, endOffset) sharing the same holder.
+    TSharedRange<T> Slice(size_t startOffset, size_t endOffset) const
+    {
+        YT_ASSERT(startOffset <= this->Size());
+        YT_ASSERT(endOffset >= startOffset && endOffset <= this->Size());
+        return TSharedRange<T>(this->Begin() + startOffset, endOffset - startOffset, Holder_);
+    }
+
+    //! Returns the subrange [begin, end) sharing the same holder;
+    //! both pointers must lie within this range.
+    TSharedRange<T> Slice(const T* begin, const T* end) const
+    {
+        YT_ASSERT(begin >= this->Begin());
+        YT_ASSERT(end <= this->End());
+        return TSharedRange<T>(begin, end, Holder_);
+    }
+
+    const THolderPtr& GetHolder() const
+    {
+        return Holder_;
+    }
+
+    //! Moves the holder out; afterwards the range no longer keeps
+    //! the underlying storage alive.
+    THolderPtr&& ReleaseHolder()
+    {
+        return std::move(Holder_);
+    }
+
+protected:
+    THolderPtr Holder_;
+
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Constructs a combined holder instance by taking ownership of a given list of holders.
+template <class... THolders>
+TRefCountedPtr MakeCompositeHolder(THolders&&... holders)
+{
+    // Local ref-counted type owning decayed copies/moves of all holders;
+    // they are released together when the composite holder dies.
+    struct THolder
+        : public TRefCounted
+    {
+        std::tuple<typename std::decay<THolders>::type...> Holders;
+    };
+
+    auto holder = New<THolder>();
+    holder->Holders = std::tuple<THolders...>(std::forward<THolders>(holders)...);
+    return holder;
+}
+
+//! Common implementation for MakeSharedRange: moves/copies the container
+//! (and any extra holders) into a ref-counted holder and builds a range
+//! over the holder-owned elements.
+template <class T, class TContainer, class... THolders>
+TSharedRange<T> DoMakeSharedRange(TContainer&& elements, THolders&&... holders)
+{
+    struct THolder
+        : public TRefCounted
+    {
+        typename std::decay<TContainer>::type Elements;
+        std::tuple<typename std::decay<THolders>::type...> Holders;
+    };
+
+    auto holder = New<THolder>();
+    holder->Holders = std::tuple<THolders...>(std::forward<THolders>(holders)...);
+    holder->Elements = std::forward<TContainer>(elements);
+
+    // The range must point into the holder-owned container, not the argument.
+    auto range = MakeRange<T>(holder->Elements);
+
+    return TSharedRange<T>(range, std::move(holder));
+}
+
+//! Constructs a TSharedRange by taking ownership of an std::vector.
+template <class T, class... THolders>
+TSharedRange<T> MakeSharedRange(std::vector<T>&& elements, THolders&&... holders)
+{
+ return DoMakeSharedRange<T>(std::move(elements), std::forward<THolders>(holders)...);
+}
+
+//! Constructs a TSharedRange by taking ownership of an TCompactVector.
+template <class T, size_t N, class... THolders>
+TSharedRange<T> MakeSharedRange(TCompactVector<T, N>&& elements, THolders&&... holders)
+{
+ return DoMakeSharedRange<T>(std::move(elements), std::forward<THolders>(holders)...);
+}
+
+//! Constructs a TSharedRange by copying an std::vector.
+template <class T, class... THolders>
+TSharedRange<T> MakeSharedRange(const std::vector<T>& elements, THolders&&... holders)
+{
+ return DoMakeSharedRange<T>(elements, std::forward<THolders>(holders)...);
+}
+
+//! Constructs a TSharedRange over an existing range, keeping the given
+//! holders alive; the range itself is not copied.
+template <class T, class... THolders>
+TSharedRange<T> MakeSharedRange(TRange<T> range, THolders&&... holders)
+{
+    return TSharedRange<T>(range, MakeCompositeHolder(std::forward<THolders>(holders)...));
+}
+
+//! Constructs a TSharedRange over an existing range owned by a single
+//! intrusive-pointer holder (no composite holder allocation).
+template <class T, class THolder>
+TSharedRange<T> MakeSharedRange(TRange<T> range, TIntrusivePtr<THolder> holder)
+{
+    return TSharedRange<T>(range, std::move(holder));
+}
+
+//! Reinterprets a shared range of T as a shared range of U, retaining
+//! the same holder. Only valid for layout-compatible types of equal size.
+template <class U, class T>
+TSharedRange<U> ReinterpretCastRange(const TSharedRange<T>& range)
+{
+    static_assert(sizeof(T) == sizeof(U), "T and U must have equal sizes.");
+    return TSharedRange<U>(reinterpret_cast<const U*>(range.Begin()), range.Size(), range.GetHolder());
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! TMutableRange with ownership semantics.
+//! Use with caution :)
+template <class T>
+class TSharedMutableRange
+    : public TMutableRange<T>
+{
+public:
+    //! A ref-counted holder keeping the underlying storage alive
+    //! for as long as any TSharedMutableRange referencing it exists.
+    using THolderPtr = TRefCountedPtr;
+
+    //! Constructs a null TSharedMutableRange.
+    TSharedMutableRange()
+    { }
+
+    //! Constructs a TSharedMutableRange from TMutableRange.
+    TSharedMutableRange(TMutableRange<T> range, THolderPtr holder)
+        : TMutableRange<T>(range)
+        , Holder_(std::move(holder))
+    { }
+
+    //! Constructs a TSharedMutableRange from a pointer and length.
+    TSharedMutableRange(T* data, size_t length, THolderPtr holder)
+        : TMutableRange<T>(data, length)
+        , Holder_(std::move(holder))
+    { }
+
+    //! Constructs a TSharedMutableRange from a range.
+    TSharedMutableRange(T* begin, T* end, THolderPtr holder)
+        : TMutableRange<T>(begin, end)
+        , Holder_(std::move(holder))
+    { }
+
+    //! Constructs a TSharedMutableRange from a TCompactVector.
+    template <size_t N>
+    TSharedMutableRange(TCompactVector<T, N>& elements, THolderPtr holder)
+        : TMutableRange<T>(elements)
+        , Holder_(std::move(holder))
+    { }
+
+    //! Constructs a TSharedMutableRange from an std::vector.
+    TSharedMutableRange(std::vector<T>& elements, THolderPtr holder)
+        : TMutableRange<T>(elements)
+        , Holder_(std::move(holder))
+    { }
+
+    //! Constructs a TSharedMutableRange from a C array.
+    template <size_t N>
+    TSharedMutableRange(T (& elements)[N], THolderPtr holder)
+        : TMutableRange<T>(elements)
+        , Holder_(std::move(holder))
+    { }
+
+
+    //! Makes the range null and drops the holder (possibly releasing
+    //! the underlying storage). Data_/Length_ live in the TRange base.
+    void Reset()
+    {
+        TRange<T>::Data_ = nullptr;
+        TRange<T>::Length_ = 0;
+        Holder_.Reset();
+    }
+
+    //! Returns a subrange [startOffset, endOffset) sharing the same holder.
+    TSharedMutableRange<T> Slice(size_t startOffset, size_t endOffset) const
+    {
+        YT_ASSERT(startOffset <= this->Size());
+        YT_ASSERT(endOffset >= startOffset && endOffset <= this->Size());
+        return TSharedMutableRange<T>(this->Begin() + startOffset, endOffset - startOffset, Holder_);
+    }
+
+    //! Returns the subrange [begin, end) sharing the same holder;
+    //! both pointers must lie within this range.
+    TSharedMutableRange<T> Slice(T* begin, T* end) const
+    {
+        YT_ASSERT(begin >= this->Begin());
+        YT_ASSERT(end <= this->End());
+        return TSharedMutableRange<T>(begin, end, Holder_);
+    }
+
+    // NOTE(review): returns the holder by value (an extra ref-count bump),
+    // whereas TSharedRange::GetHolder returns const& — confirm whether the
+    // inconsistency is intentional.
+    THolderPtr GetHolder() const
+    {
+        return Holder_;
+    }
+
+    //! Moves the holder out; afterwards the range no longer keeps
+    //! the underlying storage alive.
+    THolderPtr&& ReleaseHolder()
+    {
+        return std::move(Holder_);
+    }
+
+protected:
+    THolderPtr Holder_;
+
+};
+
+//! Common implementation for MakeSharedMutableRange: moves/copies the
+//! container (and any extra holders) into a ref-counted holder and builds
+//! a mutable range over the holder-owned elements.
+template <class T, class TContainer, class... THolders>
+TSharedMutableRange<T> DoMakeSharedMutableRange(TContainer&& elements, THolders&&... holders)
+{
+    struct THolder
+        : public TRefCounted
+    {
+        typename std::decay<TContainer>::type Elements;
+        std::tuple<typename std::decay<THolders>::type...> Holders;
+    };
+
+    auto holder = New<THolder>();
+    holder->Holders = std::tuple<THolders...>(std::forward<THolders>(holders)...);
+    holder->Elements = std::forward<TContainer>(elements);
+
+    // The range must point into the holder-owned container, not the argument.
+    auto range = TMutableRange<T>(holder->Elements);
+
+    // Move the holder into the result (matches DoMakeSharedRange and avoids
+    // an extra atomic ref-count increment/decrement pair).
+    return TSharedMutableRange<T>(range, std::move(holder));
+}
+
+//! Constructs a TSharedMutableRange by taking ownership of an std::vector.
+template <class T, class... THolders>
+TSharedMutableRange<T> MakeSharedMutableRange(std::vector<T>&& elements, THolders&&... holders)
+{
+    // The vector is moved into the holder; extra holders are kept alive too.
+    return DoMakeSharedMutableRange<T>(std::move(elements), std::forward<THolders>(holders)...);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
diff --git a/library/cpp/yt/memory/unittests/intrusive_ptr_ut.cpp b/library/cpp/yt/memory/unittests/intrusive_ptr_ut.cpp
new file mode 100644
index 0000000000..622bed0eb0
--- /dev/null
+++ b/library/cpp/yt/memory/unittests/intrusive_ptr_ut.cpp
@@ -0,0 +1,562 @@
+#include <library/cpp/testing/gtest/gtest.h>
+
+#include <library/cpp/yt/memory/new.h>
+#include <library/cpp/yt/memory/ref_counted.h>
+
+namespace NYT {
+namespace {
+
+////////////////////////////////////////////////////////////////////////////////
+
+using ::testing::IsNull;
+using ::testing::NotNull;
+using ::testing::InSequence;
+using ::testing::MockFunction;
+using ::testing::StrictMock;
+
+////////////////////////////////////////////////////////////////////////////////
+// Auxiliary types and functions.
+////////////////////////////////////////////////////////////////////////////////
+
+// This object tracks number of increments and decrements
+// to the reference counter (see traits specialization below).
+//! Test double with hand-rolled (non-atomic) ref counting; records every
+//! Ref/Unref and how many times the counter returned to zero so tests can
+//! assert exact ref-count traffic via the HasRefCounts matcher.
+struct TIntricateObject
+    : private TNonCopyable
+{
+    int Increments = 0;
+    int Decrements = 0;
+    int Zeros = 0;
+
+    void Ref()
+    {
+        ++Increments;
+    }
+
+    void Unref()
+    {
+        ++Decrements;
+        // The object is deliberately NOT destroyed at zero; tests own it by value.
+        if (Increments == Decrements) {
+            ++Zeros;
+        }
+    }
+};
+
+typedef TIntrusivePtr<TIntricateObject> TIntricateObjectPtr;
+
+// Free-standing Ref/Unref overloads picked up by TIntrusivePtr
+// instead of the generic template versions.
+void Ref(TIntricateObject* obj)
+{
+    obj->Ref();
+}
+
+void Unref(TIntricateObject* obj)
+{
+    obj->Unref();
+}
+
+// GTest matcher asserting the exact (increments, decrements, zeros)
+// triple recorded by a TIntricateObject.
+MATCHER_P3(HasRefCounts, increments, decrements, zeros,
+    "Reference counter " \
+    "was incremented " + ::testing::PrintToString(increments) + " times, " +
+    "was decremented " + ::testing::PrintToString(decrements) + " times, " +
+    "vanished to zero " + ::testing::PrintToString(zeros) + " times")
+{
+    Y_UNUSED(result_listener);
+    return
+        arg.Increments == increments &&
+        arg.Decrements == decrements &&
+        arg.Zeros == zeros;
+}
+
+// GTest pretty-printer for TIntricateObject failure messages.
+void PrintTo(const TIntricateObject& arg, ::std::ostream* os)
+{
+    *os << arg.Increments << " increments, "
+        << arg.Decrements << " decrements and "
+        << arg.Zeros << " times vanished";
+}
+
+// This is an object which creates intrusive pointers to the self
+// during its construction.
+class TObjectWithSelfPointers
+    : public TRefCounted
+{
+public:
+    //! Writes a trace of its lifetime to #output: "Cb" on ctor entry,
+    //! '!' per transient self-pointer, "Ca" on ctor exit, 'D' on dtor.
+    //! The transient TIntrusivePtr(this) instances must not destroy the
+    //! object mid-construction — that is what the test verifies.
+    explicit TObjectWithSelfPointers(IOutputStream* output)
+        : Output_(output)
+    {
+        *Output_ << "Cb";
+
+        for (int i = 0; i < 3; ++i) {
+            *Output_ << '!';
+            TIntrusivePtr<TObjectWithSelfPointers> ptr(this);
+        }
+
+        *Output_ << "Ca";
+    }
+
+    virtual ~TObjectWithSelfPointers()
+    {
+        *Output_ << 'D';
+    }
+
+private:
+    IOutputStream* const Output_;
+
+};
+
+// This is a simple object with simple reference counting.
+// Traces 'C' on construction, '!' per DoSomething, 'D' on destruction,
+// so tests can assert the exact lifetime sequence.
+class TObjectWithSimpleRC
+    : public TRefCounted
+{
+public:
+    explicit TObjectWithSimpleRC(IOutputStream* output)
+        : Output_(output)
+    {
+        *Output_ << 'C';
+    }
+
+    virtual ~TObjectWithSimpleRC()
+    {
+        *Output_ << 'D';
+    }
+
+    void DoSomething()
+    {
+        *Output_ << '!';
+    }
+
+private:
+    IOutputStream* const Output_;
+
+};
+
+// This is a simple object with full-fledged reference counting.
+// Same tracing contract as TObjectWithSimpleRC; kept as a separate type so
+// both ref-counting flavors are exercised by TestIntrusivePtrBehavior.
+class TObjectWithFullRC
+    : public TRefCounted
+{
+public:
+    explicit TObjectWithFullRC(IOutputStream* output)
+        : Output_(output)
+    {
+        *Output_ << 'C';
+    }
+
+    virtual ~TObjectWithFullRC()
+    {
+        *Output_ << 'D';
+    }
+
+    void DoSomething()
+    {
+        *Output_ << '!';
+    }
+
+private:
+    IOutputStream* const Output_;
+
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+TEST(TIntrusivePtrTest, Empty)
+{
+ TIntricateObjectPtr emptyPointer;
+ EXPECT_EQ(nullptr, emptyPointer.Get());
+}
+
+TEST(TIntrusivePtrTest, Basic)
+{
+ TIntricateObject object;
+
+ EXPECT_THAT(object, HasRefCounts(0, 0, 0));
+
+ {
+ TIntricateObjectPtr owningPointer(&object);
+ EXPECT_THAT(object, HasRefCounts(1, 0, 0));
+ EXPECT_EQ(&object, owningPointer.Get());
+ }
+
+ EXPECT_THAT(object, HasRefCounts(1, 1, 1));
+
+ {
+ TIntricateObjectPtr nonOwningPointer(&object, false);
+ EXPECT_THAT(object, HasRefCounts(1, 1, 1));
+ EXPECT_EQ(&object, nonOwningPointer.Get());
+ }
+
+ EXPECT_THAT(object, HasRefCounts(1, 2, 1));
+}
+
+TEST(TIntrusivePtrTest, ResetToNull)
+{
+ TIntricateObject object;
+ TIntricateObjectPtr ptr(&object);
+
+ EXPECT_THAT(object, HasRefCounts(1, 0, 0));
+ EXPECT_EQ(&object, ptr.Get());
+
+ ptr.Reset();
+
+ EXPECT_THAT(object, HasRefCounts(1, 1, 1));
+ EXPECT_EQ(nullptr, ptr.Get());
+}
+
+TEST(TIntrusivePtrTest, ResetToOtherObject)
+{
+ TIntricateObject firstObject;
+ TIntricateObject secondObject;
+
+ TIntricateObjectPtr ptr(&firstObject);
+
+ EXPECT_THAT(firstObject, HasRefCounts(1, 0, 0));
+ EXPECT_THAT(secondObject, HasRefCounts(0, 0, 0));
+ EXPECT_EQ(&firstObject, ptr.Get());
+
+ ptr.Reset(&secondObject);
+
+ EXPECT_THAT(firstObject, HasRefCounts(1, 1, 1));
+ EXPECT_THAT(secondObject, HasRefCounts(1, 0, 0));
+ EXPECT_EQ(&secondObject, ptr.Get());
+}
+
+TEST(TIntrusivePtrTest, CopySemantics)
+{
+ TIntricateObject object;
+
+ TIntricateObjectPtr foo(&object);
+ EXPECT_THAT(object, HasRefCounts(1, 0, 0));
+
+ {
+ TIntricateObjectPtr bar(foo);
+ EXPECT_THAT(object, HasRefCounts(2, 0, 0));
+ EXPECT_EQ(&object, foo.Get());
+ EXPECT_EQ(&object, bar.Get());
+ }
+
+ EXPECT_THAT(object, HasRefCounts(2, 1, 0));
+
+ {
+ TIntricateObjectPtr bar;
+ bar = foo;
+
+ EXPECT_THAT(object, HasRefCounts(3, 1, 0));
+ EXPECT_EQ(&object, foo.Get());
+ EXPECT_EQ(&object, bar.Get());
+ }
+
+ EXPECT_THAT(object, HasRefCounts(3, 2, 0));
+}
+
+TEST(TIntrusivePtrTest, MoveSemantics)
+{
+ TIntricateObject object;
+
+ TIntricateObjectPtr foo(&object);
+ EXPECT_THAT(object, HasRefCounts(1, 0, 0));
+
+ {
+ TIntricateObjectPtr bar(std::move(foo));
+ EXPECT_THAT(object, HasRefCounts(1, 0, 0));
+ EXPECT_THAT(foo.Get(), IsNull());
+ EXPECT_EQ(&object, bar.Get());
+ }
+
+ EXPECT_THAT(object, HasRefCounts(1, 1, 1));
+ foo.Reset(&object);
+ EXPECT_THAT(object, HasRefCounts(2, 1, 1));
+
+ {
+ TIntricateObjectPtr bar;
+ bar = std::move(foo);
+ EXPECT_THAT(object, HasRefCounts(2, 1, 1));
+ EXPECT_THAT(foo.Get(), IsNull());
+ EXPECT_EQ(&object, bar.Get());
+ }
+}
+
+TEST(TIntrusivePtrTest, Swap)
+{
+ TIntricateObject object;
+
+ TIntricateObjectPtr foo(&object);
+ TIntricateObjectPtr bar;
+
+ EXPECT_THAT(object, HasRefCounts(1, 0, 0));
+ EXPECT_THAT(foo.Get(), NotNull());
+ EXPECT_THAT(bar.Get(), IsNull());
+
+ foo.Swap(bar);
+
+ EXPECT_THAT(object, HasRefCounts(1, 0, 0));
+ EXPECT_THAT(foo.Get(), IsNull());
+ EXPECT_THAT(bar.Get(), NotNull());
+
+ foo.Swap(bar);
+
+ EXPECT_THAT(object, HasRefCounts(1, 0, 0));
+ EXPECT_THAT(foo.Get(), NotNull());
+ EXPECT_THAT(bar.Get(), IsNull());
+}
+
+TEST(TIntrusivePtrTest, UpCast)
+{
+ //! This is a simple typical reference-counted object.
+ class TSimpleObject
+ : public TRefCounted
+ { };
+
+ //! This is a simple inherited reference-counted object.
+ class TAnotherObject
+ : public TSimpleObject
+ { };
+
+ auto foo = New<TSimpleObject>();
+ auto bar = New<TAnotherObject>();
+ auto baz = New<TAnotherObject>();
+
+ foo = baz;
+
+ EXPECT_TRUE(foo == baz);
+}
+
+TEST(TIntrusivePtrTest, DownCast)
+{
+ class TBaseObject
+ : public TRefCounted
+ { };
+
+ class TDerivedObject
+ : public TBaseObject
+ { };
+
+ //! This is a simple inherited reference-counted object.
+ class TAnotherObject
+ : public TBaseObject
+ { };
+
+ TIntrusivePtr<TBaseObject> foo = New<TDerivedObject>();
+ TIntrusivePtr<TBaseObject> bar = New<TAnotherObject>();
+ {
+ auto baz = StaticPointerCast<TDerivedObject>(foo);
+ EXPECT_TRUE(foo == baz);
+ }
+ {
+ auto baz = StaticPointerCast<TDerivedObject>(TIntrusivePtr<TBaseObject>{foo});
+ EXPECT_TRUE(foo == baz);
+ }
+ {
+ auto baz = DynamicPointerCast<TDerivedObject>(foo);
+ EXPECT_TRUE(foo == baz);
+ }
+ {
+ auto baz = DynamicPointerCast<TDerivedObject>(bar);
+ EXPECT_TRUE(nullptr == baz);
+ }
+ {
+ auto baz = ConstPointerCast<const TBaseObject>(foo);
+ EXPECT_TRUE(foo.Get() == baz.Get());
+ }
+ {
+ auto baz = ConstPointerCast<const TBaseObject>(TIntrusivePtr<TBaseObject>{foo});
+ EXPECT_TRUE(foo.Get() == baz.Get());
+ }
+}
+
+TEST(TIntrusivePtrTest, UnspecifiedBoolType)
+{
+ TIntricateObject object;
+
+ TIntricateObjectPtr foo;
+ TIntricateObjectPtr bar(&object);
+
+ EXPECT_FALSE(foo);
+ EXPECT_TRUE(bar);
+}
+
+// Transient self-pointers created inside the constructor must not destroy
+// the object before construction finishes ("Ca" precedes the single 'D').
+TEST(TIntrusivePtrTest, ObjectIsNotDestroyedPrematurely)
+{
+    TStringStream output;
+    New<TObjectWithSelfPointers>(&output);
+
+    // TObject... appends symbols to the output; see definitions.
+    EXPECT_STREQ("Cb!!!CaD", output.Str().c_str());
+}
+
+TEST(TIntrusivePtrTest, EqualityOperator)
+{
+ TIntricateObject object, anotherObject;
+
+ TIntricateObjectPtr emptyPointer;
+ TIntricateObjectPtr somePointer(&object);
+ TIntricateObjectPtr samePointer(&object);
+ TIntricateObjectPtr anotherPointer(&anotherObject);
+
+ EXPECT_FALSE(somePointer == emptyPointer);
+ EXPECT_FALSE(samePointer == emptyPointer);
+
+ EXPECT_TRUE(somePointer != emptyPointer);
+ EXPECT_TRUE(samePointer != emptyPointer);
+
+ EXPECT_TRUE(somePointer == samePointer);
+
+ EXPECT_TRUE(&object == somePointer);
+ EXPECT_TRUE(&object == samePointer);
+
+ EXPECT_FALSE(somePointer == anotherPointer);
+ EXPECT_TRUE(somePointer != anotherPointer);
+
+ EXPECT_TRUE(&anotherObject == anotherPointer);
+}
+
+// NOTE(review): despite the name, this exercises Release(): the pointer is
+// detached without an Unref, leaving the object with a dangling strong ref.
+TEST(TIntrusivePtrTest, Reset)
+{
+    TIntricateObject object;
+    TIntricateObjectPtr pointer(&object);
+    EXPECT_THAT(object, HasRefCounts(1, 0, 0));
+    EXPECT_EQ(&object, pointer.Release());
+    EXPECT_THAT(object, HasRefCounts(1, 0, 0));
+}
+
+TEST(TIntrusivePtrTest, CompareWithNullptr)
+{
+ TIntricateObjectPtr pointer1;
+ EXPECT_TRUE(nullptr == pointer1);
+ EXPECT_FALSE(nullptr != pointer1);
+ TIntricateObject object;
+ TIntricateObjectPtr pointer2(&object);
+ EXPECT_TRUE(pointer2 != nullptr);
+ EXPECT_FALSE(pointer2 == nullptr);
+}
+
+
+//! Shared test driver: creates a T tracing to #output, copies the pointer
+//! twice in nested scopes, and checks exactly one construction ('C'),
+//! three calls ('!') and one destruction ('D') occurred.
+template <class T>
+void TestIntrusivePtrBehavior()
+{
+    typedef TIntrusivePtr<T> TMyPtr;
+
+    TStringStream output;
+    {
+        TMyPtr ptr(New<T>(&output));
+        {
+            TMyPtr anotherPtr(ptr);
+            anotherPtr->DoSomething();
+        }
+        {
+            TMyPtr anotherPtr(ptr);
+            anotherPtr->DoSomething();
+        }
+        ptr->DoSomething();
+    }
+
+    // TObject... appends symbols to the output; see definitions.
+    EXPECT_STREQ("C!!!D", output.Str().c_str());
+}
+
+TEST(TIntrusivePtrTest, SimpleRCBehaviour)
+{
+ TestIntrusivePtrBehavior<TObjectWithSimpleRC>();
+}
+
+TEST(TIntrusivePtrTest, FullRCBehaviour)
+{
+ TestIntrusivePtrBehavior<TObjectWithFullRC>();
+}
+
+// New<T>() must honor over-aligned types (alignas(64) member) for both
+// TRefCounted-derived and plain final types.
+TEST(TIntrusivePtrTest, ObjectAlignment)
+{
+    struct TObject
+        : public TRefCounted
+    {
+        alignas(64) ui64 Data;
+    };
+
+    struct TPODObject final
+    {
+        alignas(64) ui64 Data;
+    };
+
+    auto foo = New<TObject>();
+    auto bar = New<TPODObject>();
+
+    EXPECT_TRUE(reinterpret_cast<uintptr_t>(foo.Get()) % 64 == 0);
+    EXPECT_TRUE(reinterpret_cast<uintptr_t>(bar.Get()) % 64 == 0);
+}
+
+// New<T>(args...) must support both aggregate initialization (TObj1, TObj5,
+// TObj6) and constructor calls, including explicit and defaulted-argument
+// constructors; compilation of every call below is the assertion.
+TEST(TIntrusivePtrTest, InitStruct)
+{
+    struct TObj1 final
+    {
+        const int A;
+        const int B;
+    };
+
+    New<TObj1>(1, 2);
+
+    struct TExplicitObj final
+    {
+        explicit TExplicitObj(int a = 0)
+            : A(a)
+        { }
+
+        const int A;
+    };
+
+    New<TExplicitObj>();
+    New<TExplicitObj>(1);
+
+    struct TObj2 final
+    {
+        TObj2(i64 a = 0)
+            : A(a)
+        { }
+
+        const i64 A;
+    };
+
+    New<TObj2>(123);
+
+    struct TObj3 final
+    {
+        TObj3(ui64 a = 0)
+            : A(a)
+        { }
+
+        const ui64 A;
+    };
+
+    New<TObj3>(123);
+
+    struct TObj4 final
+    {
+        TObj4(int a, ui64 b = 0)
+            : A(a)
+            , B(b)
+        { }
+
+        int A;
+        const ui64 B;
+    };
+
+    New<TObj4>(123);
+    New<TObj4>(123, 123);
+
+    // Aggregates with non-aggregate members.
+    struct TObj5 final
+    {
+        TExplicitObj E;
+        int B;
+    };
+
+    New<TObj5>();
+
+    struct TObj6 final
+    {
+        TObj2 O;
+        int B;
+    };
+
+    New<TObj6>();
+    New<TObj6>(1, 2);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace
+} // namespace NYT
diff --git a/library/cpp/yt/memory/unittests/weak_ptr_ut.cpp b/library/cpp/yt/memory/unittests/weak_ptr_ut.cpp
new file mode 100644
index 0000000000..180c16b5ca
--- /dev/null
+++ b/library/cpp/yt/memory/unittests/weak_ptr_ut.cpp
@@ -0,0 +1,433 @@
+#include <library/cpp/testing/gtest/gtest.h>
+
+#include <library/cpp/yt/memory/new.h>
+#include <library/cpp/yt/memory/weak_ptr.h>
+
+#include <array>
+
+namespace NYT {
+namespace {
+
+using ::testing::IsNull;
+using ::testing::NotNull;
+using ::testing::InSequence;
+using ::testing::MockFunction;
+using ::testing::StrictMock;
+
+////////////////////////////////////////////////////////////////////////////////
+// Auxiliary types and functions.
+////////////////////////////////////////////////////////////////////////////////
+
+// Global lifetime counters incremented by the test objects' constructors
+// and destructors; the tests read them to verify when objects are created
+// and destroyed.
+static int ConstructorShadowState = 0;
+static int DestructorShadowState = 0;
+
+// Resets both lifetime counters; called from TWeakPtrTest::SetUp().
+void ResetShadowState()
+{
+    ConstructorShadowState = 0;
+    DestructorShadowState = 0;
+}
+
+//! A ref-counted object that records constructions and destructions in the
+//! global shadow counters, letting tests observe TWeakPtr's effect on the
+//! object's lifetime and reference counts.
+class TIntricateObject
+    : public TRefCounted
+{
+public:
+    TIntricateObject()
+    {
+        ++ConstructorShadowState;
+    }
+
+    virtual ~TIntricateObject()
+    {
+        ++DestructorShadowState;
+    }
+
+    // Prevent the counter from destruction by holding an additional
+    // reference to the counter.
+    void LockCounter()
+    {
+        WeakRef();
+    }
+
+    // Release an additional reference to the reference counter acquired by
+    // #LockCounter().
+    void UnlockCounter()
+    {
+        WeakUnref();
+    }
+
+private:
+    // Explicitly non-copyable and non-movable. Using `= delete` (instead of
+    // the pre-C++11 declared-but-undefined idiom) turns any misuse into a
+    // compile-time error rather than a link-time one.
+    TIntricateObject(const TIntricateObject&) = delete;
+    TIntricateObject(TIntricateObject&&) = delete;
+    TIntricateObject& operator=(const TIntricateObject&) = delete;
+    TIntricateObject& operator=(TIntricateObject&&) = delete;
+};
+
+using TIntricateObjectPtr = TIntrusivePtr<TIntricateObject>;
+using TIntricateObjectWkPtr = TWeakPtr<TIntricateObject>;
+
+// Subclass carrying extra payload; used to exercise upcasts from
+// TWeakPtr<TDerivedIntricateObject> to TWeakPtr<TIntricateObject>.
+class TDerivedIntricateObject
+    : public TIntricateObject
+{
+private:
+    // Payload.
+    [[maybe_unused]] std::array<char, 32> Payload;
+};
+
+typedef TIntrusivePtr<TDerivedIntricateObject> TDerivedIntricateObjectPtr;
+typedef TWeakPtr<TDerivedIntricateObject> TDerivedIntricateObjectWkPtr;
+
+// Gtest matcher: succeeds when the argument reports exactly the given
+// number of strong and weak references.
+MATCHER_P2(HasRefCounts, strongRefs, weakRefs,
+    "The object has "
+    + ::testing::PrintToString(strongRefs) + " strong and "
+    + ::testing::PrintToString(weakRefs) + " weak references")
+{
+    Y_UNUSED(result_listener);
+    return
+        arg.GetRefCount() == strongRefs &&
+        arg.GetWeakRefCount() == weakRefs;
+}
+
+// Streams the strong/weak ref counts of any extrinsically ref-counted object.
+template <class T>
+void PrintExtrinsicRefCounted(const T& arg, ::std::ostream* os)
+{
+    *os << arg.GetRefCount() << " strong and "
+        << arg.GetWeakRefCount() << " weak references";
+}
+
+// Gtest value printer for TIntricateObject (found via ADL).
+void PrintTo(const TIntricateObject& arg, ::std::ostream* os)
+{
+    PrintExtrinsicRefCounted(arg, os);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+// Fixture that resets the global lifetime counters before every test so
+// each test observes only its own constructions/destructions.
+class TWeakPtrTest
+    : public ::testing::Test
+{
+public:
+    virtual void SetUp()
+    {
+        ResetShadowState();
+    }
+};
+
+// An empty weak pointer locks to an empty strong pointer.
+TEST_F(TWeakPtrTest, Empty)
+{
+    TIntricateObjectWkPtr emptyPointer;
+    EXPECT_EQ(TIntricateObjectPtr(), emptyPointer.Lock());
+}
+
+// Creating a weak pointer (from a raw or a strong pointer) bumps only the
+// weak count; destroying it restores the count; Lock() yields the object.
+TEST_F(TWeakPtrTest, Basic)
+{
+    TIntricateObjectPtr object = New<TIntricateObject>();
+    TIntricateObject* objectPtr = object.Get();
+
+    // A freshly created object held by one strong pointer reports counts
+    // (1, 1): the strong side evidently carries one implicit weak ref.
+    EXPECT_THAT(*object, HasRefCounts(1, 1));
+
+    {
+        TIntricateObjectWkPtr ptr(objectPtr);
+        EXPECT_THAT(*object, HasRefCounts(1, 2));
+        EXPECT_EQ(object, ptr.Lock());
+    }
+
+    EXPECT_THAT(*object, HasRefCounts(1, 1));
+
+    {
+        TIntricateObjectWkPtr ptr(object);
+        EXPECT_THAT(*object, HasRefCounts(1, 2));
+        EXPECT_EQ(object, ptr.Lock());
+    }
+
+    EXPECT_THAT(*object, HasRefCounts(1, 1));
+
+    object.Reset();
+
+    // Exactly one object was created and destroyed during the test.
+    EXPECT_EQ(1, ConstructorShadowState);
+    EXPECT_EQ(1, DestructorShadowState);
+}
+
+// Reset() drops the weak reference and makes subsequent Lock() fail.
+TEST_F(TWeakPtrTest, ResetToNull)
+{
+    TIntricateObjectPtr object = New<TIntricateObject>();
+    TIntricateObjectWkPtr ptr(object);
+
+    EXPECT_THAT(*object, HasRefCounts(1, 2));
+    EXPECT_EQ(object, ptr.Lock());
+
+    ptr.Reset();
+
+    EXPECT_THAT(*object, HasRefCounts(1, 1));
+    EXPECT_EQ(TIntricateObjectPtr(), ptr.Lock());
+}
+
+// Reset(other) moves the weak reference from one object to another; both
+// the strong-pointer and the raw-pointer overloads are exercised.
+TEST_F(TWeakPtrTest, ResetToOtherObject)
+{
+    TIntricateObjectPtr firstObject = New<TIntricateObject>();
+    TIntricateObjectPtr secondObject = New<TIntricateObject>();
+
+    {
+        TIntricateObjectWkPtr ptr(firstObject);
+
+        EXPECT_THAT(*firstObject, HasRefCounts(1, 2));
+        EXPECT_THAT(*secondObject, HasRefCounts(1, 1));
+        EXPECT_EQ(firstObject, ptr.Lock());
+
+        ptr.Reset(secondObject);
+
+        EXPECT_THAT(*firstObject, HasRefCounts(1, 1));
+        EXPECT_THAT(*secondObject, HasRefCounts(1, 2));
+        EXPECT_EQ(secondObject, ptr.Lock());
+    }
+
+    TIntricateObject* firstObjectPtr = firstObject.Get();
+    TIntricateObject* secondObjectPtr = secondObject.Get();
+
+    {
+        TIntricateObjectWkPtr ptr(firstObjectPtr);
+
+        EXPECT_THAT(*firstObject, HasRefCounts(1, 2));
+        EXPECT_THAT(*secondObject, HasRefCounts(1, 1));
+        EXPECT_EQ(firstObject, ptr.Lock());
+
+        ptr.Reset(secondObjectPtr);
+
+        EXPECT_THAT(*firstObject, HasRefCounts(1, 1));
+        EXPECT_THAT(*secondObject, HasRefCounts(1, 2));
+        EXPECT_EQ(secondObject, ptr.Lock());
+    }
+}
+
+// Copy construction and copy assignment each add one weak reference and
+// leave the source usable.
+TEST_F(TWeakPtrTest, CopySemantics)
+{
+    TIntricateObjectPtr object = New<TIntricateObject>();
+    TIntricateObjectWkPtr foo(object);
+
+    {
+        EXPECT_THAT(*object, HasRefCounts(1, 2));
+        TIntricateObjectWkPtr bar(foo);
+        EXPECT_THAT(*object, HasRefCounts(1, 3));
+
+        EXPECT_EQ(object, foo.Lock());
+        EXPECT_EQ(object, bar.Lock());
+    }
+
+    {
+        EXPECT_THAT(*object, HasRefCounts(1, 2));
+        TIntricateObjectWkPtr bar;
+        bar = foo;
+        EXPECT_THAT(*object, HasRefCounts(1, 3));
+
+        EXPECT_EQ(object, foo.Lock());
+        EXPECT_EQ(object, bar.Lock());
+    }
+}
+
+// Move construction and move assignment transfer the weak reference
+// without changing the counts, leaving the source empty.
+TEST_F(TWeakPtrTest, MoveSemantics)
+{
+    TIntricateObjectPtr object = New<TIntricateObject>();
+    TIntricateObjectWkPtr foo(object);
+
+    {
+        EXPECT_THAT(*object, HasRefCounts(1, 2));
+        TIntricateObjectWkPtr bar(std::move(foo));
+        // The weak count is unchanged: the reference moved, not duplicated.
+        EXPECT_THAT(*object, HasRefCounts(1, 2));
+
+        EXPECT_EQ(TIntricateObjectPtr(), foo.Lock());
+        EXPECT_EQ(object, bar.Lock());
+    }
+
+    foo.Reset(object);
+
+    {
+        EXPECT_THAT(*object, HasRefCounts(1, 2));
+        TIntricateObjectWkPtr bar;
+        bar = std::move(foo);
+        EXPECT_THAT(*object, HasRefCounts(1, 2));
+
+        EXPECT_EQ(TIntricateObjectPtr(), foo.Lock());
+        EXPECT_EQ(object, bar.Lock());
+    }
+}
+
+// Once the last strong pointer goes out of scope, Lock() returns empty.
+TEST_F(TWeakPtrTest, OutOfScope)
+{
+    TIntricateObjectWkPtr ptr;
+
+    EXPECT_EQ(TIntricateObjectPtr(), ptr.Lock());
+    {
+        TIntricateObjectPtr object = New<TIntricateObject>();
+        ptr = object;
+        EXPECT_EQ(object, ptr.Lock());
+    }
+    EXPECT_EQ(TIntricateObjectPtr(), ptr.Lock());
+}
+
+// Same as above with nested scopes holding independent weak pointers.
+TEST_F(TWeakPtrTest, OutOfNestedScope)
+{
+    TIntricateObjectWkPtr foo;
+
+    EXPECT_EQ(TIntricateObjectPtr(), foo.Lock());
+    {
+        TIntricateObjectPtr object = New<TIntricateObject>();
+        foo = object;
+
+        EXPECT_EQ(object, foo.Lock());
+        {
+            TIntricateObjectWkPtr bar;
+            bar = object;
+
+            EXPECT_EQ(object, bar.Lock());
+        }
+        EXPECT_EQ(object, foo.Lock());
+    }
+    EXPECT_EQ(TIntricateObjectPtr(), foo.Lock());
+
+    // Exactly one object was created and destroyed.
+    EXPECT_EQ(1, ConstructorShadowState);
+    EXPECT_EQ(1, DestructorShadowState);
+}
+
+// IsExpired() is true for an empty pointer and after the pointee dies.
+TEST_F(TWeakPtrTest, IsExpired)
+{
+    TIntricateObjectWkPtr ptr;
+
+    EXPECT_TRUE(ptr.IsExpired());
+    {
+        TIntricateObjectPtr object = New<TIntricateObject>();
+        ptr = object;
+        EXPECT_FALSE(ptr.IsExpired());
+    }
+    EXPECT_TRUE(ptr.IsExpired());
+}
+
+// A weak pointer to a base class can be built from a derived strong pointer.
+TEST_F(TWeakPtrTest, UpCast)
+{
+    TDerivedIntricateObjectPtr object = New<TDerivedIntricateObject>();
+    TIntricateObjectWkPtr ptr = object;
+
+    EXPECT_EQ(object.Get(), ptr.Lock().Get());
+}
+
+// Variant of TIntricateObject that inherits TRefCounted *virtually*;
+// used below to check that TWeakPtr works across a virtual base.
+class TIntricateObjectVirtual
+    : public virtual TRefCounted
+{
+public:
+    TIntricateObjectVirtual()
+    {
+        ++ConstructorShadowState;
+    }
+
+    virtual ~TIntricateObjectVirtual()
+    {
+        ++DestructorShadowState;
+    }
+
+    // Prevent the counter from destruction by holding an additional
+    // reference to the counter.
+    void LockCounter()
+    {
+        WeakRef();
+    }
+
+    // Release an additional reference to the reference counter acquired by
+    // #LockCounter().
+    void UnlockCounter()
+    {
+        WeakUnref();
+    }
+
+private:
+    // Explicitly non-copyable (declared, intentionally never defined).
+    TIntricateObjectVirtual(const TIntricateObjectVirtual&);
+    TIntricateObjectVirtual(TIntricateObjectVirtual&&);
+    TIntricateObjectVirtual& operator=(const TIntricateObjectVirtual&);
+    TIntricateObjectVirtual& operator=(TIntricateObjectVirtual&&);
+};
+
+// Smoke test: create, observe and release an object with a virtual
+// TRefCounted base; no crash/leak is the success criterion.
+TEST_F(TWeakPtrTest, VirtualBase)
+{
+    auto object = New<TIntricateObjectVirtual>();
+    TWeakPtr<TIntricateObjectVirtual> ptr = object;
+
+    object.Reset();
+    ptr.Reset();
+}
+
+#if 0
+// Disabled scenario: acquiring a weak pointer while the pointee's
+// destructor is blocked mid-flight on another thread.
+// NOTE(review): ~TSlowlyDyingObject() references DeathEvent, which is only
+// declared further below; this region would need reordering (and the
+// NThreading/TThread includes) to compile if re-enabled.
+class TSlowlyDyingObject
+    : public TRefCounted
+{
+public:
+    TSlowlyDyingObject()
+    {
+        ++ConstructorShadowState;
+    }
+
+    virtual ~TSlowlyDyingObject()
+    {
+        ++DestructorShadowState;
+        // Blocks destruction until the test signals DeathEvent; the counter
+        // is bumped once on entry and once after the wait completes.
+        DeathEvent->Wait();
+        ++DestructorShadowState;
+    }
+};
+
+void PrintTo(const TSlowlyDyingObject& arg, ::std::ostream* os)
+{
+    PrintExtrinsicRefCounted(arg, os);
+}
+
+typedef TIntrusivePtr<TSlowlyDyingObject> TSlowlyDyingObjectPtr;
+typedef TWeakPtr<TSlowlyDyingObject> TSlowlyDyingObjectWkPtr;
+
+// Thread entry point: drops the strong pointer passed via #param.
+static void* AsynchronousDeleter(void* param)
+{
+    TSlowlyDyingObjectPtr* indirectObject =
+        reinterpret_cast<TSlowlyDyingObjectPtr*>(param);
+    indirectObject->Reset();
+    return nullptr;
+}
+
+std::unique_ptr<NThreading::TEvent> DeathEvent;
+
+// While the object is stuck inside its destructor on another thread,
+// Lock() must already fail and the strong count must read zero.
+TEST_F(TWeakPtrTest, DISABLED_AcquisionOfSlowlyDyingObject)
+{
+    DeathEvent.reset(new NThreading::TEvent());
+
+    TSlowlyDyingObjectPtr object = New<TSlowlyDyingObject>();
+    TSlowlyDyingObjectWkPtr ptr(object);
+
+    TSlowlyDyingObject* objectPtr = object.Get();
+
+    EXPECT_EQ(object, ptr.Lock());
+    EXPECT_THAT(*objectPtr, HasRefCounts(1, 2));
+
+    ASSERT_EQ(1, ConstructorShadowState);
+    ASSERT_EQ(0, DestructorShadowState);
+
+    // Kick off object deletion in the background.
+    TThread thread(&AsynchronousDeleter, &object);
+    thread.Start();
+    Sleep(TDuration::Seconds(0.100));
+
+    // Destructor has been entered (counter == 1) but not finished.
+    ASSERT_EQ(1, ConstructorShadowState);
+    ASSERT_EQ(1, DestructorShadowState);
+
+    EXPECT_EQ(TSlowlyDyingObjectPtr(), ptr.Lock());
+    EXPECT_THAT(*objectPtr, HasRefCounts(0, 2));
+
+    // Finalize object destruction.
+    DeathEvent->NotifyAll();
+    thread.Join();
+
+    ASSERT_EQ(1, ConstructorShadowState);
+    ASSERT_EQ(2, DestructorShadowState);
+
+    EXPECT_EQ(TSlowlyDyingObjectPtr(), ptr.Lock());
+}
+
+#endif
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace
+} // namespace NYT
diff --git a/library/cpp/yt/memory/unittests/ya.make b/library/cpp/yt/memory/unittests/ya.make
new file mode 100644
index 0000000000..f09ad7d0c9
--- /dev/null
+++ b/library/cpp/yt/memory/unittests/ya.make
@@ -0,0 +1,19 @@
+GTEST(unittester-library-memory)
+
+OWNER(g:yt)
+
+IF (NOT OS_WINDOWS)
+ ALLOCATOR(YT)
+ENDIF()
+
+SRCS(
+ intrusive_ptr_ut.cpp
+ weak_ptr_ut.cpp
+)
+
+PEERDIR(
+ library/cpp/testing/gtest
+ library/cpp/yt/memory
+)
+
+END()
diff --git a/library/cpp/yt/memory/weak_ptr.h b/library/cpp/yt/memory/weak_ptr.h
new file mode 100644
index 0000000000..25a242bb8a
--- /dev/null
+++ b/library/cpp/yt/memory/weak_ptr.h
@@ -0,0 +1,314 @@
+#pragma once
+
+#include "ref_counted.h"
+
+#include <util/generic/hash.h>
+
+namespace NYT {
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! A non-owning smart pointer to an extrinsically ref-counted object.
+/*!
+ *  Holds a weak reference on the pointee's reference counter; the pointee
+ *  itself may be destroyed while the TWeakPtr is alive, in which case
+ *  Lock() returns an empty strong pointer.
+ */
+template <class T>
+class TWeakPtr
+{
+public:
+    typedef T TUnderlying;
+
+    //! Empty constructor.
+    TWeakPtr() = default;
+
+    TWeakPtr(std::nullptr_t)
+    { }
+
+    //! Constructor from an unqualified reference.
+    /*!
+     * Note that this constructor could be racy due to unsynchronized operations
+     * on the object and on the counter.
+     */
+    explicit TWeakPtr(T* p) noexcept
+        : T_(p)
+    {
+
+#if defined(_tsan_enabled_)
+        // Under TSAN the counter pointer is cached eagerly; presumably this
+        // keeps later RefCounter() calls from recomputing it via T_ once the
+        // object may be gone — TODO(review): confirm against ref_counted.h.
+        if (T_) {
+            RefCounter_ = GetRefCounter(T_);
+        }
+#endif
+        AcquireRef();
+    }
+
+    //! Constructor from a strong reference.
+    TWeakPtr(const TIntrusivePtr<T>& ptr) noexcept
+        : TWeakPtr(ptr.Get())
+    { }
+
+    //! Constructor from a strong reference with an upcast.
+    template <class U, class = typename std::enable_if_t<std::is_convertible_v<U*, T*>>>
+    TWeakPtr(const TIntrusivePtr<U>& ptr) noexcept
+        : TWeakPtr(ptr.Get())
+    {
+        static_assert(
+            std::is_base_of_v<TRefCountedBase, T>,
+            "Cast allowed only for types derived from TRefCountedBase");
+    }
+
+    //! Copy constructor.
+    TWeakPtr(const TWeakPtr& other) noexcept
+        : TWeakPtr(other.T_)
+    { }
+
+    //! Copy constructor with an upcast.
+    /*!
+     *  NOTE: goes through other.Lock(), so if #other is already expired the
+     *  copy is constructed empty rather than retaining the dead pointer
+     *  (unlike the same-type copy constructor above).
+     */
+    template <class U, class = typename std::enable_if_t<std::is_convertible_v<U*, T*>>>
+    TWeakPtr(const TWeakPtr<U>& other) noexcept
+        : TWeakPtr(other.Lock())
+    {
+        static_assert(
+            std::is_base_of_v<TRefCountedBase, T>,
+            "Cast allowed only for types derived from TRefCountedBase");
+    }
+
+    //! Move constructor.
+    TWeakPtr(TWeakPtr&& other) noexcept
+    {
+        other.Swap(*this);
+    }
+
+    //! Move constructor with an upcast.
+    /*!
+     *  NOTE: if #other is already expired, nothing is transferred; *this is
+     *  left empty and #other keeps its weak reference.
+     */
+    template <class U, class = typename std::enable_if_t<std::is_convertible_v<U*, T*>>>
+    TWeakPtr(TWeakPtr<U>&& other) noexcept
+    {
+        static_assert(
+            std::is_base_of_v<TRefCountedBase, T>,
+            "Cast allowed only for types derived from TRefCountedBase");
+        TIntrusivePtr<U> strongOther = other.Lock();
+        if (strongOther) {
+            // Steal the pointer (and its weak reference) from #other.
+            T_ = other.T_;
+            other.T_ = nullptr;
+
+#if defined(_tsan_enabled_)
+            RefCounter_ = other.RefCounter_;
+            other.RefCounter_ = nullptr;
+#endif
+        }
+    }
+
+    //! Destructor.
+    ~TWeakPtr()
+    {
+        ReleaseRef();
+    }
+
+    //! Assignment operator from a strong reference.
+    template <class U>
+    TWeakPtr& operator=(const TIntrusivePtr<U>& ptr) noexcept
+    {
+        static_assert(
+            std::is_convertible_v<U*, T*>,
+            "U* must be convertible to T*");
+        // Copy-and-swap: the old state is released by the temporary.
+        TWeakPtr(ptr).Swap(*this);
+        return *this;
+    }
+
+    //! Copy assignment operator.
+    TWeakPtr& operator=(const TWeakPtr& other) noexcept
+    {
+        TWeakPtr(other).Swap(*this);
+        return *this;
+    }
+
+    //! Copy assignment operator with an upcast.
+    template <class U>
+    TWeakPtr& operator=(const TWeakPtr<U>& other) noexcept
+    {
+        static_assert(
+            std::is_convertible_v<U*, T*>,
+            "U* must be convertible to T*");
+        TWeakPtr(other).Swap(*this);
+        return *this;
+    }
+
+    //! Move assignment operator.
+    TWeakPtr& operator=(TWeakPtr&& other) noexcept
+    {
+        // Our previous state is handed over to #other and released when
+        // #other is destroyed.
+        other.Swap(*this);
+        return *this;
+    }
+
+    //! Move assignment operator with an upcast.
+    template <class U>
+    TWeakPtr& operator=(TWeakPtr<U>&& other) noexcept
+    {
+        static_assert(
+            std::is_convertible_v<U*, T*>,
+            "U* must be convertible to T*");
+        TWeakPtr(std::move(other)).Swap(*this);
+        return *this;
+    }
+
+    //! Drop the pointer.
+    void Reset() // noexcept
+    {
+        TWeakPtr().Swap(*this);
+    }
+
+    //! Replace the pointer with a specified one.
+    void Reset(T* p) // noexcept
+    {
+        TWeakPtr(p).Swap(*this);
+    }
+
+    //! Replace the pointer with a specified one.
+    template <class U>
+    void Reset(const TIntrusivePtr<U>& ptr) // noexcept
+    {
+        static_assert(
+            std::is_convertible_v<U*, T*>,
+            "U* must be convertible to T*");
+        TWeakPtr(ptr).Swap(*this);
+    }
+
+    //! Swap the pointer with the other one.
+    void Swap(TWeakPtr& other) noexcept
+    {
+        DoSwap(T_, other.T_);
+#if defined(_tsan_enabled_)
+        DoSwap(RefCounter_, other.RefCounter_);
+#endif
+    }
+
+    //! Acquire a strong reference to the pointee and return a strong pointer.
+    /*!
+     *  Returns an empty pointer when the strong count has already dropped
+     *  to zero. TryRef() succeeds only while the count is still positive;
+     *  the |false| argument presumably suppresses an extra Ref() inside
+     *  TIntrusivePtr — confirm in intrusive_ptr.h.
+     */
+    TIntrusivePtr<T> Lock() const noexcept
+    {
+        return T_ && RefCounter()->TryRef()
+            ? TIntrusivePtr<T>(T_, false)
+            : TIntrusivePtr<T>();
+    }
+
+    //! Returns true if the pointer is empty or the pointee has already been
+    //! destroyed. The result may become stale immediately in the presence
+    //! of concurrent releases.
+    bool IsExpired() const noexcept
+    {
+        return !T_ || (RefCounter()->GetRefCount() == 0);
+    }
+
+private:
+    // Takes one weak reference on the counter (if non-empty).
+    void AcquireRef()
+    {
+        if (T_) {
+            RefCounter()->WeakRef();
+        }
+    }
+
+    // Drops our weak reference; when it was the last one, frees the
+    // counter/object storage.
+    void ReleaseRef()
+    {
+        if (T_) {
+            // Support incomplete type.
+            if (RefCounter()->WeakUnref()) {
+                DeallocateRefCounted(T_);
+            }
+        }
+    }
+
+    template <class U>
+    friend class TWeakPtr;
+    template <class U>
+    friend struct ::THash;
+
+    T* T_ = nullptr;
+#if defined(_tsan_enabled_)
+    // Cached at construction/assignment time; see the raw-pointer ctor.
+    const TRefCounter* RefCounter_ = nullptr;
+
+    const TRefCounter* RefCounter() const
+    {
+        return RefCounter_;
+    }
+#else
+    const TRefCounter* RefCounter() const
+    {
+        return GetRefCounter(T_);
+    }
+#endif
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//! Creates a weak pointer wrapper for a given raw pointer.
+//! Compared to |TWeakPtr<T>::ctor|, type inference enables omitting |T|.
+//! Shares the raw-pointer constructor's caveat: may be racy with respect to
+//! concurrent operations on the object's counter.
+template <class T>
+TWeakPtr<T> MakeWeak(T* p)
+{
+    return TWeakPtr<T>(p);
+}
+
+//! Creates a weak pointer wrapper for a given intrusive pointer.
+//! Compared to |TWeakPtr<T>::ctor|, type inference enables omitting |T|.
+template <class T>
+TWeakPtr<T> MakeWeak(const TIntrusivePtr<T>& p)
+{
+    return TWeakPtr<T>(p);
+}
+
+//! A helper for acquiring weak pointer for pointee, resetting intrusive pointer and then
+//! returning the pointee reference count using the acquired weak pointer.
+//! This helper is designed for best effort in checking that the object is not leaked after
+//! destructing (what seems to be) the last pointer to it.
+//! NB: it is possible to rewrite this helper making it work even with intrinsic refcounted objects,
+//! but it requires much nastier integration with the intrusive pointer destruction routines.
+template <typename T>
+int ResetAndGetResidualRefCount(TIntrusivePtr<T>& pointer)
+{
+    auto weakPointer = MakeWeak(pointer);
+    pointer.Reset();
+    // Deliberate assignment-in-condition: re-lock the weak pointer to see
+    // whether anyone else still holds a strong reference.
+    if (pointer = weakPointer.Lock()) {
+        // This _may_ return 0 if we are again the only holder of the pointee.
+        // Subtract the strong reference we just re-acquired ourselves.
+        return pointer->GetRefCount() - 1;
+    } else {
+        return 0;
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+// TODO(sandello): Kill comparsions.
+//! Orders weak pointers by the address of their currently locked pointees;
+//! expired pointers participate as nullptr.
+template <class T>
+bool operator<(const TWeakPtr<T>& lhs, const TWeakPtr<T>& rhs)
+{
+    auto lhsLocked = lhs.Lock();
+    auto rhsLocked = rhs.Lock();
+    return lhsLocked.Get() < rhsLocked.Get();
+}
+
+template <class T>
+bool operator>(const TWeakPtr<T>& lhs, const TWeakPtr<T>& rhs)
+{
+    auto lhsLocked = lhs.Lock();
+    auto rhsLocked = rhs.Lock();
+    return lhsLocked.Get() > rhsLocked.Get();
+}
+
+//! Two weak pointers compare equal iff they currently lock to the same
+//! object; all expired/empty pointers compare equal (both lock to nullptr).
+template <class T, class U>
+bool operator==(const TWeakPtr<T>& lhs, const TWeakPtr<U>& rhs)
+{
+    static_assert(
+        std::is_convertible_v<U*, T*>,
+        "U* must be convertible to T*");
+    auto lhsLocked = lhs.Lock();
+    auto rhsLocked = rhs.Lock();
+    return lhsLocked.Get() == rhsLocked.Get();
+}
+
+template <class T, class U>
+bool operator!=(const TWeakPtr<T>& lhs, const TWeakPtr<U>& rhs)
+{
+    static_assert(
+        std::is_convertible_v<U*, T*>,
+        "U* must be convertible to T*");
+    auto lhsLocked = lhs.Lock();
+    auto rhsLocked = rhs.Lock();
+    return lhsLocked.Get() != rhsLocked.Get();
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+} // namespace NYT
+
+
+//! A hasher for TWeakPtr.
+template <class T>
+struct THash<NYT::TWeakPtr<T>>
+{
+    // Hashes the raw stored pointer; works without taking a strong
+    // reference, even for an expired pointer.
+    // NOTE(review): operator== above compares Lock().Get(), so an expired
+    // pointer equals an empty one yet hashes differently — verify intended
+    // before using TWeakPtr as a hash-map key.
+    size_t operator () (const NYT::TWeakPtr<T>& ptr) const
+    {
+        return THash<const NYT::TRefCountedBase*>()(ptr.T_);
+    }
+};
diff --git a/library/cpp/yt/memory/ya.make b/library/cpp/yt/memory/ya.make
new file mode 100644
index 0000000000..a925c714ee
--- /dev/null
+++ b/library/cpp/yt/memory/ya.make
@@ -0,0 +1,31 @@
+LIBRARY()
+
+OWNER(g:yt)
+
+SRCS(
+ blob.cpp
+ ref.cpp
+ ref_tracked.cpp
+)
+
+PEERDIR(
+ library/cpp/yt/assert
+ library/cpp/yt/misc
+ library/cpp/ytalloc/api
+)
+
+CHECK_DEPENDENT_DIRS(
+ ALLOW_ONLY ALL
+ build
+ contrib
+ library
+ util
+ library/cpp/yt/assert
+ library/cpp/yt/misc
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ unittests
+)