author     babenko <babenko@yandex-team.com>    2024-09-21 18:32:01 +0300
committer  babenko <babenko@yandex-team.com>    2024-09-21 18:42:43 +0300
commit     22ec41df66d0db33b7c3870298dc2973c51c6241 (patch)
tree       8e167134b8ccbcc2336f14cac6a30a4d8264132c /library/cpp
parent     7deed162d9d0d42ef21eea39547befcd9e48a784 (diff)
Increase RC size to 64 bits
commit_hash:31d021c482ac2a3f99e86bf1a2aca7231c86c11d
Diffstat (limited to 'library/cpp')
-rw-r--r--  library/cpp/yt/memory/atomic_intrusive_ptr.h  13
-rw-r--r--  library/cpp/yt/memory/ref_counted-inl.h         8
-rw-r--r--  library/cpp/yt/memory/ref_counted.h             6
3 files changed, 13 insertions(+), 14 deletions(-)
diff --git a/library/cpp/yt/memory/atomic_intrusive_ptr.h b/library/cpp/yt/memory/atomic_intrusive_ptr.h
index ae0a307182..72405053c0 100644
--- a/library/cpp/yt/memory/atomic_intrusive_ptr.h
+++ b/library/cpp/yt/memory/atomic_intrusive_ptr.h
@@ -6,13 +6,10 @@ namespace NYT {
////////////////////////////////////////////////////////////////////////////////
-// Atomic ptr based on https://github.com/facebook/folly/blob/main/folly/concurrency/AtomicSharedPtr.h
-
-// Operators * and -> for TAtomicIntrusivePtr are useless because it is not safe to work with atomic ptr such way
-// Safe usage is to convert to TIntrusivePtr.
-
-// Max TAtomicIntrusivePtr count per object is (2**16 = 2**32 / 2**16).
-
+//! Atomic pointer with split reference counting.
+/*!
+ * \see https://github.com/facebook/folly/blob/main/folly/concurrency/AtomicSharedPtr.h
+ */
template <class T>
class TAtomicIntrusivePtr
{
@@ -39,7 +36,7 @@ public:
bool CompareAndSwap(TRawPtr& comparePtr, T* target);
bool CompareAndSwap(TRawPtr& comparePtr, TIntrusivePtr<T> target);
- // Result is suitable only for comparison. Not dereference.
+ //! Result is only suitable for comparison, not dereference.
TRawPtr Get() const;
explicit operator bool() const;
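
For context, the safe access pattern looks like the sketch below. It assumes the class also exposes an Acquire() accessor that materializes a full TIntrusivePtr; the usage itself is hypothetical, not taken from this commit:

    #include <library/cpp/yt/memory/atomic_intrusive_ptr.h>

    using namespace NYT;

    template <class T>
    void UseSafely(const TAtomicIntrusivePtr<T>& atomicPtr)
    {
        // Safe: take a full strong reference first, then dereference it.
        if (TIntrusivePtr<T> strongPtr = atomicPtr.Acquire()) {
            // ... work with *strongPtr ...
        }
        // Unsafe: Get() returns a raw value meant only for comparison;
        // the pointee may be destroyed concurrently at any moment.
    }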
diff --git a/library/cpp/yt/memory/ref_counted-inl.h b/library/cpp/yt/memory/ref_counted-inl.h
index 9ffdf328eb..6c20db8c41 100644
--- a/library/cpp/yt/memory/ref_counted-inl.h
+++ b/library/cpp/yt/memory/ref_counted-inl.h
@@ -90,7 +90,7 @@ Y_FORCE_INLINE void DestroyRefCountedImpl(T* obj)
return;
}
- YT_ASSERT(offset < std::numeric_limits<ui16>::max());
+ YT_ASSERT(offset < (1ULL << PackedPtrTagBits));
auto* vTablePtr = reinterpret_cast<TPackedPtr*>(basePtr);
*vTablePtr = TTaggedPtr<void(void*, ui16)>(&NYT::NDetail::TMemoryReleaser<T>::Do, offset).Pack();
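
The assert now guards the tag bits of a packed pointer. A self-contained sketch of the packing scheme, under the assumption that PackedPtrTagBits == 16 and addresses fit in the low 48 bits (as on x86-64):

    #include <cstdint>

    constexpr int AddressBits = 48; // assumed: canonical x86-64 addresses
    constexpr uint64_t PtrMask = (1ULL << AddressBits) - 1;

    uint64_t Pack(void* ptr, uint16_t tag)
    {
        // The tag occupies the 16 high bits that real addresses never use.
        return (static_cast<uint64_t>(tag) << AddressBits) |
               reinterpret_cast<uint64_t>(ptr);
    }

    void* UnpackPtr(uint64_t packed)
    {
        return reinterpret_cast<void*>(packed & PtrMask);
    }

    uint16_t UnpackTag(uint64_t packed)
    {
        return static_cast<uint16_t>(packed >> AddressBits);
    }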
@@ -187,7 +187,7 @@ Y_FORCE_INLINE void TRefCounter::Ref(int n) const noexcept
// It is safe to use relaxed here, since a new reference is always created from another live reference.
auto value = StrongCount_.fetch_add(n, std::memory_order::relaxed);
YT_ASSERT(value > 0);
- YT_ASSERT(value <= std::numeric_limits<int>::max() - n);
+ YT_ASSERT(value <= std::numeric_limits<TRefCount>::max() - n);
YT_ASSERT(WeakCount_.load(std::memory_order::relaxed) > 0);
}
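
The relaxed ordering deserves a standalone illustration: the caller already holds a live reference, so the count is at least one and cannot concurrently reach zero; atomicity of the increment is all that is required (a sketch, not the library's code):

    #include <atomic>
    #include <cstdint>

    void RefSketch(std::atomic<int64_t>& strongCount, int n)
    {
        // A caller-held live reference implies strongCount >= 1 here, so
        // the count cannot drop to zero concurrently; a relaxed increment
        // needs no ordering with other memory operations.
        strongCount.fetch_add(n, std::memory_order::relaxed);
    }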
@@ -199,7 +199,7 @@ Y_FORCE_INLINE void TRefCounter::DangerousRef(int n) const noexcept
// Relaxed is fine as per lukyan@; the caller guarantees object liveness.
auto value = StrongCount_.fetch_add(n, std::memory_order::relaxed);
YT_ASSERT(value >= 0);
- YT_ASSERT(value <= std::numeric_limits<int>::max() - n);
+ YT_ASSERT(value <= std::numeric_limits<TRefCount>::max() - n);
YT_ASSERT(WeakCount_.load(std::memory_order::relaxed) > 0);
}
@@ -207,7 +207,7 @@ Y_FORCE_INLINE void TRefCounter::DangerousRef(int n) const noexcept
Y_FORCE_INLINE bool TRefCounter::TryRef() const noexcept
{
auto value = StrongCount_.load(std::memory_order::relaxed);
- YT_ASSERT(value >= 0 && value < std::numeric_limits<int>::max());
+ YT_ASSERT(value >= 0 && value < std::numeric_limits<TRefCount>::max());
YT_ASSERT(WeakCount_.load(std::memory_order::relaxed) > 0);
while (value != 0 && !StrongCount_.compare_exchange_weak(value, value + 1));
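
The loop above is an increment-if-nonzero: a weak reference may be promoted to a strong one only while the strong count is still positive. A self-contained sketch of the same pattern:

    #include <atomic>
    #include <cstdint>

    bool TryRefSketch(std::atomic<int64_t>& strongCount)
    {
        auto value = strongCount.load(std::memory_order::relaxed);
        while (value != 0) {
            // On failure compare_exchange_weak reloads `value` and we
            // retry; on success the count was bumped from a nonzero
            // value, so the object is guaranteed to still be alive.
            if (strongCount.compare_exchange_weak(value, value + 1)) {
                return true;
            }
        }
        return false; // count already hit zero; promotion failed
    }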
diff --git a/library/cpp/yt/memory/ref_counted.h b/library/cpp/yt/memory/ref_counted.h
index ed55aee6ee..6abef3bf05 100644
--- a/library/cpp/yt/memory/ref_counted.h
+++ b/library/cpp/yt/memory/ref_counted.h
@@ -63,8 +63,10 @@ public:
bool WeakUnref() const;
private:
- mutable std::atomic<int> StrongCount_ = 1;
- mutable std::atomic<int> WeakCount_ = 1;
+ // NB: Must be 64-bit as TAtomicIntrusivePtr grabs refs in 64K batches.
+ using TRefCount = i64;
+ mutable std::atomic<TRefCount> StrongCount_ = 1;
+ mutable std::atomic<TRefCount> WeakCount_ = 1;
};
////////////////////////////////////////////////////////////////////////////////
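
The batch size explains the width change: each TAtomicIntrusivePtr reserves 16 tag bits for a local count and therefore grabs 2^16 = 64K references at a time. A back-of-the-envelope check (BatchSize is an assumed constant mirroring the comment above):

    #include <cstdint>
    #include <limits>

    // Assumed: each TAtomicIntrusivePtr grabs refs in batches of 2^16.
    constexpr uint64_t BatchSize = 1ULL << 16;

    // A 32-bit signed counter fits only ~2^15 such batches before overflow:
    static_assert(std::numeric_limits<int32_t>::max() / BatchSize == (1 << 15) - 1);

    // A 64-bit counter leaves ~2^47 batches of headroom:
    static_assert(std::numeric_limits<int64_t>::max() / BatchSize == (1LL << 47) - 1);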