author     babenko <babenko@yandex-team.com>   2023-11-26 10:34:01 +0300
committer  babenko <babenko@yandex-team.com>   2023-11-26 10:55:45 +0300
commit     56f48209380ba0f90ddac5b49ba5b8f74e3c7382 (patch)
tree       03ee0a222cbd069fb826694cc2b797e857e82e1d
parent     b14839a0aced193eb62df77d69ff6e8d3f51e75d (diff)
download   ydb-56f48209380ba0f90ddac5b49ba5b8f74e3c7382.tar.gz
Refactor TSlabAllocator: drop dependency on ytalloc
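
Make the slab allocator self-contained: the size-class tables
(SmallRankToSize, SizeToSmallRank1/2) and LargeAllocationSizeThreshold now
live in an anonymous namespace in slab_allocator.cpp, SmallRankCount (23)
becomes a private constant of TSlabAllocator, and arena segments are
allocated with ::malloc/::free instead of NYTAlloc. The related
static_asserts move into the TSlabAllocator constructor, the free-standing
IsReallocationNeeded(const void*) becomes a static member, and
slab_allocator.h drops the <library/cpp/ytalloc/api/ytalloc.h> include.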
-rw-r--r--  yt/yt/core/misc/slab_allocator.cpp | 123
-rw-r--r--  yt/yt/core/misc/slab_allocator.h   |  20
2 files changed, 99 insertions(+), 44 deletions(-)
diff --git a/yt/yt/core/misc/slab_allocator.cpp b/yt/yt/core/misc/slab_allocator.cpp
index 8d0fbd18ee..55a3aed322 100644
--- a/yt/yt/core/misc/slab_allocator.cpp
+++ b/yt/yt/core/misc/slab_allocator.cpp
@@ -1,5 +1,7 @@
#include "slab_allocator.h"
+#include "memory_usage_tracker.h"
+
#include <yt/yt/core/misc/atomic_ptr.h>
#include <yt/yt/library/profiling/sensor.h>
@@ -10,9 +12,58 @@ namespace NYT {
/////////////////////////////////////////////////////////////////////////////
-static_assert(TSlabAllocator::SegmentSize >= NYTAlloc::LargeAllocationSizeThreshold, "Segment size violation");
+namespace {
+
+// Maps small chunk ranks to size in bytes.
+constexpr auto SmallRankToSize = std::to_array<size_t>({
+ 0,
+ 16, 32, 48, 64, 96, 128,
+ 192, 256, 384, 512, 768, 1024, 1536, 2048,
+ 3072, 4096, 6144, 8192, 12288, 16384, 24576, 32768
+});
+
+// Helper array for mapping size to small chunk rank.
+constexpr auto SizeToSmallRank1 = std::to_array<ui64>({
+ 1, 1, 1, 2, 2, // 16, 32
+ 3, 3, 4, 4, // 48, 64
+ 5, 5, 5, 5, 6, 6, 6, 6, // 96, 128
+ 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 8, 8, // 192, 256
+ 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 384
+ 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, // 512
+});
+
+// Helper array for mapping size to small chunk rank.
+constexpr auto SizeToSmallRank2 = std::to_array<ui8>({
+ 10, 10, 11, 12, // 512, 512, 768, 1024
+ 13, 13, 14, 14, // 1536, 2048
+ 15, 15, 15, 15, 16, 16, 16, 16, // 3072, 4096
+ 17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, // 6144, 8192
+ 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, // 12288
+ 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, // 16384
+ 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
+ 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, // 24576
+ 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22,
+ 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, // 32768
+});
+
+constexpr size_t LargeAllocationSizeThreshold = 32_KB;
+
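+// For illustration, two worked examples of the size-to-rank mapping
+// implemented below (a size is rounded up to the smallest rank that fits it):
+//   SizeToSmallRank(100)  = SizeToSmallRank1[(100 + 7) >> 3]  = 6,  SmallRankToSize[6]  = 128;
+//   SizeToSmallRank(5000) = SizeToSmallRank2[(5000 - 1) >> 8] = 17, SmallRankToSize[17] = 6144.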
+constexpr size_t SizeToSmallRank(size_t size)
+{
+ if (size <= 512) {
+ return SizeToSmallRank1[(size + 7) >> 3];
+ } else {
+ if (size <= LargeAllocationSizeThreshold) {
+ return SizeToSmallRank2[(size - 1) >> 8];
+ } else {
+ return 0;
+ }
+ }
+}
+
+} // namespace
-static_assert(TSlabAllocator::AcquireMemoryGranularity % 2 == 0, "Must be divisible by 2");
+/////////////////////////////////////////////////////////////////////////////
struct TArenaCounters
{
@@ -31,6 +82,8 @@ struct TArenaCounters
NProfiling::TGauge ArenaSize;
};
+/////////////////////////////////////////////////////////////////////////////
+
class TSmallArena final
: public TRefTracked<TSmallArena>
, public TArenaCounters
@@ -50,7 +103,7 @@ public:
IMemoryUsageTrackerPtr memoryTracker,
const NProfiling::TProfiler& profiler)
: TArenaCounters(profiler.WithTag("rank", ToString(rank)))
- , ObjectSize_(NYTAlloc::SmallRankToSize[rank])
+ , ObjectSize_(SmallRankToSize[rank])
, ObjectCount_(segmentSize / ObjectSize_)
, MemoryTracker_(std::move(memoryTracker))
{
@@ -88,7 +141,7 @@ public:
auto* segment = Segments_.ExtractAll();
while (segment) {
auto* next = segment->Next.load(std::memory_order::acquire);
- NYTAlloc::Free(segment);
+ ::free(segment);
segment = next;
++segmentCount;
}
@@ -185,7 +238,7 @@ private:
TRefCountedTrackerFacade::AllocateSpace(GetRefCountedTypeCookie<TSmallArena>(), totalSize);
#endif
- auto* ptr = NYTAlloc::Allocate(totalSize);
+ auto* ptr = ::malloc(totalSize);
// Save segments in list to free them in destructor.
Segments_.Put(static_cast<TFreeListItem*>(ptr));
@@ -205,7 +258,7 @@ private:
}
};
-DEFINE_REFCOUNTED_TYPE(TSmallArena)
+DEFINE_REFCOUNTED_TYPE(TSmallArena);
/////////////////////////////////////////////////////////////////////////////
@@ -299,7 +352,6 @@ public:
MemoryTracker_->Release(releasedMemory);
auto arenaSize = AcquiredMemory_.fetch_sub(releasedMemory) - releasedMemory;
ArenaSize.Update(arenaSize);
-
return;
}
}
@@ -315,27 +367,16 @@ private:
};
const IMemoryUsageTrackerPtr MemoryTracker_;
+
// One ref from allocator plus refs from allocated objects.
- std::atomic<size_t> RefCount_ = 1;
+ std::atomic<int> RefCount_ = 1;
+
std::atomic<size_t> OverheadMemory_ = 0;
std::atomic<size_t> AcquiredMemory_ = 0;
};
/////////////////////////////////////////////////////////////////////////////
-TSlabAllocator::TSlabAllocator(
- const NProfiling::TProfiler& profiler,
- IMemoryUsageTrackerPtr memoryTracker)
- : Profiler_(profiler)
-{
- for (size_t rank = 1; rank < NYTAlloc::SmallRankCount; ++rank) {
- // There is no std::make_unique overload with custom deleter.
- SmallArenas_[rank].Store(New<TSmallArena>(rank, TSlabAllocator::SegmentSize, memoryTracker, Profiler_));
- }
-
- LargeArena_.reset(new TLargeArena(memoryTracker, profiler));
-}
-
namespace {
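+// Each allocation carries a header word holding a tag that identifies the
+// owning arena (read back via GetHeaderFromPtr). A TLargeArena tag has the
+// low bit set (see MakeTagFromArena); a TSmallArena tag is the plain, even
+// arena pointer.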
TLargeArena* TryGetLargeArenaFromTag(uintptr_t tag)
@@ -350,6 +391,7 @@ TSmallArena* GetSmallArenaFromTag(uintptr_t tag)
uintptr_t MakeTagFromArena(TLargeArena* arena)
{
+
auto result = reinterpret_cast<uintptr_t>(arena);
YT_ASSERT((result & 1ULL) == 0);
return result | 1ULL;
@@ -374,6 +416,25 @@ uintptr_t* GetHeaderFromPtr(void* ptr)
} // namespace
+/////////////////////////////////////////////////////////////////////////////
+
+TSlabAllocator::TSlabAllocator(
+ const NProfiling::TProfiler& profiler,
+ IMemoryUsageTrackerPtr memoryTracker)
+ : Profiler_(profiler)
+{
+ static_assert(SmallRankCount == SmallRankToSize.size(), "Wrong SmallRankCount");
+ static_assert(SegmentSize >= LargeAllocationSizeThreshold, "Segment size violation");
+ static_assert(AcquireMemoryGranularity % 2 == 0, "AcquireMemoryGranularity must be divisible by 2");
+
+ for (size_t rank = 1; rank < SmallRankCount; ++rank) {
+ // There is no std::make_unique overload with custom deleter.
+ SmallArenas_[rank].Store(New<TSmallArena>(rank, TSlabAllocator::SegmentSize, memoryTracker, Profiler_));
+ }
+
+ LargeArena_.reset(new TLargeArena(memoryTracker, profiler));
+}
+
void TSlabAllocator::TLargeArenaDeleter::operator() (TLargeArena* arena)
{
arena->Unref();
@@ -385,8 +446,8 @@ void* TSlabAllocator::Allocate(size_t size)
uintptr_t tag = 0;
void* ptr = nullptr;
- if (size < NYTAlloc::LargeAllocationSizeThreshold) {
- auto rank = NYTAlloc::SizeToSmallRank(size);
+ if (size < LargeAllocationSizeThreshold) {
+ auto rank = SizeToSmallRank(size);
auto arena = SmallArenas_[rank].Acquire();
YT_VERIFY(arena);
@@ -413,7 +474,7 @@ void* TSlabAllocator::Allocate(size_t size)
bool TSlabAllocator::IsReallocationNeeded() const
{
- for (size_t rank = 1; rank < NYTAlloc::SmallRankCount; ++rank) {
+ for (size_t rank = 1; rank < SmallRankCount; ++rank) {
auto arena = SmallArenas_[rank].Acquire();
if (arena->IsReallocationNeeded()) {
return true;
@@ -422,10 +483,16 @@ bool TSlabAllocator::IsReallocationNeeded() const
return false;
}
+bool TSlabAllocator::IsReallocationNeeded(const void* ptr)
+{
+ auto tag = *GetHeaderFromPtr(ptr);
+ return !TryGetLargeArenaFromTag(tag) && GetSmallArenaFromTag(tag)->IsReallocationNeeded();
+}
+
bool TSlabAllocator::ReallocateArenasIfNeeded()
{
bool hasReallocatedArenas = false;
- for (size_t rank = 1; rank < NYTAlloc::SmallRankCount; ++rank) {
+ for (size_t rank = 1; rank < SmallRankCount; ++rank) {
auto arena = SmallArenas_[rank].Acquire();
if (arena->IsReallocationNeeded()) {
SmallArenas_[rank].SwapIfCompare(
@@ -451,12 +518,6 @@ void TSlabAllocator::Free(void* ptr)
}
}
-bool IsReallocationNeeded(const void* ptr)
-{
- auto tag = *GetHeaderFromPtr(ptr);
- return !TryGetLargeArenaFromTag(tag) && GetSmallArenaFromTag(tag)->IsReallocationNeeded();
-}
-
/////////////////////////////////////////////////////////////////////////////
} // namespace NYT
diff --git a/yt/yt/core/misc/slab_allocator.h b/yt/yt/core/misc/slab_allocator.h
index 2313ad2550..878964c300 100644
--- a/yt/yt/core/misc/slab_allocator.h
+++ b/yt/yt/core/misc/slab_allocator.h
@@ -1,15 +1,11 @@
#pragma once
-#include "common.h"
-#include "error.h"
-#include "memory_usage_tracker.h"
+#include "public.h"
#include <yt/yt/core/misc/atomic_ptr.h>
#include <yt/yt/library/profiling/sensor.h>
-#include <library/cpp/ytalloc/api/ytalloc.h>
-
#include <library/cpp/yt/memory/free_list.h>
#include <array>
@@ -18,8 +14,7 @@ namespace NYT {
/////////////////////////////////////////////////////////////////////////////
-DECLARE_REFCOUNTED_CLASS(TSmallArena)
-
+DECLARE_REFCOUNTED_CLASS(TSmallArena);
class TLargeArena;
/////////////////////////////////////////////////////////////////////////////
@@ -35,6 +30,8 @@ public:
static void Free(void* ptr);
bool IsReallocationNeeded() const;
+ static bool IsReallocationNeeded(const void* ptr);
+
bool ReallocateArenasIfNeeded();
static constexpr size_t SegmentSize = 64_KB;
@@ -48,14 +45,11 @@ private:
void operator() (TLargeArena* arena);
};
- using TLargeArenaPtr = std::unique_ptr<TLargeArena, TLargeArenaDeleter>;
-
- TAtomicPtr<TSmallArena> SmallArenas_[NYTAlloc::SmallRankCount];
- TLargeArenaPtr LargeArena_;
+ static constexpr int SmallRankCount = 23;
+ std::array<TAtomicPtr<TSmallArena>, SmallRankCount> SmallArenas_;
+ std::unique_ptr<TLargeArena, TLargeArenaDeleter> LargeArena_;
};
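+// Example usage (illustrative sketch; profiler and memoryTracker are
+// caller-provided):
+//   TSlabAllocator allocator(profiler, memoryTracker);
+//   void* ptr = allocator.Allocate(100);  // served from the 128-byte small rank
+//   TSlabAllocator::Free(ptr);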
-bool IsReallocationNeeded(const void* ptr);
-
/////////////////////////////////////////////////////////////////////////////
} // namespace NYT