author     omakovski <omakovski@yandex-team.ru>          2022-02-10 16:49:30 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:49:30 +0300
commit     5feb3b4d96aaa42ce546426241c48d626e6d6685 (patch)
tree       728359f39c2eeab2b894f3d8664ea1499ffde216 /util
parent     077ab504815199e62ffc54daee873cf1d6b64297 (diff)
download   ydb-5feb3b4d96aaa42ce546426241c48d626e6d6685.tar.gz
Restoring authorship annotation for <omakovski@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'util')
-rw-r--r--  util/memory/pool.cpp     10
-rw-r--r--  util/memory/pool.h       22
-rw-r--r--  util/memory/pool_ut.cpp  80
-rw-r--r--  util/system/thread.cpp    2
-rw-r--r--  util/thread/lfqueue.h    44
5 files changed, 79 insertions, 79 deletions
diff --git a/util/memory/pool.cpp b/util/memory/pool.cpp
index 9a011f0e4f..58cbd22ed1 100644
--- a/util/memory/pool.cpp
+++ b/util/memory/pool.cpp
@@ -11,11 +11,11 @@ TMemoryPool::IGrowPolicy* TMemoryPool::TExpGrow::Instance() noexcept {
void TMemoryPool::AddChunk(size_t hint) {
const size_t dataLen = Max(BlockSize_, hint);
- size_t allocSize = dataLen + sizeof(TChunk);
- if (Options_.RoundUpToNextPowerOfTwo) {
- allocSize = FastClp2(allocSize);
- }
- TBlock nb = Alloc_->Allocate(allocSize);
+ size_t allocSize = dataLen + sizeof(TChunk);
+ if (Options_.RoundUpToNextPowerOfTwo) {
+ allocSize = FastClp2(allocSize);
+ }
+ TBlock nb = Alloc_->Allocate(allocSize);
// Add previous chunk's stats
if (Current_ != &Empty_) {
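For a sense of what RoundUpToNextPowerOfTwo changes in AddChunk, here is the arithmetic with the constants used by the pool_ut.cpp test further down, assuming sizeof(TChunk) is 32 on this platform (which is what the test's "+ 32" expectation implies):

// dataLen   = (1024 - 16) * 4096 - 16 - 16 - 32 = 4128704
// allocSize = dataLen + sizeof(TChunk)          = 4128736   <- requested when the option is false
// FastClp2(4128736)                             = 4194304   <- requested when the option is true (2^22, ~64 KiB of slack per chunk)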
diff --git a/util/memory/pool.h b/util/memory/pool.h
index 13c8b6b9ed..8f04e66969 100644
--- a/util/memory/pool.h
+++ b/util/memory/pool.h
@@ -123,20 +123,20 @@ public:
static IGrowPolicy* Instance() noexcept;
};
- struct TOptions {
- bool RoundUpToNextPowerOfTwo;
- TOptions()
- : RoundUpToNextPowerOfTwo(true)
- {
- }
- };
-
- inline TMemoryPool(size_t initial, IGrowPolicy* grow = TExpGrow::Instance(), IAllocator* alloc = TDefaultAllocator::Instance(), const TOptions& options = TOptions())
+ struct TOptions {
+ bool RoundUpToNextPowerOfTwo;
+ TOptions()
+ : RoundUpToNextPowerOfTwo(true)
+ {
+ }
+ };
+
+ inline TMemoryPool(size_t initial, IGrowPolicy* grow = TExpGrow::Instance(), IAllocator* alloc = TDefaultAllocator::Instance(), const TOptions& options = TOptions())
: Current_(&Empty_)
, BlockSize_(initial)
, GrowPolicy_(grow)
, Alloc_(alloc)
- , Options_(options)
+ , Options_(options)
, Origin_(initial)
, MemoryAllocatedBeforeCurrent_(0)
, MemoryWasteBeforeCurrent_(0)
@@ -299,7 +299,7 @@ private:
size_t BlockSize_;
IGrowPolicy* GrowPolicy_;
IAllocator* Alloc_;
- TOptions Options_;
+ TOptions Options_;
TChunkList Chunks_;
const size_t Origin_;
size_t MemoryAllocatedBeforeCurrent_;
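A minimal usage sketch of the TOptions-taking constructor declared above; it is illustrative only, assumes the util headers are available as in this repository, and the block size and allocation size are arbitrary:

#include <util/memory/pool.h>

int main() {
    // Ask the pool for exact-size chunk allocations instead of power-of-two rounding.
    TMemoryPool::TOptions options;
    options.RoundUpToNextPowerOfTwo = false;

    TMemoryPool pool(4096, TMemoryPool::TExpGrow::Instance(),
                     TDefaultAllocator::Instance(), options);

    void* mem = pool.Allocate(128); // served from the pool's current chunk
    (void)mem;                      // memory is reclaimed when the pool is destroyed
    return 0;
}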
diff --git a/util/memory/pool_ut.cpp b/util/memory/pool_ut.cpp
index 1158a8ca42..2797058336 100644
--- a/util/memory/pool_ut.cpp
+++ b/util/memory/pool_ut.cpp
@@ -83,7 +83,7 @@ class TMemPoolTest: public TTestBase {
UNIT_TEST(TestZeroArray)
UNIT_TEST(TestLargeStartingAlign)
UNIT_TEST(TestMoveAlloc)
- UNIT_TEST(TestRoundUpToNextPowerOfTwoOption)
+ UNIT_TEST(TestRoundUpToNextPowerOfTwoOption)
UNIT_TEST_SUITE_END();
private:
@@ -239,47 +239,47 @@ private:
CheckMoveAlloc<TNoCopy>();
CheckMoveAlloc<TErrorOnCopy>();
}
-
- void TestRoundUpToNextPowerOfTwoOption() {
- const size_t MEMORY_POOL_BLOCK_SIZE = (1024 - 16) * 4096 - 16 - 16 - 32;
-
+
+ void TestRoundUpToNextPowerOfTwoOption() {
+ const size_t MEMORY_POOL_BLOCK_SIZE = (1024 - 16) * 4096 - 16 - 16 - 32;
+
class TFixedBlockSizeMemoryPoolPolicy final: public TMemoryPool::IGrowPolicy {
- public:
- size_t Next(size_t /*prev*/) const noexcept override {
- return MEMORY_POOL_BLOCK_SIZE;
- }
- };
- TFixedBlockSizeMemoryPoolPolicy allocationPolicy;
-
+ public:
+ size_t Next(size_t /*prev*/) const noexcept override {
+ return MEMORY_POOL_BLOCK_SIZE;
+ }
+ };
+ TFixedBlockSizeMemoryPoolPolicy allocationPolicy;
+
class TTestAllocator final: public TDefaultAllocator {
- public:
- TBlock Allocate(size_t len) override {
- Size_ += len;
- return TDefaultAllocator::Allocate(len);
- }
-
- size_t GetSize() const {
- return Size_;
- }
-
- private:
- size_t Size_ = 0;
- };
-
- TTestAllocator allocator;
-
- TMemoryPool::TOptions options;
- options.RoundUpToNextPowerOfTwo = false;
-
- constexpr size_t EXPECTED_ALLOCATION_SIZE = MEMORY_POOL_BLOCK_SIZE + 32;
- TMemoryPool pool(MEMORY_POOL_BLOCK_SIZE, &allocationPolicy, &allocator, options);
-
- pool.Allocate(MEMORY_POOL_BLOCK_SIZE);
- UNIT_ASSERT_VALUES_EQUAL(EXPECTED_ALLOCATION_SIZE, allocator.GetSize());
-
- pool.Allocate(1);
- UNIT_ASSERT_VALUES_EQUAL(2 * EXPECTED_ALLOCATION_SIZE, allocator.GetSize());
- }
+ public:
+ TBlock Allocate(size_t len) override {
+ Size_ += len;
+ return TDefaultAllocator::Allocate(len);
+ }
+
+ size_t GetSize() const {
+ return Size_;
+ }
+
+ private:
+ size_t Size_ = 0;
+ };
+
+ TTestAllocator allocator;
+
+ TMemoryPool::TOptions options;
+ options.RoundUpToNextPowerOfTwo = false;
+
+ constexpr size_t EXPECTED_ALLOCATION_SIZE = MEMORY_POOL_BLOCK_SIZE + 32;
+ TMemoryPool pool(MEMORY_POOL_BLOCK_SIZE, &allocationPolicy, &allocator, options);
+
+ pool.Allocate(MEMORY_POOL_BLOCK_SIZE);
+ UNIT_ASSERT_VALUES_EQUAL(EXPECTED_ALLOCATION_SIZE, allocator.GetSize());
+
+ pool.Allocate(1);
+ UNIT_ASSERT_VALUES_EQUAL(2 * EXPECTED_ALLOCATION_SIZE, allocator.GetSize());
+ }
};
UNIT_TEST_SUITE_REGISTRATION(TMemPoolTest);
diff --git a/util/system/thread.cpp b/util/system/thread.cpp
index 6236746c2d..34bdb39384 100644
--- a/util/system/thread.cpp
+++ b/util/system/thread.cpp
@@ -207,7 +207,7 @@ namespace {
TParams* holdP = P_.Release();
int err = pthread_create(&H_, pattrs, ThreadProxy, holdP);
if (err) {
- H_ = {};
+ H_ = {};
P_.Reset(holdP);
PCHECK(err, "failed to create thread");
}
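The hunk above only re-indents the cleanup on pthread_create failure; the pattern it preserves is: release ownership of the argument block before the call, and take it back (leaving the handle empty) if creation fails. A standalone sketch of that pattern, assuming a POSIX environment; TParams and ThreadProxy here are stand-ins for the ones in util/system/thread.cpp:

#include <pthread.h>
#include <cstdio>
#include <memory>

struct TParams {
    int Value = 42;
};

static void* ThreadProxy(void* arg) {
    // The new thread takes ownership of the argument block.
    std::unique_ptr<TParams> params(static_cast<TParams*>(arg));
    std::printf("value = %d\n", params->Value);
    return nullptr;
}

int main() {
    std::unique_ptr<TParams> params(new TParams());
    pthread_t handle{};

    TParams* holdP = params.release(); // ownership passes to the thread only on success
    int err = pthread_create(&handle, nullptr, ThreadProxy, holdP);
    if (err) {
        handle = {};          // leave the handle in its empty state
        params.reset(holdP);  // take the argument back so it is not leaked
        std::fprintf(stderr, "failed to create thread: %d\n", err);
        return 1;
    }

    pthread_join(handle, nullptr);
    return 0;
}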
diff --git a/util/thread/lfqueue.h b/util/thread/lfqueue.h
index ab523631e4..49c21dd0d7 100644
--- a/util/thread/lfqueue.h
+++ b/util/thread/lfqueue.h
@@ -77,7 +77,7 @@ class TLockFreeQueue: public TNonCopyable {
void TryToFreeAsyncMemory() {
TAtomic keepCounter = AtomicAdd(FreeingTaskCounter, 0);
- TRootNode* current = AtomicGet(FreePtr);
+ TRootNode* current = AtomicGet(FreePtr);
if (current == nullptr)
return;
if (AtomicAdd(FreememCounter, 0) == 1) {
@@ -89,8 +89,8 @@ class TLockFreeQueue: public TNonCopyable {
if (AtomicCas(&FreePtr, (TRootNode*)nullptr, current)) {
// free list
while (current) {
- TRootNode* p = AtomicGet(current->NextFree);
- EraseList(AtomicGet(current->ToDelete));
+ TRootNode* p = AtomicGet(current->NextFree);
+ EraseList(AtomicGet(current->ToDelete));
delete current;
current = p;
}
@@ -106,10 +106,10 @@ class TLockFreeQueue: public TNonCopyable {
AtomicAdd(FreememCounter, -1);
}
void AsyncDel(TRootNode* toDelete, TListNode* lst) {
- AtomicSet(toDelete->ToDelete, lst);
+ AtomicSet(toDelete->ToDelete, lst);
for (;;) {
- AtomicSet(toDelete->NextFree, AtomicGet(FreePtr));
- if (AtomicCas(&FreePtr, toDelete, AtomicGet(toDelete->NextFree)))
+ AtomicSet(toDelete->NextFree, AtomicGet(FreePtr));
+ if (AtomicCas(&FreePtr, toDelete, AtomicGet(toDelete->NextFree)))
break;
}
}
@@ -174,11 +174,11 @@ class TLockFreeQueue: public TNonCopyable {
void EnqueueImpl(TListNode* head, TListNode* tail) {
TRootNode* newRoot = new TRootNode;
AsyncRef();
- AtomicSet(newRoot->PushQueue, head);
+ AtomicSet(newRoot->PushQueue, head);
for (;;) {
- TRootNode* curRoot = AtomicGet(JobQueue);
+ TRootNode* curRoot = AtomicGet(JobQueue);
AtomicSet(tail->Next, AtomicGet(curRoot->PushQueue));
- AtomicSet(newRoot->PopQueue, AtomicGet(curRoot->PopQueue));
+ AtomicSet(newRoot->PopQueue, AtomicGet(curRoot->PopQueue));
newRoot->CopyCounter(curRoot);
for (TListNode* node = head;; node = AtomicGet(node->Next)) {
@@ -276,18 +276,18 @@ public:
TListInvertor listInvertor;
AsyncRef();
for (;;) {
- TRootNode* curRoot = AtomicGet(JobQueue);
- TListNode* tail = AtomicGet(curRoot->PopQueue);
+ TRootNode* curRoot = AtomicGet(JobQueue);
+ TListNode* tail = AtomicGet(curRoot->PopQueue);
if (tail) {
// has elems to pop
if (!newRoot)
newRoot = new TRootNode;
- AtomicSet(newRoot->PushQueue, AtomicGet(curRoot->PushQueue));
+ AtomicSet(newRoot->PushQueue, AtomicGet(curRoot->PushQueue));
AtomicSet(newRoot->PopQueue, AtomicGet(tail->Next));
newRoot->CopyCounter(curRoot);
newRoot->DecCount(tail->Data);
- Y_ASSERT(AtomicGet(curRoot->PopQueue) == tail);
+ Y_ASSERT(AtomicGet(curRoot->PopQueue) == tail);
if (AtomicCas(&JobQueue, newRoot, curRoot)) {
*data = std::move(tail->Data);
AtomicSet(tail->Next, nullptr);
@@ -296,7 +296,7 @@ public:
}
continue;
}
- if (AtomicGet(curRoot->PushQueue) == nullptr) {
+ if (AtomicGet(curRoot->PushQueue) == nullptr) {
delete newRoot;
AsyncUnref();
return false; // no elems to pop
@@ -304,17 +304,17 @@ public:
if (!newRoot)
newRoot = new TRootNode;
- AtomicSet(newRoot->PushQueue, nullptr);
- listInvertor.DoCopy(AtomicGet(curRoot->PushQueue));
+ AtomicSet(newRoot->PushQueue, nullptr);
+ listInvertor.DoCopy(AtomicGet(curRoot->PushQueue));
AtomicSet(newRoot->PopQueue, listInvertor.Copy);
newRoot->CopyCounter(curRoot);
- Y_ASSERT(AtomicGet(curRoot->PopQueue) == nullptr);
+ Y_ASSERT(AtomicGet(curRoot->PopQueue) == nullptr);
if (AtomicCas(&JobQueue, newRoot, curRoot)) {
newRoot = nullptr;
listInvertor.CopyWasUsed();
- AsyncDel(curRoot, AtomicGet(curRoot->PushQueue));
+ AsyncDel(curRoot, AtomicGet(curRoot->PushQueue));
} else {
- AtomicSet(newRoot->PopQueue, nullptr);
+ AtomicSet(newRoot->PopQueue, nullptr);
}
}
}
@@ -345,14 +345,14 @@ public:
}
bool IsEmpty() {
AsyncRef();
- TRootNode* curRoot = AtomicGet(JobQueue);
- bool res = AtomicGet(curRoot->PushQueue) == nullptr && AtomicGet(curRoot->PopQueue) == nullptr;
+ TRootNode* curRoot = AtomicGet(JobQueue);
+ bool res = AtomicGet(curRoot->PushQueue) == nullptr && AtomicGet(curRoot->PopQueue) == nullptr;
AsyncUnref();
return res;
}
TCounter GetCounter() {
AsyncRef();
- TRootNode* curRoot = AtomicGet(JobQueue);
+ TRootNode* curRoot = AtomicGet(JobQueue);
TCounter res = *(TCounter*)curRoot;
AsyncUnref();
return res;
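For completeness, a sketch of how the queue touched above is driven from user code. The Dequeue(T*) -> bool, IsEmpty() and GetCounter() shapes come from the hunks above; the Enqueue(const T&) overload is assumed from the EnqueueImpl helper:

#include <util/thread/lfqueue.h>
#include <cstdio>

int main() {
    TLockFreeQueue<int> queue;

    // Enqueue/Dequeue may be called from many threads; a single thread is
    // enough to exercise the code paths touched in this diff.
    queue.Enqueue(1);
    queue.Enqueue(2);

    int value = 0;
    while (queue.Dequeue(&value)) { // returns false once PushQueue and PopQueue are both empty
        std::printf("%d\n", value);
    }

    return queue.IsEmpty() ? 0 : 1;
}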