#include "chunked_memory_pool_output.h"
#include "chunked_memory_pool.h"
#include <library/cpp/yt/memory/ref.h>
namespace NYT {
////////////////////////////////////////////////////////////////////////////////
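// TChunkedMemoryPoolOutput accumulates written data in chunks drawn from a
// TChunkedMemoryPool and hands the written regions back as TMutableRef slices.
// The DoNext/DoUndo pair below follows the usual zero-copy output protocol
// (presumably an IZeroCopyOutput-style interface declared in
// chunked_memory_pool_output.h): DoNext exposes a writable region, DoUndo
// returns its unused suffix, and Finish collects everything actually written.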
TChunkedMemoryPoolOutput::TChunkedMemoryPoolOutput(TChunkedMemoryPool* pool, size_t chunkSize)
    : Pool_(pool)
    , ChunkSize_(chunkSize)
{ }

size_t TChunkedMemoryPoolOutput::DoNext(void** ptr)
{
    // Check if the current chunk is exhausted.
    if (Current_ == End_) {
        // Emplace the (whole) last chunk, if any.
        if (Begin_) {
            Refs_.emplace_back(Begin_, Current_);
        }
        // Allocate a new chunk.
        // Use |AllocateAligned| to get a chance to free some memory afterwards.
        // Tune the number of bytes requested from the pool to try to avoid extra allocations.
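        // For example, with ChunkSize_ == 4096 and 100 spare bytes left in the
        // pool's current chunk, only 100 bytes are requested below, so the
        // allocation can be served from the spare area instead of forcing the
        // pool to start a fresh chunk.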
        auto spareSize = Pool_->GetCurrentChunkSpareSize();
        auto allocationSize = (spareSize == 0 ? ChunkSize_ : std::min(ChunkSize_, spareSize));
        Begin_ = Pool_->AllocateAligned(allocationSize, /* align */ 1);
        Current_ = Begin_;
        End_ = Begin_ + allocationSize;
    }
    // Return the unused part of the current chunk.
    // This could be the whole chunk allocated above.
    *ptr = Current_;
    auto size = End_ - Current_;
    Current_ = End_;
    return size;
}

void TChunkedMemoryPoolOutput::DoUndo(size_t size)
{
    // Just rewind the current pointer.
    Current_ -= size;
    YT_VERIFY(Current_ >= Begin_);
}

std::vector<TMutableRef> TChunkedMemoryPoolOutput::Finish()
{
    // Emplace the used part of the last chunk, if any.
    if (Begin_) {
        Refs_.emplace_back(Begin_, Current_);
    }
    // Return the unused part of the last chunk to the pool, if any.
    if (Current_ < End_) {
        Pool_->Free(Current_, End_);
    }
    return std::move(Refs_);
}

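// Usage sketch (illustrative only, not part of the library). It assumes the
// public zero-copy interface declared in chunked_memory_pool_output.h exposes
// Next/Undo wrappers over the Do* methods above; FillSomePrefix is a
// hypothetical helper that writes into the provided buffer and returns the
// number of bytes written.
//
//   TChunkedMemoryPool pool;
//   TChunkedMemoryPoolOutput output(&pool, /* chunkSize */ 4096);
//
//   void* buffer;
//   size_t available = output.Next(&buffer);    // Acquire a writable region (calls DoNext).
//   size_t written = FillSomePrefix(buffer, available);
//   output.Undo(available - written);           // Return the unused suffix (calls DoUndo).
//
//   std::vector<TMutableRef> refs = output.Finish();  // Collect all written regions.
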
////////////////////////////////////////////////////////////////////////////////

} // namespace NYT