#pragma once

#include <algorithm>
#include <mutex>

#include <boost/core/noncopyable.hpp>

#include <Core/Defines.h>
#if __has_include(<sanitizer/asan_interface.h>) && defined(ADDRESS_SANITIZER)
#   include <sanitizer/asan_interface.h>
#endif
#include <Common/Allocator.h>
#include <Common/Arena.h>
#include <Common/BitHelpers.h>


namespace DB
{


/** Unlike Arena, allows you to release (for later reuse)
  *  previously allocated (not necessarily just recently) chunks of memory.
  * To make this possible, the requested size is rounded up to a power of two
  *  (or up to 8 if it is smaller); sizes greater than 65536 bypass the Arena
  *  and are allocated with the Allocator directly.
  * For each size class (14 in all: 8, 16, ..., 65536) a singly-linked list
  *  of free blocks is maintained.
  * When allocating, we take the head of the corresponding free list,
  *  or, if the list is empty, allocate a new block from the Arena.
  */
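/** A minimal usage sketch (illustrative only, not part of the interface):
  *
  *     ArenaWithFreeLists arena;
  *     char * buf = arena.alloc(100);   /// rounded up to the 128-byte size class
  *     arena.free(buf, 100);            /// pushed onto the 128-byte free list
  *     char * buf2 = arena.alloc(120);  /// same size class, so the freed block is reused
  */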
class ArenaWithFreeLists : private Allocator<false>, private boost::noncopyable
{
private:
    /// If the block is free, a pointer to the next free block (or nullptr, if there are no more free blocks) is stored at its beginning.
    /// If the block is in use, it holds the user's data.
    union Block
    {
        Block * next;
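        /// Note: data[0] is a zero-length array (a GNU extension used as a
        /// flexible array member), so `data` names the block's bytes without
        /// contributing to sizeof(Block).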
        char data[0];
    };

    /// The maximum size of a block that is allocated from the Arena; larger requests go to the Allocator directly.
    static constexpr size_t max_fixed_block_size = 65536;

    /// Get the index in the freelist array for the specified size.
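    /// Sizes up to 8 map to index 2; otherwise the index is the position of the
    /// highest set bit of (size - 1). The block size for index i is 1ULL << (i + 1):
    /// e.g. size 100 -> index 6 -> a 128-byte block; size 65536 -> index 15.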
    static size_t findFreeListIndex(const size_t size)
    {
        return size <= 8 ? 2 : bitScanReverse(size - 1);
    }

    /// The Arena from which blocks of up to max_fixed_block_size bytes are allocated.
    Arena pool;

    /// Lists of free blocks. Each element points to the head of the corresponding list, or is nullptr.
    /// The first two elements are not used, but are intended to simplify arithmetic.
    Block * free_lists[16] {};

public:
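    /// The constructor parameters are forwarded verbatim to the underlying Arena:
    /// the initial chunk size, the growth factor between chunks, and the threshold
    /// after which chunk sizes grow linearly rather than geometrically (see Arena
    /// for the exact growth policy).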
    explicit ArenaWithFreeLists(
        const size_t initial_size = 4096, const size_t growth_factor = 2,
        const size_t linear_growth_threshold = 128 * 1024 * 1024)
        : pool{initial_size, growth_factor, linear_growth_threshold}
    {
    }

    char * alloc(const size_t size)
    {
        if (size > max_fixed_block_size)
            return static_cast<char *>(Allocator<false>::alloc(size));

        /// Find the free list for the required size.
        const auto list_idx = findFreeListIndex(size);

        /// If there is a free block.
        if (auto & free_block_ptr = free_lists[list_idx])
        {
            /// Take it, and advance the head of the list to the next
            /// item. We poisoned the free block before putting it into
            /// the free list, so we have to unpoison it before reading
            /// anything.
            ASAN_UNPOISON_MEMORY_REGION(free_block_ptr,
                                        std::max(size, sizeof(Block)));

            auto * const res = free_block_ptr->data;
            free_block_ptr = free_block_ptr->next;
            return res;
        }

        /// No free block of the corresponding size; allocate a new one from the Arena.
        return pool.alloc(1ULL << (list_idx + 1));
    }

    void free(char * ptr, const size_t size)
    {
        if (size > max_fixed_block_size)
            return Allocator<false>::free(ptr, size);

        /// Find the free list for the required size.
        const auto list_idx = findFreeListIndex(size);

        /// Insert the released block into the head of the list.
        auto & free_block_ptr = free_lists[list_idx];
        auto * const old_head = free_block_ptr;
        free_block_ptr = reinterpret_cast<Block *>(ptr);
        free_block_ptr->next = old_head;

        /// The requested size may be less than the size of the block, but
        /// we still want to poison the entire block.
        /// Strictly speaking, the free blocks must be unpoisoned in the
        /// destructor, to support an underlying allocator that doesn't
        /// integrate with asan. We don't do that, and rely on the fact that
        /// our underlying allocator is Arena, which does have asan integration.
        ASAN_POISON_MEMORY_REGION(ptr, 1ULL << (list_idx + 1));
    }

    /// Size of the allocated pool in bytes
    size_t allocatedBytes() const { return pool.allocatedBytes(); }
};

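/** A thread-safe wrapper around ArenaWithFreeLists: every call is serialized
  * through a single mutex, so one instance can be shared between threads at
  * the cost of a lock acquisition per alloc/free.
  */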
class SynchronizedArenaWithFreeLists : private ArenaWithFreeLists
{
public:
    explicit SynchronizedArenaWithFreeLists(
        const size_t initial_size = 4096, const size_t growth_factor = 2,
        const size_t linear_growth_threshold = 128 * 1024 * 1024)
        : ArenaWithFreeLists{initial_size, growth_factor, linear_growth_threshold}
    {}

    char * alloc(const size_t size)
    {
        std::lock_guard lock{mutex};
        return ArenaWithFreeLists::alloc(size);
    }

    void free(char * ptr, const size_t size)
    {
        std::lock_guard lock{mutex};
        return ArenaWithFreeLists::free(ptr, size);
    }

    /// Size of the allocated pool in bytes
    size_t allocatedBytes() const
    {
        std::lock_guard lock{mutex};
        return ArenaWithFreeLists::allocatedBytes();
    }
private:
    mutable std::mutex mutex;
};

}