path: root/library/cpp/messagebus/actor/queue_for_actor.h
#pragma once

#include <util/generic/vector.h>
#include <util/system/yassert.h>
#include <util/thread/lfstack.h>
#include <util/thread/singleton.h>

// TODO: include from correct directory
#include "temp_tls_vector.h"

namespace NActor {
    namespace NPrivate {
        struct TTagForTl {};

    }

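    // Unbounded queue used to hand work items to an actor.
    // Backed by a lock-free stack: Enqueue/EnqueueAll may be called from any
    // thread, while DequeueAll assumes a single consumer at a time (it relies
    // on DequeueAllSingleConsumer). The destructor verifies that the queue
    // has been fully drained.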
    template <typename T>
    class TQueueForActor {
    private:
        TLockFreeStack<T> Queue;

    public:
        ~TQueueForActor() {
            Y_VERIFY(Queue.IsEmpty());
        }

        bool IsEmpty() {
            return Queue.IsEmpty();
        }

        void Enqueue(const T& value) {
            Queue.Enqueue(value);
        }

        template <typename TCollection>
        void EnqueueAll(const TCollection& all) {
            Queue.EnqueueAll(all);
        }

        void Clear() {
            TVector<T> tmp;
            Queue.DequeueAll(&tmp);
        }

        template <typename TFunc>
        void DequeueAll(const TFunc& func
                        // TODO: , std::enable_if_t<TFunctionParamCount<TFunc>::Value == 1>* = 0
        ) {
            TTempTlsVector<T> temp;

            Queue.DequeueAllSingleConsumer(temp.GetVector());

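            // The stack returns items in LIFO order; walk it in reverse so
            // the callback observes them in enqueue (FIFO) order.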
            for (typename TVector<T>::reverse_iterator i = temp.GetVector()->rbegin(); i != temp.GetVector()->rend(); ++i) {
                func(*i);
            }

            temp.Clear();

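            // Shrink the reused scratch vector if it has grown past 64 KiB,
            // so one large burst does not keep the memory reserved.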
            if (temp.Capacity() * sizeof(T) > 64 * 1024) {
                temp.Shrink();
            }
        }

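        // Fast path for the expected-common case of an empty queue: avoid
        // touching the scratch vector when there is nothing to dequeue.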
        template <typename TFunc>
        void DequeueAllLikelyEmpty(const TFunc& func) {
            if (Y_LIKELY(IsEmpty())) {
                return;
            }

            DequeueAll(func);
        }
    };

}
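
// Usage sketch (illustrative only, not part of this header), assuming one or
// more producer threads and a single consumer thread; the item type and the
// lambda below are made up for the example:
//
//     NActor::TQueueForActor<int> queue;
//
//     // Producers: any thread may enqueue concurrently.
//     queue.Enqueue(1);
//     queue.Enqueue(2);
//
//     // Consumer: exactly one thread drains at a time; the callback is
//     // invoked in enqueue order. The queue must be fully drained before
//     // destruction, since the destructor Y_VERIFYs emptiness.
//     queue.DequeueAll([](int item) {
//         // handle item
//     });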