path: root/contrib/clickhouse/src/Disks/ObjectStorages/ObjectStorageIteratorAsync.cpp
blob: 7425f629a5a5319463ca0836b77570d77a618020
#include <Disks/ObjectStorages/ObjectStorageIteratorAsync.h>

#include <Common/logger_useful.h>

namespace DB
{

namespace ErrorCodes
{
    extern const int LOGICAL_ERROR;
}

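/// Waits for the previously scheduled listing to finish and replaces the current batch
/// with its result. On the first call the initial listing is scheduled lazily. If the
/// storage reports that more objects remain, the next listing is scheduled right away so
/// it runs in the background while the batch is being consumed; otherwise the iterator is
/// marked finished and subsequent calls just clear the batch.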
void IObjectStorageIteratorAsync::nextBatch()
{
    std::lock_guard lock(mutex);
    if (!is_finished)
    {
        if (!is_initialized)
        {
            outcome_future = scheduleBatch();
            is_initialized = true;
        }

        BatchAndHasNext next_batch = outcome_future.get();
        current_batch = std::move(next_batch.batch);
        accumulated_size.fetch_add(current_batch.size(), std::memory_order_relaxed);
        current_batch_iterator = current_batch.begin();
        if (next_batch.has_next)
            outcome_future = scheduleBatch();
        else
            is_finished = true;
    }
    else
    {
        current_batch.clear();
        current_batch_iterator = current_batch.begin();
    }
}

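/// Advances the iterator to the next object. When the current batch is exhausted, the
/// prefetched batch (if any) is switched in, so iteration via isValid()/next() continues
/// seamlessly across batch boundaries.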
void IObjectStorageIteratorAsync::next()
{
    std::lock_guard lock(mutex);

    if (current_batch_iterator != current_batch.end())
        ++current_batch_iterator;

    /// The batch is exhausted: pull in the prefetched batch immediately, otherwise a
    /// subsequent isValid() would report the end of iteration even though the storage
    /// still has objects to list.
    if (current_batch_iterator == current_batch.end() && !is_finished && outcome_future.valid())
    {
        BatchAndHasNext next_batch = outcome_future.get();
        current_batch = std::move(next_batch.batch);
        accumulated_size.fetch_add(current_batch.size(), std::memory_order_relaxed);
        current_batch_iterator = current_batch.begin();
        if (next_batch.has_next)
            outcome_future = scheduleBatch();
        else
            is_finished = true;
    }
}

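/// Submits a listing task to list_objects_scheduler. The task fills a batch via
/// getBatchAndCheckNext() and reports whether more objects remain after it.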
std::future<IObjectStorageIteratorAsync::BatchAndHasNext> IObjectStorageIteratorAsync::scheduleBatch()
{
    return list_objects_scheduler([this]
    {
        BatchAndHasNext result;
        result.has_next = getBatchAndCheckNext(result.batch);
        return result;
    }, Priority{});
}


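/// Returns true while the iterator points at an object. The initial listing is
/// triggered lazily on the first call.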
bool IObjectStorageIteratorAsync::isValid()
{
    if (!is_initialized)
        nextBatch();

    std::lock_guard lock(mutex);
    return current_batch_iterator != current_batch.end();
}

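/// Returns the object the iterator currently points at; throws if the iterator is exhausted.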
RelativePathWithMetadata IObjectStorageIteratorAsync::current()
{
    if (!isValid())
        throw Exception(ErrorCodes::LOGICAL_ERROR, "Trying to access invalid iterator");

    std::lock_guard lock(mutex);
    return *current_batch_iterator;
}


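/// Returns a copy of the whole batch the iterator currently points into; throws if the
/// iterator is exhausted.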
RelativePathsWithMetadata IObjectStorageIteratorAsync::currentBatch()
{
    if (!isValid())
        throw Exception(ErrorCodes::LOGICAL_ERROR, "Trying to access invalid iterator");

    std::lock_guard lock(mutex);
    return current_batch;
}

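/// Returns the whole current batch (if any) and advances to the next one in a single call.
/// Note that nextBatch() acquires the mutex again while it is already held here, so this
/// code path relies on the mutex declared in the header being re-entrant.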
std::optional<RelativePathsWithMetadata> IObjectStorageIteratorAsync::getCurrrentBatchAndScheduleNext()
{
    std::lock_guard lock(mutex);
    if (!is_initialized)
        nextBatch();

    if (current_batch_iterator != current_batch.end())
    {
        auto temp_current_batch = current_batch;
        nextBatch();
        return temp_current_batch;
    }

    return std::nullopt;
}

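/// Total number of objects returned by all batches listed so far.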
size_t IObjectStorageIteratorAsync::getAccumulatedSize() const
{
    return accumulated_size.load(std::memory_order_relaxed);
}

}