aboutsummaryrefslogtreecommitdiffstats
path: root/contrib/clickhouse/src/Storages/MergeTree/RangesInDataPart.cpp
diff options
context:
space:
mode:
authorvitalyisaev <vitalyisaev@ydb.tech>2023-11-14 09:58:56 +0300
committervitalyisaev <vitalyisaev@ydb.tech>2023-11-14 10:20:20 +0300
commitc2b2dfd9827a400a8495e172a56343462e3ceb82 (patch)
treecd4e4f597d01bede4c82dffeb2d780d0a9046bd0 /contrib/clickhouse/src/Storages/MergeTree/RangesInDataPart.cpp
parentd4ae8f119e67808cb0cf776ba6e0cf95296f2df7 (diff)
downloadydb-c2b2dfd9827a400a8495e172a56343462e3ceb82.tar.gz
YQ Connector: move tests from yql to ydb (OSS)
Перенос папки с тестами на Коннектор из папки yql в папку ydb (синхронизируется с github).
Diffstat (limited to 'contrib/clickhouse/src/Storages/MergeTree/RangesInDataPart.cpp')
-rw-r--r--contrib/clickhouse/src/Storages/MergeTree/RangesInDataPart.cpp128
1 file changed, 128 insertions, 0 deletions
diff --git a/contrib/clickhouse/src/Storages/MergeTree/RangesInDataPart.cpp b/contrib/clickhouse/src/Storages/MergeTree/RangesInDataPart.cpp
new file mode 100644
index 0000000000..e64e9ab0b2
--- /dev/null
+++ b/contrib/clickhouse/src/Storages/MergeTree/RangesInDataPart.cpp
@@ -0,0 +1,128 @@
+#include <Storages/MergeTree/RangesInDataPart.h>
+
+#include <fmt/format.h>
+
+#include <IO/ReadHelpers.h>
+#include <IO/WriteHelpers.h>
+#include <Storages/MergeTree/IMergeTreeDataPart.h>
+#include "IO/VarInt.h"
+
+/// fmt integration: lets RangesInDataPartDescription be used directly as a
+/// "{}" argument (e.g. via fmt::join in RangesInDataPartsDescription::describe).
+/// Must live at global scope, outside namespace DB, because it specializes a
+/// template owned by fmt.
+template <>
+struct fmt::formatter<DB::RangesInDataPartDescription>
+{
+    /// No format-spec options are supported; the spec is expected to be empty
+    /// (a non-empty spec would be silently ignored here).
+    static constexpr auto parse(format_parse_context & ctx) { return ctx.begin(); }
+
+    template <typename FormatContext>
+    auto format(const DB::RangesInDataPartDescription & range, FormatContext & ctx)
+    {
+        /// Delegate to the type's own textual form so formatted output matches
+        /// describe() everywhere.
+        return fmt::format_to(ctx.out(), "{}", range.describe());
+    }
+};
+
+namespace DB
+{
+
+/// Error codes referenced by this translation unit; the values are defined
+/// centrally in ErrorCodes.cpp.
+namespace ErrorCodes
+{
+    extern const int TOO_LARGE_ARRAY_SIZE;
+}
+
+
+/// Write this description to `out`. Wire format: part info first, then the
+/// mark ranges — deserialize() must read the fields in exactly this order.
+void RangesInDataPartDescription::serialize(WriteBuffer & out) const
+{
+    info.serialize(out);
+    ranges.serialize(out);
+}
+
+/// Human-readable summary: the part's V1 name plus its comma-separated mark ranges.
+String RangesInDataPartDescription::describe() const
+{
+    return fmt::format("part {} with ranges [{}]", info.getPartNameV1(), fmt::join(ranges, ","));
+}
+
+/// Read this description from `in`, mirroring serialize(): part info first,
+/// then the mark ranges.
+void RangesInDataPartDescription::deserialize(ReadBuffer & in)
+{
+    info.deserialize(in);
+    ranges.deserialize(in);
+}
+
+/// Write the whole collection to `out`. Wire format: varint element count,
+/// followed by each element's own serialized form in order.
+void RangesInDataPartsDescription::serialize(WriteBuffer & out) const
+{
+    writeVarUInt(this->size(), out);
+    for (const auto & desc : *this)
+        desc.serialize(out);
+}
+
+/// Summary of the whole collection: "<count> parts: [<desc>, <desc>, ...]".
+/// Each element is rendered through the fmt::formatter specialization above.
+String RangesInDataPartsDescription::describe() const
+{
+    String summary = fmt::format("{} parts: [{}]", this->size(), fmt::join(*this, ", "));
+    return summary;
+}
+
+/// Read the whole collection from `in`, mirroring serialize(): varint element
+/// count, then that many serialized descriptions.
+/// Throws TOO_LARGE_ARRAY_SIZE if the declared count is absurd, so corrupted
+/// or malicious input cannot trigger a huge allocation in resize().
+void RangesInDataPartsDescription::deserialize(ReadBuffer & in)
+{
+    size_t new_size = 0;
+    readVarUInt(new_size, in);
+    /// Sanity bound checked BEFORE resize(); the message previously said
+    /// "hash table" (copy-pasted from a hash-table deserializer) which
+    /// misidentified the data being read.
+    if (new_size > 100'000'000'000)
+        throw DB::Exception(DB::ErrorCodes::TOO_LARGE_ARRAY_SIZE, "The size of serialized ranges-in-parts description is suspiciously large: {}", new_size);
+
+    this->resize(new_size);
+    for (auto & desc : *this)
+        desc.deserialize(in);
+}
+
+/// Append every description from `other` to this collection, preserving order.
+/// `other` is only read, never modified.
+void RangesInDataPartsDescription::merge(RangesInDataPartsDescription & other)
+{
+    this->insert(this->end(), other.begin(), other.end());
+}
+
+/// Produce a lightweight, serializable description of this part selection:
+/// just the part's identity (info) and the selected mark ranges, without any
+/// reference to the in-memory data part itself.
+RangesInDataPartDescription RangesInDataPart::getDescription() const
+{
+    return RangesInDataPartDescription{
+        .info = data_part->info,
+        .ranges = ranges,
+    };
+}
+
+/// Total number of marks selected in this part: each half-open range
+/// [begin, end) contributes end - begin marks.
+size_t RangesInDataPart::getMarksCount() const
+{
+    size_t marks = 0;
+    for (const auto & mark_range : ranges)
+        marks += mark_range.end - mark_range.begin;
+    return marks;
+}
+
+/// Total number of rows covered by the selected ranges, resolved via the
+/// part's index granularity (marks do not map to a fixed row count).
+size_t RangesInDataPart::getRowsCount() const
+{
+    return data_part->index_granularity.getRowsCountInRanges(ranges);
+}
+
+
+/// Build one lightweight description per part, preserving part order.
+RangesInDataPartsDescription RangesInDataParts::getDescriptions() const
+{
+    RangesInDataPartsDescription descriptions;
+    for (const auto & part_with_ranges : *this)
+        descriptions.emplace_back(part_with_ranges.getDescription());
+    return descriptions;
+}
+
+
+/// Sum of getMarksCount() over every part in the collection.
+size_t RangesInDataParts::getMarksCountAllParts() const
+{
+    size_t total_marks = 0;
+    for (const auto & part_with_ranges : *this)
+        total_marks += part_with_ranges.getMarksCount();
+    return total_marks;
+}
+
+/// Sum of getRowsCount() over every part in the collection.
+size_t RangesInDataParts::getRowsCountAllParts() const
+{
+    size_t total_rows = 0;
+    for (const auto & part_with_ranges : *this)
+        total_rows += part_with_ranges.getRowsCount();
+    return total_rows;
+}
+
+}