path: root/util
author     udovichenko-r <udovichenko-r@yandex-team.ru>   2022-02-10 16:49:22 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>   2022-02-10 16:49:22 +0300
commit     a6f6b22bda21d53d07bd2e0ff63a2b2abf69ede9 (patch)
tree       5d5cb817648f650d76cf1076100726fd9b8448e8 /util
parent     d7e4eaec9d325e188dabb3eb1949a32a5229e9ce (diff)
download   ydb-a6f6b22bda21d53d07bd2e0ff63a2b2abf69ede9.tar.gz
Restoring authorship annotation for <udovichenko-r@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'util')
-rw-r--r--  util/generic/algorithm.h      8
-rw-r--r--  util/generic/bitmap.h      1222
-rw-r--r--  util/generic/bitmap_ut.cpp  772
-rw-r--r--  util/generic/ptr.h            2
-rw-r--r--  util/stream/debug.cpp        12
-rw-r--r--  util/stream/debug.h           2
-rw-r--r--  util/stream/trace.h          28
7 files changed, 1023 insertions, 1023 deletions
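
Most of the diff below is whitespace-only re-annotation of util/generic/bitmap.h and its unit tests, plus a small hunk in algorithm.h. For orientation, here is a minimal usage sketch of the TBitMap/TDynBitMap API declared in the touched header. It is not part of this commit; the helper function name and the chosen bit positions are illustrative only, and all class, method, and macro names are taken from bitmap.h as shown in the diff.

    // Sketch only (not from this commit): minimal use of the bitmap API
    // declared in util/generic/bitmap.h, which this diff re-annotates.
    #include <util/generic/bitmap.h>

    size_t SumOfSetBits() {
        TDynBitMap bits;               // dynamically expanding bitmap (ui64 chunks)
        bits.Set(0).Set(50).Set(100);  // storage grows past the initial 64 bits
        bits.Reset(50);                // clear a single bit again

        TBitMap<101> fixed;            // fixed-capacity bitmap of 101 bits
        fixed.Set(3, 7);               // fills the half-open range [3, 7)

        size_t sum = 0;
        // Iterate set bits with the macro defined near the end of bitmap.h
        Y_FOR_EACH_BIT(pos, bits) {
            sum += pos;                // visits positions 0 and 100
        }
        return sum + fixed.Count();    // 100 + 4
    }
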
diff --git a/util/generic/algorithm.h b/util/generic/algorithm.h
index 6544d56fa2d..badfb889933 100644
--- a/util/generic/algorithm.h
+++ b/util/generic/algorithm.h
@@ -388,11 +388,11 @@ static inline TO RemoveCopyIf(TI f, TI l, TO t, TP p) {
return std::remove_copy_if(f, l, t, p);
}
-template <class TI, class TO>
-static inline TO ReverseCopy(TI f, TI l, TO t) {
+template <class TI, class TO>
+static inline TO ReverseCopy(TI f, TI l, TO t) {
return std::reverse_copy(f, l, t);
-}
-
+}
+
template <class TI1, class TI2, class TO>
static inline TO SetUnion(TI1 f1, TI1 l1, TI2 f2, TI2 l2, TO p) {
return std::set_union(f1, l1, f2, l2, p);
diff --git a/util/generic/bitmap.h b/util/generic/bitmap.h
index d8611b9e69b..f77d1824607 100644
--- a/util/generic/bitmap.h
+++ b/util/generic/bitmap.h
@@ -3,16 +3,16 @@
#include "fwd.h"
#include "ptr.h"
#include "bitops.h"
-#include "typetraits.h"
+#include "typetraits.h"
#include "algorithm.h"
-#include "utility.h"
+#include "utility.h"
#include <util/system/yassert.h>
#include <util/system/defaults.h>
#include <util/str_stl.h>
#include <util/ysaveload.h>
-namespace NBitMapPrivate {
+namespace NBitMapPrivate {
// Returns number of bits set; result is in most significant byte
inline ui64 ByteSums(ui64 x) {
ui64 byteSums = x - ((x & 0xAAAAAAAAAAAAAAAAULL) >> 1);
@@ -33,34 +33,34 @@ namespace NBitMapPrivate {
struct TSanitizeMask {
static constexpr TChunkType Value = ~((~TChunkType(0)) << ExtraBits);
};
-
+
template <typename TChunkType>
struct TSanitizeMask<TChunkType, 0> {
static constexpr TChunkType Value = (TChunkType)~TChunkType(0u);
};
-
+
template <typename TTargetChunk, typename TSourceChunk>
struct TBigToSmallDataCopier {
static_assert(sizeof(TTargetChunk) < sizeof(TSourceChunk), "expect sizeof(TTargetChunk) < sizeof(TSourceChunk)");
static_assert(0 == sizeof(TSourceChunk) % sizeof(TTargetChunk), "expect 0 == sizeof(TSourceChunk) % sizeof(TTargetChunk)");
-
+
static constexpr size_t BLOCK_SIZE = sizeof(TSourceChunk) / sizeof(TTargetChunk);
-
+
union TCnv {
TSourceChunk BigData;
TTargetChunk SmallData[BLOCK_SIZE];
};
-
+
static inline void CopyChunk(TTargetChunk* target, TSourceChunk source) {
TCnv c;
c.BigData = source;
-#if defined(_big_endian_)
+#if defined(_big_endian_)
::ReverseCopy(c.SmallData, c.SmallData + Y_ARRAY_SIZE(c.SmallData), target);
-#else
+#else
::Copy(c.SmallData, c.SmallData + Y_ARRAY_SIZE(c.SmallData), target);
-#endif
+#endif
}
-
+
static inline void Copy(TTargetChunk* target, size_t targetSize, const TSourceChunk* source, size_t sourceSize) {
Y_ASSERT(targetSize >= sourceSize * BLOCK_SIZE);
if (targetSize > sourceSize * BLOCK_SIZE) {
@@ -69,43 +69,43 @@ namespace NBitMapPrivate {
for (size_t i = 0; i < sourceSize; ++i) {
CopyChunk(target + i * BLOCK_SIZE, source[i]);
}
- }
+ }
};
-
+
template <typename TTargetChunk, typename TSourceChunk>
struct TSmallToBigDataCopier {
static_assert(sizeof(TTargetChunk) > sizeof(TSourceChunk), "expect sizeof(TTargetChunk) > sizeof(TSourceChunk)");
static_assert(0 == sizeof(TTargetChunk) % sizeof(TSourceChunk), "expect 0 == sizeof(TTargetChunk) % sizeof(TSourceChunk)");
-
+
static constexpr size_t BLOCK_SIZE = sizeof(TTargetChunk) / sizeof(TSourceChunk);
-
+
union TCnv {
TSourceChunk SmallData[BLOCK_SIZE];
TTargetChunk BigData;
};
-
+
static inline TTargetChunk CopyFullChunk(const TSourceChunk* source) {
TCnv c;
-#if defined(_big_endian_)
+#if defined(_big_endian_)
::ReverseCopy(source, source + BLOCK_SIZE, c.SmallData);
-#else
+#else
::Copy(source, source + BLOCK_SIZE, c.SmallData);
-#endif
+#endif
return c.BigData;
}
-
+
static inline TTargetChunk CopyPartChunk(const TSourceChunk* source, size_t count) {
Y_ASSERT(count <= BLOCK_SIZE);
TCnv c;
c.BigData = 0;
-#if defined(_big_endian_)
+#if defined(_big_endian_)
::ReverseCopy(source, source + count, c.SmallData);
-#else
+#else
::Copy(source, source + count, c.SmallData);
-#endif
+#endif
return c.BigData;
}
-
+
static inline void Copy(TTargetChunk* target, size_t targetSize, const TSourceChunk* source, size_t sourceSize) {
Y_ASSERT(targetSize * BLOCK_SIZE >= sourceSize);
if (targetSize * BLOCK_SIZE > sourceSize) {
@@ -118,9 +118,9 @@ namespace NBitMapPrivate {
if (0 != sourceSize % BLOCK_SIZE) {
target[i] = CopyPartChunk(source + i * BLOCK_SIZE, sourceSize % BLOCK_SIZE);
}
- }
+ }
};
-
+
template <typename TChunk>
struct TUniformDataCopier {
static inline void Copy(TChunk* target, size_t targetSize, const TChunk* source, size_t sourceSize) {
@@ -131,91 +131,91 @@ namespace NBitMapPrivate {
for (size_t i = sourceSize; i < targetSize; ++i) {
target[i] = 0;
}
- }
+ }
};
-
+
template <typename TFirst, typename TSecond>
struct TIsSmaller {
enum {
Result = sizeof(TFirst) < sizeof(TSecond)
};
- };
-
+ };
+
template <typename TTargetChunk, typename TSourceChunk>
struct TDataCopier: public std::conditional_t<std::is_same<TTargetChunk, TSourceChunk>::value, TUniformDataCopier<TTargetChunk>, std::conditional_t<TIsSmaller<TTargetChunk, TSourceChunk>::Result, TBigToSmallDataCopier<TTargetChunk, TSourceChunk>, TSmallToBigDataCopier<TTargetChunk, TSourceChunk>>> {
};
-
+
template <typename TTargetChunk, typename TSourceChunk>
inline void CopyData(TTargetChunk* target, size_t targetSize, const TSourceChunk* source, size_t sourceSize) {
TDataCopier<TTargetChunk, TSourceChunk>::Copy(target, targetSize, source, sourceSize);
}
-
+
template <size_t BitCount, typename TChunkType>
struct TFixedStorage {
using TChunk = TChunkType;
-
+
static constexpr size_t Size = (BitCount + 8 * sizeof(TChunk) - 1) / (8 * sizeof(TChunk));
-
+
TChunk Data[Size];
-
+
TFixedStorage() {
Zero(Data);
}
-
+
TFixedStorage(const TFixedStorage<BitCount, TChunkType>& st) {
for (size_t i = 0; i < Size; ++i) {
Data[i] = st.Data[i];
}
}
-
+
template <typename TOtherChunk>
TFixedStorage(const TOtherChunk* data, size_t size) {
Y_VERIFY(Size * sizeof(TChunk) >= size * sizeof(TOtherChunk), "Exceeding bitmap storage capacity");
CopyData(Data, Size, data, size);
- }
-
+ }
+
Y_FORCE_INLINE void Swap(TFixedStorage<BitCount, TChunkType>& st) {
for (size_t i = 0; i < Size; ++i) {
DoSwap(Data[i], st.Data[i]);
}
}
-
+
Y_FORCE_INLINE static constexpr size_t GetBitCapacity() noexcept {
return BitCount;
- }
-
+ }
+
Y_FORCE_INLINE static constexpr size_t GetChunkCapacity() noexcept {
return Size;
}
-
+
// Returns true if the resulting storage capacity is enough to fit the requested size
Y_FORCE_INLINE static constexpr bool ExpandBitSize(const size_t bitSize) noexcept {
return bitSize <= BitCount;
}
-
+
Y_FORCE_INLINE void Sanitize() {
Data[Size - 1] &= TSanitizeMask<TChunk, BitCount % (8 * sizeof(TChunk))>::Value;
}
};
-
+
// Dynamically expanded storage.
// It uses "on stack" realization with no allocation for one chunk spaces
template <typename TChunkType>
struct TDynamicStorage {
using TChunk = TChunkType;
-
+
size_t Size;
TChunk StackData;
TArrayHolder<TChunk> ArrayData;
TChunk* Data;
-
+
TDynamicStorage()
: Size(1)
, StackData(0)
, Data(&StackData)
{
}
-
+
TDynamicStorage(const TDynamicStorage<TChunk>& st)
: Size(1)
, StackData(0)
@@ -229,7 +229,7 @@ namespace NBitMapPrivate {
Data[i] = 0;
}
}
-
+
template <typename TOtherChunk>
TDynamicStorage(const TOtherChunk* data, size_t size)
: Size(1)
@@ -238,7 +238,7 @@ namespace NBitMapPrivate {
{
ExpandBitSize(size * sizeof(TOtherChunk) * 8, false);
CopyData(Data, Size, data, size);
- }
+ }
Y_FORCE_INLINE void Swap(TDynamicStorage<TChunkType>& st) {
DoSwap(Size, st.Size);
@@ -246,20 +246,20 @@ namespace NBitMapPrivate {
DoSwap(ArrayData, st.ArrayData);
Data = 1 == Size ? &StackData : ArrayData.Get();
st.Data = 1 == st.Size ? &st.StackData : st.ArrayData.Get();
- }
-
+ }
+
Y_FORCE_INLINE size_t GetBitCapacity() const {
return Size * 8 * sizeof(TChunk);
}
-
+
Y_FORCE_INLINE size_t GetChunkCapacity() const {
return Size;
}
-
+
// Returns true if the resulting storage capacity is enough to fit the requested size
Y_FORCE_INLINE bool ExpandSize(size_t size, bool keepData = true) {
if (size > Size) {
- size = Max(size, Size * 2);
+ size = Max(size, Size * 2);
TArrayHolder<TChunk> newData(new TChunk[size]);
if (keepData) {
for (size_t i = 0; i < Size; ++i) {
@@ -268,140 +268,140 @@ namespace NBitMapPrivate {
for (size_t i = Size; i < size; ++i) {
newData[i] = 0;
}
- }
+ }
DoSwap(ArrayData, newData);
Data = ArrayData.Get();
Size = size;
- }
+ }
return true;
- }
+ }
Y_FORCE_INLINE bool ExpandBitSize(size_t bitSize, bool keepData = true) {
return ExpandSize((bitSize + 8 * sizeof(TChunk) - 1) / (8 * sizeof(TChunk)), keepData);
}
-
+
Y_FORCE_INLINE void Sanitize() {
}
};
-
+
template <size_t num>
struct TDivCount {
static constexpr size_t Value = 1 + TDivCount<(num >> 1)>::Value;
};
-
+
template <>
struct TDivCount<0> {
static constexpr size_t Value = 0;
};
-
+
}
-
-template <size_t BitCount, typename TChunkType>
-struct TFixedBitMapTraits {
+
+template <size_t BitCount, typename TChunkType>
+struct TFixedBitMapTraits {
using TChunk = TChunkType;
using TStorage = NBitMapPrivate::TFixedStorage<BitCount, TChunkType>;
-};
-
-template <typename TChunkType>
-struct TDynamicBitMapTraits {
+};
+
+template <typename TChunkType>
+struct TDynamicBitMapTraits {
using TChunk = TChunkType;
using TStorage = NBitMapPrivate::TDynamicStorage<TChunkType>;
-};
-
-template <class TTraits>
-class TBitMapOps {
+};
+
+template <class TTraits>
+class TBitMapOps {
public:
using TChunk = typename TTraits::TChunk;
using TThis = TBitMapOps<TTraits>;
-
-private:
+
+private:
static_assert(std::is_unsigned<TChunk>::value, "expect std::is_unsigned<TChunk>::value");
-
+
static constexpr size_t BitsPerChunk = 8 * sizeof(TChunk);
static constexpr TChunk ModMask = static_cast<TChunk>(BitsPerChunk - 1);
static constexpr size_t DivCount = NBitMapPrivate::TDivCount<BitsPerChunk>::Value - 1;
static constexpr TChunk FullChunk = (TChunk)~TChunk(0);
-
+
template <class>
friend class TBitMapOps;
-
+
using TStorage = typename TTraits::TStorage;
-
- // The smallest unsigned type, which can be used in bit ops
+
+ // The smallest unsigned type, which can be used in bit ops
using TIntType = std::conditional_t<sizeof(TChunk) < sizeof(unsigned int), unsigned int, TChunk>;
-
- TStorage Mask;
-
-public:
- class TReference {
- private:
- friend class TBitMapOps<TTraits>;
-
- TChunk* Chunk;
- size_t Offset;
-
- TReference(TChunk* c, size_t offset)
- : Chunk(c)
- , Offset(offset)
- {
- }
-
- public:
+
+ TStorage Mask;
+
+public:
+ class TReference {
+ private:
+ friend class TBitMapOps<TTraits>;
+
+ TChunk* Chunk;
+ size_t Offset;
+
+ TReference(TChunk* c, size_t offset)
+ : Chunk(c)
+ , Offset(offset)
+ {
+ }
+
+ public:
~TReference() = default;
-
+
Y_FORCE_INLINE TReference& operator=(bool val) {
- if (val)
- *Chunk |= static_cast<TChunk>(1) << Offset;
- else
+ if (val)
+ *Chunk |= static_cast<TChunk>(1) << Offset;
+ else
*Chunk &= ~(static_cast<TChunk>(1) << Offset);
-
- return *this;
- }
-
+
+ return *this;
+ }
+
Y_FORCE_INLINE TReference& operator=(const TReference& ref) {
- if (ref)
- *Chunk |= static_cast<TChunk>(1) << Offset;
- else
- *Chunk &= ~(static_cast<TChunk>(1) << Offset);
-
- return *this;
- }
-
+ if (ref)
+ *Chunk |= static_cast<TChunk>(1) << Offset;
+ else
+ *Chunk &= ~(static_cast<TChunk>(1) << Offset);
+
+ return *this;
+ }
+
Y_FORCE_INLINE bool operator~() const {
- return 0 == (*Chunk & (static_cast<TChunk>(1) << Offset));
- }
-
+ return 0 == (*Chunk & (static_cast<TChunk>(1) << Offset));
+ }
+
Y_FORCE_INLINE operator bool() const {
- return 0 != (*Chunk & (static_cast<TChunk>(1) << Offset));
- }
-
+ return 0 != (*Chunk & (static_cast<TChunk>(1) << Offset));
+ }
+
Y_FORCE_INLINE TReference& Flip() {
- *Chunk ^= static_cast<TChunk>(1) << Offset;
- return *this;
- }
- };
-
-private:
- struct TSetOp {
+ *Chunk ^= static_cast<TChunk>(1) << Offset;
+ return *this;
+ }
+ };
+
+private:
+ struct TSetOp {
static constexpr TChunk Op(const TChunk src, const TChunk mask) noexcept {
- return src | mask;
- }
- };
-
- struct TResetOp {
+ return src | mask;
+ }
+ };
+
+ struct TResetOp {
static constexpr TChunk Op(const TChunk src, const TChunk mask) noexcept {
- return src & ~mask;
- }
- };
-
- template <class TUpdateOp>
- void UpdateRange(size_t start, size_t end) {
- const size_t startChunk = start >> DivCount;
- const size_t startBitOffset = start & ModMask;
-
- const size_t endChunk = end >> DivCount;
- const size_t endBitOffset = end & ModMask;
-
+ return src & ~mask;
+ }
+ };
+
+ template <class TUpdateOp>
+ void UpdateRange(size_t start, size_t end) {
+ const size_t startChunk = start >> DivCount;
+ const size_t startBitOffset = start & ModMask;
+
+ const size_t endChunk = end >> DivCount;
+ const size_t endBitOffset = end & ModMask;
+
size_t bitOffset = startBitOffset;
for (size_t chunk = startChunk; chunk <= endChunk; ++chunk) {
TChunk updateMask = FullChunk << bitOffset;
@@ -409,76 +409,76 @@ private:
updateMask ^= FullChunk << endBitOffset;
if (!updateMask)
break;
- }
+ }
Mask.Data[chunk] = TUpdateOp::Op(Mask.Data[chunk], updateMask);
bitOffset = 0;
- }
- }
-
-public:
+ }
+ }
+
+public:
TBitMapOps() = default;
TBitMapOps(TChunk val) {
- Mask.Data[0] = val;
- Mask.Sanitize();
+ Mask.Data[0] = val;
+ Mask.Sanitize();
}
TBitMapOps(const TThis&) = default;
- template <class T>
- TBitMapOps(const TBitMapOps<T>& bitmap)
- : Mask(bitmap.Mask.Data, bitmap.Mask.GetChunkCapacity())
- {
- Mask.Sanitize();
+ template <class T>
+ TBitMapOps(const TBitMapOps<T>& bitmap)
+ : Mask(bitmap.Mask.Data, bitmap.Mask.GetChunkCapacity())
+ {
+ Mask.Sanitize();
}
- template <class T>
+ template <class T>
Y_FORCE_INLINE bool operator==(const TBitMapOps<T>& bitmap) const {
- return Equal(bitmap);
- }
-
+ return Equal(bitmap);
+ }
+
Y_FORCE_INLINE TThis& operator=(const TThis& bitmap) {
- if (this != &bitmap) {
- TThis bm(bitmap);
- Swap(bm);
- }
- return *this;
- }
-
- template <class T>
+ if (this != &bitmap) {
+ TThis bm(bitmap);
+ Swap(bm);
+ }
+ return *this;
+ }
+
+ template <class T>
Y_FORCE_INLINE TThis& operator=(const TBitMapOps<T>& bitmap) {
- TThis bm(bitmap);
- Swap(bm);
- return *this;
- }
-
- template <class T>
+ TThis bm(bitmap);
+ Swap(bm);
+ return *this;
+ }
+
+ template <class T>
Y_FORCE_INLINE TThis& operator&=(const TBitMapOps<T>& bitmap) {
- return And(bitmap);
- }
-
+ return And(bitmap);
+ }
+
Y_FORCE_INLINE TThis& operator&=(const TChunk& val) {
- return And(val);
- }
-
- template <class T>
+ return And(val);
+ }
+
+ template <class T>
Y_FORCE_INLINE TThis& operator|=(const TBitMapOps<T>& bitmap) {
- return Or(bitmap);
- }
-
+ return Or(bitmap);
+ }
+
Y_FORCE_INLINE TThis& operator|=(const TChunk& val) {
- return Or(val);
- }
-
- template <class T>
+ return Or(val);
+ }
+
+ template <class T>
Y_FORCE_INLINE TThis& operator^=(const TBitMapOps<T>& bitmap) {
- return Xor(bitmap);
- }
-
+ return Xor(bitmap);
+ }
+
Y_FORCE_INLINE TThis& operator^=(const TChunk& val) {
- return Xor(val);
- }
-
+ return Xor(val);
+ }
+
template <class T>
Y_FORCE_INLINE TThis& operator-=(const TBitMapOps<T>& bitmap) {
return SetDifference(bitmap);
@@ -490,81 +490,81 @@ public:
Y_FORCE_INLINE TThis& operator<<=(size_t pos) {
return LShift(pos);
- }
-
+ }
+
Y_FORCE_INLINE TThis& operator>>=(size_t pos) {
return RShift(pos);
- }
-
+ }
+
Y_FORCE_INLINE TThis operator<<(size_t pos) const {
- return TThis(*this).LShift(pos);
- }
-
+ return TThis(*this).LShift(pos);
+ }
+
Y_FORCE_INLINE TThis operator>>(size_t pos) const {
- return TThis(*this).RShift(pos);
- }
-
+ return TThis(*this).RShift(pos);
+ }
+
Y_FORCE_INLINE bool operator[](size_t pos) const {
- return Get(pos);
- }
-
+ return Get(pos);
+ }
+
Y_FORCE_INLINE TReference operator[](size_t pos) {
const bool fitStorage = Mask.ExpandBitSize(pos + 1);
Y_ASSERT(fitStorage);
- return TReference(&Mask.Data[pos >> DivCount], ModMask & pos);
- }
-
+ return TReference(&Mask.Data[pos >> DivCount], ModMask & pos);
+ }
+
Y_FORCE_INLINE void Swap(TThis& bitmap) {
- DoSwap(Mask, bitmap.Mask);
- }
-
+ DoSwap(Mask, bitmap.Mask);
+ }
+
Y_FORCE_INLINE TThis& Set(size_t pos) {
const bool fitStorage = Mask.ExpandBitSize(pos + 1);
Y_ASSERT(fitStorage);
- Mask.Data[pos >> DivCount] |= static_cast<TChunk>(1) << (pos & ModMask);
- return *this;
- }
-
- // Fills the specified [start, end) bit range by the 1. Other bits are kept unchanged
- TThis& Set(size_t start, size_t end) {
+ Mask.Data[pos >> DivCount] |= static_cast<TChunk>(1) << (pos & ModMask);
+ return *this;
+ }
+
+ // Fills the specified [start, end) bit range by the 1. Other bits are kept unchanged
+ TThis& Set(size_t start, size_t end) {
Y_ASSERT(start <= end);
- if (start < end) {
- Reserve(end);
- UpdateRange<TSetOp>(start, end);
- }
- return *this;
- }
-
+ if (start < end) {
+ Reserve(end);
+ UpdateRange<TSetOp>(start, end);
+ }
+ return *this;
+ }
+
Y_FORCE_INLINE TThis& Reset(size_t pos) {
- if ((pos >> DivCount) < Mask.GetChunkCapacity()) {
- Mask.Data[pos >> DivCount] &= ~(static_cast<TChunk>(1) << (pos & ModMask));
- }
- return *this;
- }
-
- // Clears the specified [start, end) bit range. Other bits are kept unchanged
- TThis& Reset(size_t start, size_t end) {
+ if ((pos >> DivCount) < Mask.GetChunkCapacity()) {
+ Mask.Data[pos >> DivCount] &= ~(static_cast<TChunk>(1) << (pos & ModMask));
+ }
+ return *this;
+ }
+
+ // Clears the specified [start, end) bit range. Other bits are kept unchanged
+ TThis& Reset(size_t start, size_t end) {
Y_ASSERT(start <= end);
- if (start < end && (start >> DivCount) < Mask.GetChunkCapacity()) {
+ if (start < end && (start >> DivCount) < Mask.GetChunkCapacity()) {
UpdateRange<TResetOp>(start, Min(end, Mask.GetBitCapacity()));
- }
- return *this;
- }
-
+ }
+ return *this;
+ }
+
Y_FORCE_INLINE TThis& Flip(size_t pos) {
const bool fitStorage = Mask.ExpandBitSize(pos + 1);
Y_ASSERT(fitStorage);
- Mask.Data[pos >> DivCount] ^= static_cast<TChunk>(1) << (pos & ModMask);
- return *this;
- }
-
+ Mask.Data[pos >> DivCount] ^= static_cast<TChunk>(1) << (pos & ModMask);
+ return *this;
+ }
+
Y_FORCE_INLINE bool Get(size_t pos) const {
- if ((pos >> DivCount) < Mask.GetChunkCapacity()) {
- return Mask.Data[pos >> DivCount] & (static_cast<TChunk>(1) << (pos & ModMask));
- }
- return false;
- }
-
+ if ((pos >> DivCount) < Mask.GetChunkCapacity()) {
+ return Mask.Data[pos >> DivCount] & (static_cast<TChunk>(1) << (pos & ModMask));
+ }
+ return false;
+ }
+
template <class TTo>
void Export(size_t pos, TTo& to) const {
static_assert(std::is_unsigned<TTo>::value, "expect std::is_unsigned<TTo>::value");
@@ -593,152 +593,152 @@ public:
}
Y_FORCE_INLINE TThis& Push(bool val) {
- LShift(1);
- return val ? Set(0) : *this;
+ LShift(1);
+ return val ? Set(0) : *this;
}
Y_FORCE_INLINE bool Pop() {
- bool val = Get(0);
- return RShift(1), val;
+ bool val = Get(0);
+ return RShift(1), val;
}
- // Clear entire bitmap. Current capacity is kept unchanged
+ // Clear entire bitmap. Current capacity is kept unchanged
Y_FORCE_INLINE TThis& Clear() {
- for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i) {
- Mask.Data[i] = 0;
- }
- return *this;
+ for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i) {
+ Mask.Data[i] = 0;
+ }
+ return *this;
}
- // Returns bits capacity
+ // Returns bits capacity
Y_FORCE_INLINE constexpr size_t Size() const noexcept {
- return Mask.GetBitCapacity();
- }
-
+ return Mask.GetBitCapacity();
+ }
+
Y_FORCE_INLINE void Reserve(size_t bitCount) {
Y_VERIFY(Mask.ExpandBitSize(bitCount), "Exceeding bitmap storage capacity");
}
Y_FORCE_INLINE size_t ValueBitCount() const {
- size_t nonZeroChunk = Mask.GetChunkCapacity() - 1;
- while (nonZeroChunk != 0 && !Mask.Data[nonZeroChunk])
- --nonZeroChunk;
- return nonZeroChunk || Mask.Data[nonZeroChunk]
+ size_t nonZeroChunk = Mask.GetChunkCapacity() - 1;
+ while (nonZeroChunk != 0 && !Mask.Data[nonZeroChunk])
+ --nonZeroChunk;
+ return nonZeroChunk || Mask.Data[nonZeroChunk]
? nonZeroChunk * BitsPerChunk + GetValueBitCount(TIntType(Mask.Data[nonZeroChunk]))
: 0;
- }
-
+ }
+
Y_PURE_FUNCTION Y_FORCE_INLINE bool Empty() const {
- for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i)
- if (Mask.Data[i])
+ for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i)
+ if (Mask.Data[i])
return false;
return true;
}
- bool HasAny(const TThis& bitmap) const {
- for (size_t i = 0; i < Min(Mask.GetChunkCapacity(), bitmap.Mask.GetChunkCapacity()); ++i) {
- if (0 != (Mask.Data[i] & bitmap.Mask.Data[i])) {
- return true;
- }
- }
- return false;
- }
-
- template <class T>
+ bool HasAny(const TThis& bitmap) const {
+ for (size_t i = 0; i < Min(Mask.GetChunkCapacity(), bitmap.Mask.GetChunkCapacity()); ++i) {
+ if (0 != (Mask.Data[i] & bitmap.Mask.Data[i])) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ template <class T>
Y_FORCE_INLINE bool HasAny(const TBitMapOps<T>& bitmap) const {
- return HasAny(TThis(bitmap));
- }
-
+ return HasAny(TThis(bitmap));
+ }
+
Y_FORCE_INLINE bool HasAny(const TChunk& val) const {
- return 0 != (Mask.Data[0] & val);
- }
-
- bool HasAll(const TThis& bitmap) const {
- for (size_t i = 0; i < Min(Mask.GetChunkCapacity(), bitmap.Mask.GetChunkCapacity()); ++i) {
- if (bitmap.Mask.Data[i] != (Mask.Data[i] & bitmap.Mask.Data[i])) {
- return false;
- }
- }
- for (size_t i = Mask.GetChunkCapacity(); i < bitmap.Mask.GetChunkCapacity(); ++i) {
- if (bitmap.Mask.Data[i] != 0) {
- return false;
- }
- }
- return true;
- }
-
- template <class T>
+ return 0 != (Mask.Data[0] & val);
+ }
+
+ bool HasAll(const TThis& bitmap) const {
+ for (size_t i = 0; i < Min(Mask.GetChunkCapacity(), bitmap.Mask.GetChunkCapacity()); ++i) {
+ if (bitmap.Mask.Data[i] != (Mask.Data[i] & bitmap.Mask.Data[i])) {
+ return false;
+ }
+ }
+ for (size_t i = Mask.GetChunkCapacity(); i < bitmap.Mask.GetChunkCapacity(); ++i) {
+ if (bitmap.Mask.Data[i] != 0) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ template <class T>
Y_FORCE_INLINE bool HasAll(const TBitMapOps<T>& bitmap) const {
- return HasAll(TThis(bitmap));
- }
-
+ return HasAll(TThis(bitmap));
+ }
+
Y_FORCE_INLINE bool HasAll(const TChunk& val) const {
- return (Mask.Data[0] & val) == val;
- }
-
- TThis& And(const TThis& bitmap) {
- // Don't expand capacity here, because resulting bits in positions,
- // which are greater then size of one of these bitmaps, will be zero
- for (size_t i = 0; i < Min(bitmap.Mask.GetChunkCapacity(), Mask.GetChunkCapacity()); ++i)
- Mask.Data[i] &= bitmap.Mask.Data[i];
- // Clear bits if current bitmap size is greater than AND-ed one
- for (size_t i = bitmap.Mask.GetChunkCapacity(); i < Mask.GetChunkCapacity(); ++i)
- Mask.Data[i] = 0;
- return *this;
- }
-
- template <class T>
+ return (Mask.Data[0] & val) == val;
+ }
+
+ TThis& And(const TThis& bitmap) {
+ // Don't expand capacity here, because resulting bits in positions,
+ // which are greater then size of one of these bitmaps, will be zero
+ for (size_t i = 0; i < Min(bitmap.Mask.GetChunkCapacity(), Mask.GetChunkCapacity()); ++i)
+ Mask.Data[i] &= bitmap.Mask.Data[i];
+ // Clear bits if current bitmap size is greater than AND-ed one
+ for (size_t i = bitmap.Mask.GetChunkCapacity(); i < Mask.GetChunkCapacity(); ++i)
+ Mask.Data[i] = 0;
+ return *this;
+ }
+
+ template <class T>
Y_FORCE_INLINE TThis& And(const TBitMapOps<T>& bitmap) {
- return And(TThis(bitmap));
+ return And(TThis(bitmap));
}
Y_FORCE_INLINE TThis& And(const TChunk& val) {
- Mask.Data[0] &= val;
- for (size_t i = 1; i < Mask.GetChunkCapacity(); ++i)
- Mask.Data[i] = 0;
- return *this;
- }
-
- TThis& Or(const TThis& bitmap) {
- const size_t valueBitCount = bitmap.ValueBitCount();
- if (valueBitCount) {
- // Memory optimization: expand size only for non-zero bits
- Reserve(valueBitCount);
- for (size_t i = 0; i < Min(bitmap.Mask.GetChunkCapacity(), Mask.GetChunkCapacity()); ++i)
- Mask.Data[i] |= bitmap.Mask.Data[i];
- }
- return *this;
- }
-
- template <class T>
+ Mask.Data[0] &= val;
+ for (size_t i = 1; i < Mask.GetChunkCapacity(); ++i)
+ Mask.Data[i] = 0;
+ return *this;
+ }
+
+ TThis& Or(const TThis& bitmap) {
+ const size_t valueBitCount = bitmap.ValueBitCount();
+ if (valueBitCount) {
+ // Memory optimization: expand size only for non-zero bits
+ Reserve(valueBitCount);
+ for (size_t i = 0; i < Min(bitmap.Mask.GetChunkCapacity(), Mask.GetChunkCapacity()); ++i)
+ Mask.Data[i] |= bitmap.Mask.Data[i];
+ }
+ return *this;
+ }
+
+ template <class T>
Y_FORCE_INLINE TThis& Or(const TBitMapOps<T>& bitmap) {
- return Or(TThis(bitmap));
- }
-
+ return Or(TThis(bitmap));
+ }
+
Y_FORCE_INLINE TThis& Or(const TChunk& val) {
- Mask.Data[0] |= val;
- Mask.Sanitize();
- return *this;
- }
-
- TThis& Xor(const TThis& bitmap) {
+ Mask.Data[0] |= val;
+ Mask.Sanitize();
+ return *this;
+ }
+
+ TThis& Xor(const TThis& bitmap) {
Reserve(bitmap.Size());
- for (size_t i = 0; i < bitmap.Mask.GetChunkCapacity(); ++i)
- Mask.Data[i] ^= bitmap.Mask.Data[i];
- return *this;
- }
-
- template <class T>
+ for (size_t i = 0; i < bitmap.Mask.GetChunkCapacity(); ++i)
+ Mask.Data[i] ^= bitmap.Mask.Data[i];
+ return *this;
+ }
+
+ template <class T>
Y_FORCE_INLINE TThis& Xor(const TBitMapOps<T>& bitmap) {
- return Xor(TThis(bitmap));
- }
-
+ return Xor(TThis(bitmap));
+ }
+
Y_FORCE_INLINE TThis& Xor(const TChunk& val) {
- Mask.Data[0] ^= val;
- Mask.Sanitize();
- return *this;
- }
-
+ Mask.Data[0] ^= val;
+ Mask.Sanitize();
+ return *this;
+ }
+
TThis& SetDifference(const TThis& bitmap) {
for (size_t i = 0; i < Min(bitmap.Mask.GetChunkCapacity(), Mask.GetChunkCapacity()); ++i)
Mask.Data[i] &= ~bitmap.Mask.Data[i];
@@ -756,204 +756,204 @@ public:
}
Y_FORCE_INLINE TThis& Flip() {
- for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i)
- Mask.Data[i] = ~Mask.Data[i];
- Mask.Sanitize();
- return *this;
- }
-
- TThis& LShift(size_t shift) {
- if (shift != 0) {
- const size_t valueBitCount = ValueBitCount();
- // Do nothing for empty bitmap
- if (valueBitCount != 0) {
- const size_t eshift = shift / BitsPerChunk;
- const size_t offset = shift % BitsPerChunk;
- const size_t subOffset = BitsPerChunk - offset;
-
- // Don't verify expand result, so l-shift of fixed bitmap will work in the same way as for unsigned integer.
- Mask.ExpandBitSize(valueBitCount + shift);
-
- if (offset == 0) {
- for (size_t i = Mask.GetChunkCapacity() - 1; i >= eshift; --i) {
- Mask.Data[i] = Mask.Data[i - eshift];
- }
- } else {
- for (size_t i = Mask.GetChunkCapacity() - 1; i > eshift; --i)
- Mask.Data[i] = (Mask.Data[i - eshift] << offset) | (Mask.Data[i - eshift - 1] >> subOffset);
- if (eshift < Mask.GetChunkCapacity())
- Mask.Data[eshift] = Mask.Data[0] << offset;
- }
- for (size_t i = 0; i < Min(eshift, Mask.GetChunkCapacity()); ++i)
- Mask.Data[i] = 0;
-
- // Cleanup extra high bits in the storage
- Mask.Sanitize();
- }
- }
- return *this;
- }
-
- TThis& RShift(size_t shift) {
- if (shift != 0) {
- const size_t eshift = shift / BitsPerChunk;
- const size_t offset = shift % BitsPerChunk;
- if (eshift >= Mask.GetChunkCapacity()) {
- Clear();
-
- } else {
- const size_t limit = Mask.GetChunkCapacity() - eshift - 1;
-
- if (offset == 0) {
- for (size_t i = 0; i <= limit; ++i) {
- Mask.Data[i] = Mask.Data[i + eshift];
- }
- } else {
- const size_t subOffset = BitsPerChunk - offset;
- for (size_t i = 0; i < limit; ++i)
- Mask.Data[i] = (Mask.Data[i + eshift] >> offset) | (Mask.Data[i + eshift + 1] << subOffset);
- Mask.Data[limit] = Mask.Data[Mask.GetChunkCapacity() - 1] >> offset;
- }
-
- for (size_t i = limit + 1; i < Mask.GetChunkCapacity(); ++i)
- Mask.Data[i] = 0;
- }
- }
- return *this;
- }
-
- // Applies bitmap at the specified offset using OR operator.
- // This method is optimized combination of Or() and LShift(), which allows reducing memory allocation
- // when combining long dynamic bitmaps.
- TThis& Or(const TThis& bitmap, size_t offset) {
- if (0 == offset)
- return Or(bitmap);
-
- const size_t otherValueBitCount = bitmap.ValueBitCount();
- // Continue only if OR-ed bitmap have non-zero bits
- if (otherValueBitCount) {
- const size_t chunkShift = offset / BitsPerChunk;
- const size_t subShift = offset % BitsPerChunk;
- const size_t subOffset = BitsPerChunk - subShift;
-
- Reserve(otherValueBitCount + offset);
-
- if (subShift == 0) {
- for (size_t i = chunkShift; i < Min(bitmap.Mask.GetChunkCapacity() + chunkShift, Mask.GetChunkCapacity()); ++i) {
- Mask.Data[i] |= bitmap.Mask.Data[i - chunkShift];
- }
- } else {
- Mask.Data[chunkShift] |= bitmap.Mask.Data[0] << subShift;
- size_t i = chunkShift + 1;
- for (; i < Min(bitmap.Mask.GetChunkCapacity() + chunkShift, Mask.GetChunkCapacity()); ++i) {
- Mask.Data[i] |= (bitmap.Mask.Data[i - chunkShift] << subShift) | (bitmap.Mask.Data[i - chunkShift - 1] >> subOffset);
- }
- if (i < Mask.GetChunkCapacity())
- Mask.Data[i] |= bitmap.Mask.Data[i - chunkShift - 1] >> subOffset;
- }
- }
-
- return *this;
- }
-
- bool Equal(const TThis& bitmap) const {
- if (Mask.GetChunkCapacity() > bitmap.Mask.GetChunkCapacity()) {
- for (size_t i = bitmap.Mask.GetChunkCapacity(); i < Mask.GetChunkCapacity(); ++i) {
- if (0 != Mask.Data[i])
- return false;
- }
+ for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i)
+ Mask.Data[i] = ~Mask.Data[i];
+ Mask.Sanitize();
+ return *this;
+ }
+
+ TThis& LShift(size_t shift) {
+ if (shift != 0) {
+ const size_t valueBitCount = ValueBitCount();
+ // Do nothing for empty bitmap
+ if (valueBitCount != 0) {
+ const size_t eshift = shift / BitsPerChunk;
+ const size_t offset = shift % BitsPerChunk;
+ const size_t subOffset = BitsPerChunk - offset;
+
+ // Don't verify expand result, so l-shift of fixed bitmap will work in the same way as for unsigned integer.
+ Mask.ExpandBitSize(valueBitCount + shift);
+
+ if (offset == 0) {
+ for (size_t i = Mask.GetChunkCapacity() - 1; i >= eshift; --i) {
+ Mask.Data[i] = Mask.Data[i - eshift];
+ }
+ } else {
+ for (size_t i = Mask.GetChunkCapacity() - 1; i > eshift; --i)
+ Mask.Data[i] = (Mask.Data[i - eshift] << offset) | (Mask.Data[i - eshift - 1] >> subOffset);
+ if (eshift < Mask.GetChunkCapacity())
+ Mask.Data[eshift] = Mask.Data[0] << offset;
+ }
+ for (size_t i = 0; i < Min(eshift, Mask.GetChunkCapacity()); ++i)
+ Mask.Data[i] = 0;
+
+ // Cleanup extra high bits in the storage
+ Mask.Sanitize();
+ }
+ }
+ return *this;
+ }
+
+ TThis& RShift(size_t shift) {
+ if (shift != 0) {
+ const size_t eshift = shift / BitsPerChunk;
+ const size_t offset = shift % BitsPerChunk;
+ if (eshift >= Mask.GetChunkCapacity()) {
+ Clear();
+
+ } else {
+ const size_t limit = Mask.GetChunkCapacity() - eshift - 1;
+
+ if (offset == 0) {
+ for (size_t i = 0; i <= limit; ++i) {
+ Mask.Data[i] = Mask.Data[i + eshift];
+ }
+ } else {
+ const size_t subOffset = BitsPerChunk - offset;
+ for (size_t i = 0; i < limit; ++i)
+ Mask.Data[i] = (Mask.Data[i + eshift] >> offset) | (Mask.Data[i + eshift + 1] << subOffset);
+ Mask.Data[limit] = Mask.Data[Mask.GetChunkCapacity() - 1] >> offset;
+ }
+
+ for (size_t i = limit + 1; i < Mask.GetChunkCapacity(); ++i)
+ Mask.Data[i] = 0;
+ }
+ }
+ return *this;
+ }
+
+ // Applies bitmap at the specified offset using OR operator.
+ // This method is optimized combination of Or() and LShift(), which allows reducing memory allocation
+ // when combining long dynamic bitmaps.
+ TThis& Or(const TThis& bitmap, size_t offset) {
+ if (0 == offset)
+ return Or(bitmap);
+
+ const size_t otherValueBitCount = bitmap.ValueBitCount();
+ // Continue only if OR-ed bitmap have non-zero bits
+ if (otherValueBitCount) {
+ const size_t chunkShift = offset / BitsPerChunk;
+ const size_t subShift = offset % BitsPerChunk;
+ const size_t subOffset = BitsPerChunk - subShift;
+
+ Reserve(otherValueBitCount + offset);
+
+ if (subShift == 0) {
+ for (size_t i = chunkShift; i < Min(bitmap.Mask.GetChunkCapacity() + chunkShift, Mask.GetChunkCapacity()); ++i) {
+ Mask.Data[i] |= bitmap.Mask.Data[i - chunkShift];
+ }
+ } else {
+ Mask.Data[chunkShift] |= bitmap.Mask.Data[0] << subShift;
+ size_t i = chunkShift + 1;
+ for (; i < Min(bitmap.Mask.GetChunkCapacity() + chunkShift, Mask.GetChunkCapacity()); ++i) {
+ Mask.Data[i] |= (bitmap.Mask.Data[i - chunkShift] << subShift) | (bitmap.Mask.Data[i - chunkShift - 1] >> subOffset);
+ }
+ if (i < Mask.GetChunkCapacity())
+ Mask.Data[i] |= bitmap.Mask.Data[i - chunkShift - 1] >> subOffset;
+ }
+ }
+
+ return *this;
+ }
+
+ bool Equal(const TThis& bitmap) const {
+ if (Mask.GetChunkCapacity() > bitmap.Mask.GetChunkCapacity()) {
+ for (size_t i = bitmap.Mask.GetChunkCapacity(); i < Mask.GetChunkCapacity(); ++i) {
+ if (0 != Mask.Data[i])
+ return false;
+ }
} else if (Mask.GetChunkCapacity() < bitmap.Mask.GetChunkCapacity()) {
- for (size_t i = Mask.GetChunkCapacity(); i < bitmap.Mask.GetChunkCapacity(); ++i) {
- if (0 != bitmap.Mask.Data[i])
- return false;
- }
- }
- size_t size = Min(Mask.GetChunkCapacity(), bitmap.Mask.GetChunkCapacity());
- for (size_t i = 0; i < size; ++i) {
- if (Mask.Data[i] != bitmap.Mask.Data[i])
- return false;
- }
- return true;
- }
-
- template <class T>
+ for (size_t i = Mask.GetChunkCapacity(); i < bitmap.Mask.GetChunkCapacity(); ++i) {
+ if (0 != bitmap.Mask.Data[i])
+ return false;
+ }
+ }
+ size_t size = Min(Mask.GetChunkCapacity(), bitmap.Mask.GetChunkCapacity());
+ for (size_t i = 0; i < size; ++i) {
+ if (Mask.Data[i] != bitmap.Mask.Data[i])
+ return false;
+ }
+ return true;
+ }
+
+ template <class T>
Y_FORCE_INLINE bool Equal(const TBitMapOps<T>& bitmap) const {
- return Equal(TThis(bitmap));
- }
-
- int Compare(const TThis& bitmap) const {
- size_t size = Min(Mask.GetChunkCapacity(), bitmap.Mask.GetChunkCapacity());
- int res = ::memcmp(Mask.Data, bitmap.Mask.Data, size * sizeof(TChunk));
- if (0 != res || Mask.GetChunkCapacity() == bitmap.Mask.GetChunkCapacity())
- return res;
-
- if (Mask.GetChunkCapacity() > bitmap.Mask.GetChunkCapacity()) {
- for (size_t i = bitmap.Mask.GetChunkCapacity(); i < Mask.GetChunkCapacity(); ++i) {
- if (0 != Mask.Data[i])
- return 1;
- }
- } else {
- for (size_t i = Mask.GetChunkCapacity(); i < bitmap.Mask.GetChunkCapacity(); ++i) {
- if (0 != bitmap.Mask.Data[i])
- return -1;
- }
- }
- return 0;
- }
-
- template <class T>
+ return Equal(TThis(bitmap));
+ }
+
+ int Compare(const TThis& bitmap) const {
+ size_t size = Min(Mask.GetChunkCapacity(), bitmap.Mask.GetChunkCapacity());
+ int res = ::memcmp(Mask.Data, bitmap.Mask.Data, size * sizeof(TChunk));
+ if (0 != res || Mask.GetChunkCapacity() == bitmap.Mask.GetChunkCapacity())
+ return res;
+
+ if (Mask.GetChunkCapacity() > bitmap.Mask.GetChunkCapacity()) {
+ for (size_t i = bitmap.Mask.GetChunkCapacity(); i < Mask.GetChunkCapacity(); ++i) {
+ if (0 != Mask.Data[i])
+ return 1;
+ }
+ } else {
+ for (size_t i = Mask.GetChunkCapacity(); i < bitmap.Mask.GetChunkCapacity(); ++i) {
+ if (0 != bitmap.Mask.Data[i])
+ return -1;
+ }
+ }
+ return 0;
+ }
+
+ template <class T>
Y_FORCE_INLINE int Compare(const TBitMapOps<T>& bitmap) const {
- return Compare(TThis(bitmap));
- }
-
- // For backward compatibility
+ return Compare(TThis(bitmap));
+ }
+
+ // For backward compatibility
Y_FORCE_INLINE static int Compare(const TThis& l, const TThis& r) {
- return l.Compare(r);
- }
-
+ return l.Compare(r);
+ }
+
size_t FirstNonZeroBit() const {
- for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i) {
- if (Mask.Data[i]) {
- // CountTrailingZeroBits() expects unsigned types not smaller than unsigned int. So, convert before calling
- return BitsPerChunk * i + CountTrailingZeroBits(TIntType(Mask.Data[i]));
- }
- }
- return Size();
- }
-
- // Returns position of the next non-zero bit, which offset is greater than specified pos
- // Typical loop for iterating bits:
- // for (size_t pos = bits.FirstNonZeroBit(); pos != bits.Size(); pos = bits.NextNonZeroBit(pos)) {
- // ...
- // }
+ for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i) {
+ if (Mask.Data[i]) {
+ // CountTrailingZeroBits() expects unsigned types not smaller than unsigned int. So, convert before calling
+ return BitsPerChunk * i + CountTrailingZeroBits(TIntType(Mask.Data[i]));
+ }
+ }
+ return Size();
+ }
+
+ // Returns position of the next non-zero bit, which offset is greater than specified pos
+ // Typical loop for iterating bits:
+ // for (size_t pos = bits.FirstNonZeroBit(); pos != bits.Size(); pos = bits.NextNonZeroBit(pos)) {
+ // ...
+ // }
// See Y_FOR_EACH_BIT macro definition at the bottom
- size_t NextNonZeroBit(size_t pos) const {
+ size_t NextNonZeroBit(size_t pos) const {
size_t i = (pos + 1) >> DivCount;
- if (i < Mask.GetChunkCapacity()) {
- const size_t offset = (pos + 1) & ModMask;
- // Process the current chunk
- if (offset) {
- // Zero already iterated trailing bits using mask
- const TChunk val = Mask.Data[i] & ((~TChunk(0)) << offset);
- if (val) {
- return BitsPerChunk * i + CountTrailingZeroBits(TIntType(val));
+ if (i < Mask.GetChunkCapacity()) {
+ const size_t offset = (pos + 1) & ModMask;
+ // Process the current chunk
+ if (offset) {
+ // Zero already iterated trailing bits using mask
+ const TChunk val = Mask.Data[i] & ((~TChunk(0)) << offset);
+ if (val) {
+ return BitsPerChunk * i + CountTrailingZeroBits(TIntType(val));
+ }
+ // Continue with other chunks
+ ++i;
+ }
+
+ for (; i < Mask.GetChunkCapacity(); ++i) {
+ if (Mask.Data[i]) {
+ return BitsPerChunk * i + CountTrailingZeroBits(TIntType(Mask.Data[i]));
}
- // Continue with other chunks
- ++i;
}
-
- for (; i < Mask.GetChunkCapacity(); ++i) {
- if (Mask.Data[i]) {
- return BitsPerChunk * i + CountTrailingZeroBits(TIntType(Mask.Data[i]));
- }
- }
}
- return Size();
+ return Size();
}
Y_FORCE_INLINE size_t Count() const {
size_t count = 0;
- for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i)
+ for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i)
count += ::NBitMapPrivate::CountBitsPrivate(Mask.Data[i]);
return count;
}
@@ -983,7 +983,7 @@ public:
if (chunkCount < Mask.GetChunkCapacity()) {
::memset(Mask.Data + chunkCount, 0, (Mask.GetChunkCapacity() - chunkCount) * sizeof(TChunk));
}
- Mask.Sanitize();
+ Mask.Sanitize();
}
inline size_t Hash() const {
@@ -1009,58 +1009,58 @@ public:
return Mask.GetChunkCapacity();
}
};
-
-template <class X, class Y>
+
+template <class X, class Y>
inline TBitMapOps<X> operator&(const TBitMapOps<X>& x, const TBitMapOps<Y>& y) {
- return TBitMapOps<X>(x).And(y);
-}
-
-template <class X>
+ return TBitMapOps<X>(x).And(y);
+}
+
+template <class X>
inline TBitMapOps<X> operator&(const TBitMapOps<X>& x, const typename TBitMapOps<X>::TChunk& y) {
- return TBitMapOps<X>(x).And(y);
-}
-
-template <class X>
+ return TBitMapOps<X>(x).And(y);
+}
+
+template <class X>
inline TBitMapOps<X> operator&(const typename TBitMapOps<X>::TChunk& x, const TBitMapOps<X>& y) {
- return TBitMapOps<X>(x).And(y);
-}
-
-template <class X, class Y>
+ return TBitMapOps<X>(x).And(y);
+}
+
+template <class X, class Y>
inline TBitMapOps<X> operator|(const TBitMapOps<X>& x, const TBitMapOps<Y>& y) {
- return TBitMapOps<X>(x).Or(y);
-}
-
-template <class X>
+ return TBitMapOps<X>(x).Or(y);
+}
+
+template <class X>
inline TBitMapOps<X> operator|(const TBitMapOps<X>& x, const typename TBitMapOps<X>::TChunk& y) {
- return TBitMapOps<X>(x).Or(y);
-}
-
-template <class X>
+ return TBitMapOps<X>(x).Or(y);
+}
+
+template <class X>
inline TBitMapOps<X> operator|(const typename TBitMapOps<X>::TChunk& x, const TBitMapOps<X>& y) {
- return TBitMapOps<X>(x).Or(y);
-}
-
-template <class X, class Y>
+ return TBitMapOps<X>(x).Or(y);
+}
+
+template <class X, class Y>
inline TBitMapOps<X> operator^(const TBitMapOps<X>& x, const TBitMapOps<Y>& y) {
- return TBitMapOps<X>(x).Xor(y);
-}
-
-template <class X>
+ return TBitMapOps<X>(x).Xor(y);
+}
+
+template <class X>
inline TBitMapOps<X> operator^(const TBitMapOps<X>& x, const typename TBitMapOps<X>::TChunk& y) {
- return TBitMapOps<X>(x).Xor(y);
-}
-
-template <class X>
+ return TBitMapOps<X>(x).Xor(y);
+}
+
+template <class X>
inline TBitMapOps<X> operator^(const typename TBitMapOps<X>::TChunk& x, const TBitMapOps<X>& y) {
- return TBitMapOps<X>(x).Xor(y);
-}
-
+ return TBitMapOps<X>(x).Xor(y);
+}
+
template <class X, class Y>
inline TBitMapOps<X> operator-(const TBitMapOps<X>& x, const TBitMapOps<Y>& y) {
return TBitMapOps<X>(x).SetDifference(y);
}
-template <class X>
+template <class X>
inline TBitMapOps<X> operator-(const TBitMapOps<X>& x, const typename TBitMapOps<X>::TChunk& y) {
return TBitMapOps<X>(x).SetDifference(y);
}
@@ -1072,38 +1072,38 @@ inline TBitMapOps<X> operator-(const typename TBitMapOps<X>::TChunk& x, const TB
template <class X>
inline TBitMapOps<X> operator~(const TBitMapOps<X>& x) {
- return TBitMapOps<X>(x).Flip();
-}
-
-/////////////////// Specialization ///////////////////////////
-
+ return TBitMapOps<X>(x).Flip();
+}
+
+/////////////////// Specialization ///////////////////////////
+
template <size_t BitCount, typename TChunkType /*= ui64*/>
class TBitMap: public TBitMapOps<TFixedBitMapTraits<BitCount, TChunkType>> {
-private:
+private:
using TBase = TBitMapOps<TFixedBitMapTraits<BitCount, TChunkType>>;
-public:
- TBitMap()
- : TBase()
- {
- }
-
- TBitMap(typename TBase::TChunk val)
- : TBase(val)
- {
- }
-
+public:
+ TBitMap()
+ : TBase()
+ {
+ }
+
+ TBitMap(typename TBase::TChunk val)
+ : TBase(val)
+ {
+ }
+
TBitMap(const TBitMap<BitCount, TChunkType>&) = default;
-
- template <class T>
- TBitMap(const TBitMapOps<T>& bitmap)
- : TBase(bitmap)
- {
- }
-};
-
+
+ template <class T>
+ TBitMap(const TBitMapOps<T>& bitmap)
+ : TBase(bitmap)
+ {
+ }
+};
+
using TDynBitMap = TBitMapOps<TDynamicBitMapTraits<ui64>>;
-
+
#define Y_FOR_EACH_BIT(var, bitmap) for (size_t var = (bitmap).FirstNonZeroBit(); var != (bitmap).Size(); var = (bitmap).NextNonZeroBit(var))
template <typename TTraits>
diff --git a/util/generic/bitmap_ut.cpp b/util/generic/bitmap_ut.cpp
index 67156303a04..087d34a8dcc 100644
--- a/util/generic/bitmap_ut.cpp
+++ b/util/generic/bitmap_ut.cpp
@@ -3,10 +3,10 @@
#include <library/cpp/testing/unittest/registar.h>
#define INIT_BITMAP(bitmap, bits) \
- for (size_t i = 0; i < sizeof(bits) / sizeof(size_t); ++i) { \
+ for (size_t i = 0; i < sizeof(bits) / sizeof(size_t); ++i) { \
bitmap.Set(bits[i]); \
- }
-
+ }
+
#define CHECK_BITMAP(bitmap, bits) \
{ \
size_t cur = 0, end = sizeof(bits) / sizeof(size_t); \
@@ -15,11 +15,11 @@
UNIT_ASSERT_EQUAL_C(bitmap.Get(i), true, "pos=" << i); \
++cur; \
} else { \
- UNIT_ASSERT_EQUAL_C(bitmap.Get(i), false, "pos=" << i); \
+ UNIT_ASSERT_EQUAL_C(bitmap.Get(i), false, "pos=" << i); \
} \
} \
- }
-
+ }
+
#define CHECK_BITMAP_WITH_TAIL(bitmap, bits) \
{ \
size_t cur = 0, end = sizeof(bits) / sizeof(size_t); \
@@ -29,14 +29,14 @@
UNIT_ASSERT_EQUAL_C(bitmap.Get(i), true, "pos=" << i); \
++cur; \
} else { \
- UNIT_ASSERT_EQUAL_C(bitmap.Get(i), false, "pos=" << i); \
+ UNIT_ASSERT_EQUAL_C(bitmap.Get(i), false, "pos=" << i); \
} \
} else { \
UNIT_ASSERT_EQUAL_C(bitmap.Get(i), true, "pos=" << i); \
} \
} \
- }
-
+ }
+
Y_UNIT_TEST_SUITE(TBitMapTest) {
Y_UNIT_TEST(TestBitMap) {
TBitMap<101> bitmap;
@@ -45,47 +45,47 @@ Y_UNIT_TEST_SUITE(TBitMapTest) {
UNIT_ASSERT_EQUAL(bitmap.Count(), 0);
UNIT_ASSERT_EQUAL(bitmap.FirstNonZeroBit(), 101);
- size_t initBits[] = {0, 50, 100, 45};
- INIT_BITMAP(bitmap, initBits);
+ size_t initBits[] = {0, 50, 100, 45};
+ INIT_BITMAP(bitmap, initBits);
bitmap.Reset(45);
UNIT_ASSERT_EQUAL(bitmap.FirstNonZeroBit(), 0);
- size_t setBits[] = {0, 50, 100};
- CHECK_BITMAP(bitmap, setBits);
+ size_t setBits[] = {0, 50, 100};
+ CHECK_BITMAP(bitmap, setBits);
for (size_t i = 0; i < bitmap.Size(); ++i) {
- UNIT_ASSERT_EQUAL(bitmap.Get(i), bitmap.Test(i));
+ UNIT_ASSERT_EQUAL(bitmap.Get(i), bitmap.Test(i));
}
- UNIT_ASSERT_EQUAL(bitmap.Count(), 3);
-
- bitmap.Reset(0);
-
- UNIT_ASSERT_EQUAL(bitmap.FirstNonZeroBit(), 50);
-
- bitmap.Clear();
-
- UNIT_ASSERT_EQUAL(bitmap.Count(), 0);
- UNIT_ASSERT_EQUAL(bitmap.Empty(), true);
- }
-
+ UNIT_ASSERT_EQUAL(bitmap.Count(), 3);
+
+ bitmap.Reset(0);
+
+ UNIT_ASSERT_EQUAL(bitmap.FirstNonZeroBit(), 50);
+
+ bitmap.Clear();
+
+ UNIT_ASSERT_EQUAL(bitmap.Count(), 0);
+ UNIT_ASSERT_EQUAL(bitmap.Empty(), true);
+ }
+
Y_UNIT_TEST(TestDynBitMap) {
- TDynBitMap bitmap;
-
- UNIT_ASSERT_EQUAL(bitmap.Size(), 64); // Initial capacity
- UNIT_ASSERT_EQUAL(bitmap.Count(), 0);
- UNIT_ASSERT_EQUAL(bitmap.FirstNonZeroBit(), 64);
-
- size_t initBits[] = {0, 50, 100, 45};
- INIT_BITMAP(bitmap, initBits);
- bitmap.Reset(45);
-
- UNIT_ASSERT_EQUAL(bitmap.Size(), 128);
-
- UNIT_ASSERT_EQUAL(bitmap.FirstNonZeroBit(), 0);
- size_t setBits[] = {0, 50, 100};
- CHECK_BITMAP(bitmap, setBits);
-
+ TDynBitMap bitmap;
+
+ UNIT_ASSERT_EQUAL(bitmap.Size(), 64); // Initial capacity
+ UNIT_ASSERT_EQUAL(bitmap.Count(), 0);
+ UNIT_ASSERT_EQUAL(bitmap.FirstNonZeroBit(), 64);
+
+ size_t initBits[] = {0, 50, 100, 45};
+ INIT_BITMAP(bitmap, initBits);
+ bitmap.Reset(45);
+
+ UNIT_ASSERT_EQUAL(bitmap.Size(), 128);
+
+ UNIT_ASSERT_EQUAL(bitmap.FirstNonZeroBit(), 0);
+ size_t setBits[] = {0, 50, 100};
+ CHECK_BITMAP(bitmap, setBits);
+
for (size_t i = 0; i < bitmap.Size(); ++i) {
UNIT_ASSERT_EQUAL(bitmap.Get(i), bitmap.Test(i));
}
@@ -99,171 +99,171 @@ Y_UNIT_TEST_SUITE(TBitMapTest) {
bitmap.Clear();
UNIT_ASSERT_EQUAL(bitmap.Count(), 0);
- UNIT_ASSERT_EQUAL(bitmap.Empty(), true);
+ UNIT_ASSERT_EQUAL(bitmap.Empty(), true);
}
- template <class TBitMapImpl>
- void TestAndImpl() {
- TBitMapImpl bitmap1;
- TBitMapImpl bitmap2;
-
- size_t initBits1[] = {10, 20, 50, 100};
- size_t initBits2[] = {10, 11, 22, 50};
-
- INIT_BITMAP(bitmap1, initBits1);
- INIT_BITMAP(bitmap2, initBits2);
-
+ template <class TBitMapImpl>
+ void TestAndImpl() {
+ TBitMapImpl bitmap1;
+ TBitMapImpl bitmap2;
+
+ size_t initBits1[] = {10, 20, 50, 100};
+ size_t initBits2[] = {10, 11, 22, 50};
+
+ INIT_BITMAP(bitmap1, initBits1);
+ INIT_BITMAP(bitmap2, initBits2);
+
bitmap1.And(bitmap2);
UNIT_ASSERT_EQUAL(bitmap1.Count(), 2);
UNIT_ASSERT_EQUAL(bitmap2.Count(), 4);
- size_t resBits[] = {10, 50};
+ size_t resBits[] = {10, 50};
- CHECK_BITMAP(bitmap1, resBits);
- CHECK_BITMAP(bitmap2, initBits2);
+ CHECK_BITMAP(bitmap1, resBits);
+ CHECK_BITMAP(bitmap2, initBits2);
}
Y_UNIT_TEST(TestAndFixed) {
TestAndImpl<TBitMap<101>>();
- }
+ }
Y_UNIT_TEST(TestAndDyn) {
- TestAndImpl<TDynBitMap>();
- }
-
- template <class TBitMapImpl>
- void TestOrImpl() {
- TBitMapImpl bitmap1;
- TBitMapImpl bitmap2;
-
- size_t initBits1[] = {0, 10, 11, 76};
- size_t initBits2[] = {1, 11, 22, 50};
-
- INIT_BITMAP(bitmap1, initBits1);
- INIT_BITMAP(bitmap2, initBits2);
-
+ TestAndImpl<TDynBitMap>();
+ }
+
+ template <class TBitMapImpl>
+ void TestOrImpl() {
+ TBitMapImpl bitmap1;
+ TBitMapImpl bitmap2;
+
+ size_t initBits1[] = {0, 10, 11, 76};
+ size_t initBits2[] = {1, 11, 22, 50};
+
+ INIT_BITMAP(bitmap1, initBits1);
+ INIT_BITMAP(bitmap2, initBits2);
+
bitmap1.Or(bitmap2);
UNIT_ASSERT_EQUAL(bitmap1.Count(), 7);
UNIT_ASSERT_EQUAL(bitmap2.Count(), 4);
- size_t resBits[] = {0, 1, 10, 11, 22, 50, 76};
-
- CHECK_BITMAP(bitmap1, resBits);
- CHECK_BITMAP(bitmap2, initBits2);
-
- bitmap1.Clear();
- INIT_BITMAP(bitmap1, initBits1);
-
- UNIT_ASSERT_EQUAL(bitmap1 | (bitmap2 << 3), TBitMapImpl(bitmap1).Or(bitmap2, 3));
- UNIT_ASSERT_EQUAL(bitmap1 | (bitmap2 << 64), TBitMapImpl(bitmap1).Or(bitmap2, 64));
- UNIT_ASSERT_EQUAL(bitmap1 | (bitmap2 << 66), TBitMapImpl(bitmap1).Or(bitmap2, 66));
-
- UNIT_ASSERT_EQUAL(bitmap2 | (bitmap1 << 3), TBitMapImpl(bitmap2).Or(bitmap1, 3));
- UNIT_ASSERT_EQUAL(bitmap2 | (bitmap1 << 64), TBitMapImpl(bitmap2).Or(bitmap1, 64));
- UNIT_ASSERT_EQUAL(bitmap2 | (bitmap1 << 66), TBitMapImpl(bitmap2).Or(bitmap1, 66));
- }
-
+ size_t resBits[] = {0, 1, 10, 11, 22, 50, 76};
+
+ CHECK_BITMAP(bitmap1, resBits);
+ CHECK_BITMAP(bitmap2, initBits2);
+
+ bitmap1.Clear();
+ INIT_BITMAP(bitmap1, initBits1);
+
+ UNIT_ASSERT_EQUAL(bitmap1 | (bitmap2 << 3), TBitMapImpl(bitmap1).Or(bitmap2, 3));
+ UNIT_ASSERT_EQUAL(bitmap1 | (bitmap2 << 64), TBitMapImpl(bitmap1).Or(bitmap2, 64));
+ UNIT_ASSERT_EQUAL(bitmap1 | (bitmap2 << 66), TBitMapImpl(bitmap1).Or(bitmap2, 66));
+
+ UNIT_ASSERT_EQUAL(bitmap2 | (bitmap1 << 3), TBitMapImpl(bitmap2).Or(bitmap1, 3));
+ UNIT_ASSERT_EQUAL(bitmap2 | (bitmap1 << 64), TBitMapImpl(bitmap2).Or(bitmap1, 64));
+ UNIT_ASSERT_EQUAL(bitmap2 | (bitmap1 << 66), TBitMapImpl(bitmap2).Or(bitmap1, 66));
+ }
+
Y_UNIT_TEST(TestOrFixed) {
TestOrImpl<TBitMap<145>>();
- }
-
+ }
+
Y_UNIT_TEST(TestOrDyn) {
- TestOrImpl<TDynBitMap>();
- }
-
+ TestOrImpl<TDynBitMap>();
+ }
+
Y_UNIT_TEST(TestCopy) {
- TBitMap<101> bitmap1;
- size_t initBits[] = {0, 10, 11, 76, 100};
-
- INIT_BITMAP(bitmap1, initBits);
-
- TDynBitMap bitmap2(bitmap1);
- CHECK_BITMAP(bitmap2, initBits);
-
- TBitMap<101> bitmap3(bitmap1);
- CHECK_BITMAP(bitmap3, initBits);
-
- TBitMap<127> bitmap4(bitmap1);
- CHECK_BITMAP(bitmap4, initBits);
-
- TDynBitMap bitmap5;
- bitmap5 = bitmap1;
- CHECK_BITMAP(bitmap5, initBits);
-
- TBitMap<101> bitmap6;
- bitmap6 = bitmap1;
- CHECK_BITMAP(bitmap6, initBits);
-
- TBitMap<127> bitmap7;
- bitmap7 = bitmap1;
- CHECK_BITMAP(bitmap7, initBits);
-
- TBitMap<101> bitmap8;
- DoSwap(bitmap8, bitmap6);
- CHECK_BITMAP(bitmap8, initBits);
- UNIT_ASSERT_EQUAL(bitmap6.Empty(), true);
-
- TDynBitMap bitmap9;
- DoSwap(bitmap9, bitmap5);
- CHECK_BITMAP(bitmap9, initBits);
- UNIT_ASSERT_EQUAL(bitmap5.Empty(), true);
-
- // 64->32
- TBitMap<160, ui32> bitmap10(bitmap1);
- CHECK_BITMAP(bitmap10, initBits);
-
- // 64->16
- TBitMap<160, ui16> bitmap11(bitmap1);
- CHECK_BITMAP(bitmap11, initBits);
-
- // 64->8
- TBitMap<160, ui8> bitmap12(bitmap1);
- CHECK_BITMAP(bitmap12, initBits);
-
- // 32->16
- TBitMap<160, ui16> bitmap13(bitmap10);
- CHECK_BITMAP(bitmap13, initBits);
-
- // 32->64
- TBitMap<160, ui64> bitmap14(bitmap10);
- CHECK_BITMAP(bitmap14, initBits);
-
- // 16->64
- TBitMap<160, ui64> bitmap15(bitmap11);
- CHECK_BITMAP(bitmap15, initBits);
-
- // 8->64
- TBitMap<160, ui64> bitmap16(bitmap12);
- CHECK_BITMAP(bitmap16, initBits);
- }
-
+ TBitMap<101> bitmap1;
+ size_t initBits[] = {0, 10, 11, 76, 100};
+
+ INIT_BITMAP(bitmap1, initBits);
+
+ TDynBitMap bitmap2(bitmap1);
+ CHECK_BITMAP(bitmap2, initBits);
+
+ TBitMap<101> bitmap3(bitmap1);
+ CHECK_BITMAP(bitmap3, initBits);
+
+ TBitMap<127> bitmap4(bitmap1);
+ CHECK_BITMAP(bitmap4, initBits);
+
+ TDynBitMap bitmap5;
+ bitmap5 = bitmap1;
+ CHECK_BITMAP(bitmap5, initBits);
+
+ TBitMap<101> bitmap6;
+ bitmap6 = bitmap1;
+ CHECK_BITMAP(bitmap6, initBits);
+
+ TBitMap<127> bitmap7;
+ bitmap7 = bitmap1;
+ CHECK_BITMAP(bitmap7, initBits);
+
+ TBitMap<101> bitmap8;
+ DoSwap(bitmap8, bitmap6);
+ CHECK_BITMAP(bitmap8, initBits);
+ UNIT_ASSERT_EQUAL(bitmap6.Empty(), true);
+
+ TDynBitMap bitmap9;
+ DoSwap(bitmap9, bitmap5);
+ CHECK_BITMAP(bitmap9, initBits);
+ UNIT_ASSERT_EQUAL(bitmap5.Empty(), true);
+
+ // 64->32
+ TBitMap<160, ui32> bitmap10(bitmap1);
+ CHECK_BITMAP(bitmap10, initBits);
+
+ // 64->16
+ TBitMap<160, ui16> bitmap11(bitmap1);
+ CHECK_BITMAP(bitmap11, initBits);
+
+ // 64->8
+ TBitMap<160, ui8> bitmap12(bitmap1);
+ CHECK_BITMAP(bitmap12, initBits);
+
+ // 32->16
+ TBitMap<160, ui16> bitmap13(bitmap10);
+ CHECK_BITMAP(bitmap13, initBits);
+
+ // 32->64
+ TBitMap<160, ui64> bitmap14(bitmap10);
+ CHECK_BITMAP(bitmap14, initBits);
+
+ // 16->64
+ TBitMap<160, ui64> bitmap15(bitmap11);
+ CHECK_BITMAP(bitmap15, initBits);
+
+ // 8->64
+ TBitMap<160, ui64> bitmap16(bitmap12);
+ CHECK_BITMAP(bitmap16, initBits);
+ }
+
Y_UNIT_TEST(TestOps) {
- TBitMap<16> bitmap1;
- TBitMap<12> bitmap2;
- size_t initBits1[] = {0, 3, 7, 11};
- size_t initBits2[] = {1, 3, 6, 7, 11};
- INIT_BITMAP(bitmap1, initBits1);
- INIT_BITMAP(bitmap2, initBits2);
-
- bitmap1.Or(3).And(bitmap2).Flip();
-
- size_t resBits[] = {0, 2, 4, 5, 6, 8, 9, 10, 12};
- CHECK_BITMAP_WITH_TAIL(bitmap1, resBits);
-
- TDynBitMap bitmap3;
- INIT_BITMAP(bitmap3, initBits1);
-
- bitmap3.Or(3).And(bitmap2).Flip();
-
- CHECK_BITMAP_WITH_TAIL(bitmap3, resBits);
-
- bitmap3.Clear();
- INIT_BITMAP(bitmap3, initBits1);
-
+ TBitMap<16> bitmap1;
+ TBitMap<12> bitmap2;
+ size_t initBits1[] = {0, 3, 7, 11};
+ size_t initBits2[] = {1, 3, 6, 7, 11};
+ INIT_BITMAP(bitmap1, initBits1);
+ INIT_BITMAP(bitmap2, initBits2);
+
+ bitmap1.Or(3).And(bitmap2).Flip();
+
+ size_t resBits[] = {0, 2, 4, 5, 6, 8, 9, 10, 12};
+ CHECK_BITMAP_WITH_TAIL(bitmap1, resBits);
+
+ TDynBitMap bitmap3;
+ INIT_BITMAP(bitmap3, initBits1);
+
+ bitmap3.Or(3).And(bitmap2).Flip();
+
+ CHECK_BITMAP_WITH_TAIL(bitmap3, resBits);
+
+ bitmap3.Clear();
+ INIT_BITMAP(bitmap3, initBits1);
+
TDynBitMap bitmap4 = ~((bitmap3 | 3) & bitmap2);
- CHECK_BITMAP_WITH_TAIL(bitmap4, resBits);
+ CHECK_BITMAP_WITH_TAIL(bitmap4, resBits);
TBitMap<128, ui32> expmap;
expmap.Set(47);
@@ -284,196 +284,196 @@ Y_UNIT_TEST_SUITE(TBitMapTest) {
UNIT_ASSERT_EQUAL(tst2, (1 << 14));
expmap.Export(33, tst3);
UNIT_ASSERT_EQUAL(tst3, (1 << 14));
- }
-
+ }
+
Y_UNIT_TEST(TestShiftFixed) {
- size_t initBits[] = {0, 3, 7, 11};
-
- TBitMap<128> bitmap1;
-
- INIT_BITMAP(bitmap1, initBits);
- bitmap1 <<= 62;
- size_t resBits1[] = {62, 65, 69, 73};
- CHECK_BITMAP(bitmap1, resBits1);
- bitmap1 >>= 62;
- CHECK_BITMAP(bitmap1, initBits);
-
- bitmap1.Clear();
- INIT_BITMAP(bitmap1, initBits);
- bitmap1 <<= 120;
- size_t resBits2[] = {120, 123, 127};
- CHECK_BITMAP(bitmap1, resBits2);
- bitmap1 >>= 120;
- size_t resBits3[] = {0, 3, 7};
- CHECK_BITMAP(bitmap1, resBits3);
-
- bitmap1.Clear();
- INIT_BITMAP(bitmap1, initBits);
- bitmap1 <<= 128;
- UNIT_ASSERT_EQUAL(bitmap1.Empty(), true);
-
- bitmap1.Clear();
- INIT_BITMAP(bitmap1, initBits);
- bitmap1 <<= 120;
- bitmap1 >>= 128;
- UNIT_ASSERT_EQUAL(bitmap1.Empty(), true);
-
- bitmap1.Clear();
- INIT_BITMAP(bitmap1, initBits);
- bitmap1 <<= 140;
- UNIT_ASSERT_EQUAL(bitmap1.Empty(), true);
-
- bitmap1.Clear();
- INIT_BITMAP(bitmap1, initBits);
- bitmap1 <<= 62;
- bitmap1 >>= 140;
- UNIT_ASSERT_EQUAL(bitmap1.Empty(), true);
- }
-
+ size_t initBits[] = {0, 3, 7, 11};
+
+ TBitMap<128> bitmap1;
+
+ INIT_BITMAP(bitmap1, initBits);
+ bitmap1 <<= 62;
+ size_t resBits1[] = {62, 65, 69, 73};
+ CHECK_BITMAP(bitmap1, resBits1);
+ bitmap1 >>= 62;
+ CHECK_BITMAP(bitmap1, initBits);
+
+ bitmap1.Clear();
+ INIT_BITMAP(bitmap1, initBits);
+ bitmap1 <<= 120;
+ size_t resBits2[] = {120, 123, 127};
+ CHECK_BITMAP(bitmap1, resBits2);
+ bitmap1 >>= 120;
+ size_t resBits3[] = {0, 3, 7};
+ CHECK_BITMAP(bitmap1, resBits3);
+
+ bitmap1.Clear();
+ INIT_BITMAP(bitmap1, initBits);
+ bitmap1 <<= 128;
+ UNIT_ASSERT_EQUAL(bitmap1.Empty(), true);
+
+ bitmap1.Clear();
+ INIT_BITMAP(bitmap1, initBits);
+ bitmap1 <<= 120;
+ bitmap1 >>= 128;
+ UNIT_ASSERT_EQUAL(bitmap1.Empty(), true);
+
+ bitmap1.Clear();
+ INIT_BITMAP(bitmap1, initBits);
+ bitmap1 <<= 140;
+ UNIT_ASSERT_EQUAL(bitmap1.Empty(), true);
+
+ bitmap1.Clear();
+ INIT_BITMAP(bitmap1, initBits);
+ bitmap1 <<= 62;
+ bitmap1 >>= 140;
+ UNIT_ASSERT_EQUAL(bitmap1.Empty(), true);
+ }
+
Y_UNIT_TEST(TestShiftDyn) {
- size_t initBits[] = {0, 3, 7, 11};
-
- TDynBitMap bitmap1;
-
- INIT_BITMAP(bitmap1, initBits);
- bitmap1 <<= 62;
- size_t resBits1[] = {62, 65, 69, 73};
- CHECK_BITMAP(bitmap1, resBits1);
- bitmap1 >>= 62;
- CHECK_BITMAP(bitmap1, initBits);
-
- bitmap1.Clear();
- INIT_BITMAP(bitmap1, initBits);
- bitmap1 <<= 120;
- size_t resBits2[] = {120, 123, 127, 131};
- CHECK_BITMAP(bitmap1, resBits2);
- bitmap1 >>= 120;
- CHECK_BITMAP(bitmap1, initBits);
-
- bitmap1.Clear();
- INIT_BITMAP(bitmap1, initBits);
- bitmap1 <<= 128;
- size_t resBits3[] = {128, 131, 135, 139};
- CHECK_BITMAP(bitmap1, resBits3);
-
- bitmap1.Clear();
- INIT_BITMAP(bitmap1, initBits);
- bitmap1 <<= 120;
- bitmap1 >>= 128;
- size_t resBits4[] = {3};
- CHECK_BITMAP(bitmap1, resBits4);
-
- bitmap1.Clear();
- INIT_BITMAP(bitmap1, initBits);
- bitmap1 <<= 62;
- bitmap1 >>= 140;
- UNIT_ASSERT_EQUAL(bitmap1.Empty(), true);
- }
-
- // Test that we don't expand bitmap in LShift when high-order bits are zero
+ size_t initBits[] = {0, 3, 7, 11};
+
+ TDynBitMap bitmap1;
+
+ INIT_BITMAP(bitmap1, initBits);
+ bitmap1 <<= 62;
+ size_t resBits1[] = {62, 65, 69, 73};
+ CHECK_BITMAP(bitmap1, resBits1);
+ bitmap1 >>= 62;
+ CHECK_BITMAP(bitmap1, initBits);
+
+ bitmap1.Clear();
+ INIT_BITMAP(bitmap1, initBits);
+ bitmap1 <<= 120;
+ size_t resBits2[] = {120, 123, 127, 131};
+ CHECK_BITMAP(bitmap1, resBits2);
+ bitmap1 >>= 120;
+ CHECK_BITMAP(bitmap1, initBits);
+
+ bitmap1.Clear();
+ INIT_BITMAP(bitmap1, initBits);
+ bitmap1 <<= 128;
+ size_t resBits3[] = {128, 131, 135, 139};
+ CHECK_BITMAP(bitmap1, resBits3);
+
+ bitmap1.Clear();
+ INIT_BITMAP(bitmap1, initBits);
+ bitmap1 <<= 120;
+ bitmap1 >>= 128;
+ size_t resBits4[] = {3};
+ CHECK_BITMAP(bitmap1, resBits4);
+
+ bitmap1.Clear();
+ INIT_BITMAP(bitmap1, initBits);
+ bitmap1 <<= 62;
+ bitmap1 >>= 140;
+ UNIT_ASSERT_EQUAL(bitmap1.Empty(), true);
+ }
+
+ // Test that we don't expand bitmap in LShift when high-order bits are zero
Y_UNIT_TEST(TestShiftExpansion) {
- UNIT_ASSERT_EQUAL(TDynBitMap().LShift(1).Size(), 64);
- UNIT_ASSERT_EQUAL(TDynBitMap().LShift(65).Size(), 64);
- UNIT_ASSERT_EQUAL(TDynBitMap().LShift(128).Size(), 64);
-
- TDynBitMap bitmap;
- bitmap.Set(62).LShift(1);
- UNIT_ASSERT_EQUAL(bitmap, TDynBitMap().Set(63));
- UNIT_ASSERT_EQUAL(bitmap.Size(), 64);
-
- // Expand explicitly
- bitmap.Set(65);
- UNIT_ASSERT_EQUAL(bitmap.Size(), 128);
-
- bitmap.Clear().Set(0).LShift(1);
- UNIT_ASSERT_EQUAL(bitmap, TDynBitMap().Set(1));
- UNIT_ASSERT_EQUAL(bitmap.Size(), 128);
-
- bitmap.Clear().Set(63).LShift(1);
- UNIT_ASSERT_EQUAL(bitmap, TDynBitMap().Set(64));
- UNIT_ASSERT_EQUAL(bitmap.Size(), 128);
-
- bitmap.Clear().Set(63).LShift(64);
- UNIT_ASSERT_EQUAL(bitmap, TDynBitMap().Set(127));
- UNIT_ASSERT_EQUAL(bitmap.Size(), 128);
-
- bitmap.Clear().Set(62).LShift(129);
- UNIT_ASSERT_EQUAL(bitmap, TDynBitMap().Set(191));
- UNIT_ASSERT_EQUAL(bitmap.Size(), 256);
- }
-
+ UNIT_ASSERT_EQUAL(TDynBitMap().LShift(1).Size(), 64);
+ UNIT_ASSERT_EQUAL(TDynBitMap().LShift(65).Size(), 64);
+ UNIT_ASSERT_EQUAL(TDynBitMap().LShift(128).Size(), 64);
+
+ TDynBitMap bitmap;
+ bitmap.Set(62).LShift(1);
+ UNIT_ASSERT_EQUAL(bitmap, TDynBitMap().Set(63));
+ UNIT_ASSERT_EQUAL(bitmap.Size(), 64);
+
+ // Expand explicitly
+ bitmap.Set(65);
+ UNIT_ASSERT_EQUAL(bitmap.Size(), 128);
+
+ bitmap.Clear().Set(0).LShift(1);
+ UNIT_ASSERT_EQUAL(bitmap, TDynBitMap().Set(1));
+ UNIT_ASSERT_EQUAL(bitmap.Size(), 128);
+
+ bitmap.Clear().Set(63).LShift(1);
+ UNIT_ASSERT_EQUAL(bitmap, TDynBitMap().Set(64));
+ UNIT_ASSERT_EQUAL(bitmap.Size(), 128);
+
+ bitmap.Clear().Set(63).LShift(64);
+ UNIT_ASSERT_EQUAL(bitmap, TDynBitMap().Set(127));
+ UNIT_ASSERT_EQUAL(bitmap.Size(), 128);
+
+ bitmap.Clear().Set(62).LShift(129);
+ UNIT_ASSERT_EQUAL(bitmap, TDynBitMap().Set(191));
+ UNIT_ASSERT_EQUAL(bitmap.Size(), 256);
+ }
+
Y_UNIT_TEST(TestFixedSanity) {
- // test extra-bit cleanup
- UNIT_ASSERT_EQUAL(TBitMap<33>().Set(0).LShift(34).RShift(34).Empty(), true);
- UNIT_ASSERT_EQUAL(TBitMap<88>().Set(0).Set(1).Set(2).LShift(90).RShift(90).Empty(), true);
- UNIT_ASSERT_EQUAL(TBitMap<88>().Flip().RShift(88).Empty(), true);
- UNIT_ASSERT_EQUAL(TBitMap<64>().Flip().LShift(2).RShift(2).Count(), 62);
- UNIT_ASSERT_EQUAL(TBitMap<67>().Flip().LShift(2).RShift(2).Count(), 65);
- UNIT_ASSERT_EQUAL(TBitMap<128>().Flip().LShift(2).RShift(2).Count(), 126);
- UNIT_ASSERT_EQUAL(TBitMap<130>().Flip().LShift(2).RShift(2).Count(), 128);
- UNIT_ASSERT_EQUAL(TBitMap<130>(TDynBitMap().Set(131)).Empty(), true);
- UNIT_ASSERT_EQUAL(TBitMap<33>().Or(TBitMap<40>().Set(39)).Empty(), true);
- UNIT_ASSERT_EQUAL(TBitMap<33>().Xor(TBitMap<40>().Set(39)).Empty(), true);
- }
-
+ // test extra-bit cleanup
+ UNIT_ASSERT_EQUAL(TBitMap<33>().Set(0).LShift(34).RShift(34).Empty(), true);
+ UNIT_ASSERT_EQUAL(TBitMap<88>().Set(0).Set(1).Set(2).LShift(90).RShift(90).Empty(), true);
+ UNIT_ASSERT_EQUAL(TBitMap<88>().Flip().RShift(88).Empty(), true);
+ UNIT_ASSERT_EQUAL(TBitMap<64>().Flip().LShift(2).RShift(2).Count(), 62);
+ UNIT_ASSERT_EQUAL(TBitMap<67>().Flip().LShift(2).RShift(2).Count(), 65);
+ UNIT_ASSERT_EQUAL(TBitMap<128>().Flip().LShift(2).RShift(2).Count(), 126);
+ UNIT_ASSERT_EQUAL(TBitMap<130>().Flip().LShift(2).RShift(2).Count(), 128);
+ UNIT_ASSERT_EQUAL(TBitMap<130>(TDynBitMap().Set(131)).Empty(), true);
+ UNIT_ASSERT_EQUAL(TBitMap<33>().Or(TBitMap<40>().Set(39)).Empty(), true);
+ UNIT_ASSERT_EQUAL(TBitMap<33>().Xor(TBitMap<40>().Set(39)).Empty(), true);
+ }
+
Y_UNIT_TEST(TestIterate) {
- TDynBitMap bitmap1;
- TDynBitMap bitmap2;
-
- size_t initBits1[] = {0, 3, 7, 8, 11, 33, 34, 35, 36, 62, 63, 100, 127};
- INIT_BITMAP(bitmap1, initBits1);
- for (size_t i = bitmap1.FirstNonZeroBit(); i != bitmap1.Size(); i = bitmap1.NextNonZeroBit(i)) {
- bitmap2.Set(i);
- }
- CHECK_BITMAP(bitmap2, initBits1);
- UNIT_ASSERT_EQUAL(bitmap1, bitmap2);
-
- size_t initBits2[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 33, 34, 35, 36, 62};
- bitmap1.Clear();
- bitmap2.Clear();
- INIT_BITMAP(bitmap1, initBits2);
- for (size_t i = bitmap1.FirstNonZeroBit(); i != bitmap1.Size(); i = bitmap1.NextNonZeroBit(i)) {
- bitmap2.Set(i);
- }
- CHECK_BITMAP(bitmap2, initBits2);
- UNIT_ASSERT_EQUAL(bitmap1, bitmap2);
-
- UNIT_ASSERT_EQUAL(bitmap1.NextNonZeroBit(63), bitmap1.Size());
- UNIT_ASSERT_EQUAL(bitmap1.NextNonZeroBit(64), bitmap1.Size());
- UNIT_ASSERT_EQUAL(bitmap1.NextNonZeroBit(65), bitmap1.Size());
- UNIT_ASSERT_EQUAL(bitmap1.NextNonZeroBit(127), bitmap1.Size());
- UNIT_ASSERT_EQUAL(bitmap1.NextNonZeroBit(533), bitmap1.Size());
-
- TBitMap<128, ui8> bitmap3;
- bitmap1.Clear();
- INIT_BITMAP(bitmap1, initBits1);
- for (size_t i = bitmap1.FirstNonZeroBit(); i != bitmap1.Size(); i = bitmap1.NextNonZeroBit(i)) {
- bitmap3.Set(i);
- }
- CHECK_BITMAP(bitmap3, initBits1);
- UNIT_ASSERT_EQUAL(bitmap3, bitmap1);
-
- TBitMap<18> bitmap4;
- bitmap4.Set(15);
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(0), 15);
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(15), bitmap4.Size());
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(63), bitmap4.Size());
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(64), bitmap4.Size());
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(65), bitmap4.Size());
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(127), bitmap4.Size());
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(533), bitmap4.Size());
-
- bitmap4.Clear().Flip();
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(0), 1);
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(15), 16);
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(17), bitmap4.Size());
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(18), bitmap4.Size());
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(63), bitmap4.Size());
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(64), bitmap4.Size());
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(65), bitmap4.Size());
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(127), bitmap4.Size());
- UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(533), bitmap4.Size());
- }
+ TDynBitMap bitmap1;
+ TDynBitMap bitmap2;
+
+ size_t initBits1[] = {0, 3, 7, 8, 11, 33, 34, 35, 36, 62, 63, 100, 127};
+ INIT_BITMAP(bitmap1, initBits1);
+ for (size_t i = bitmap1.FirstNonZeroBit(); i != bitmap1.Size(); i = bitmap1.NextNonZeroBit(i)) {
+ bitmap2.Set(i);
+ }
+ CHECK_BITMAP(bitmap2, initBits1);
+ UNIT_ASSERT_EQUAL(bitmap1, bitmap2);
+
+ size_t initBits2[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 33, 34, 35, 36, 62};
+ bitmap1.Clear();
+ bitmap2.Clear();
+ INIT_BITMAP(bitmap1, initBits2);
+ for (size_t i = bitmap1.FirstNonZeroBit(); i != bitmap1.Size(); i = bitmap1.NextNonZeroBit(i)) {
+ bitmap2.Set(i);
+ }
+ CHECK_BITMAP(bitmap2, initBits2);
+ UNIT_ASSERT_EQUAL(bitmap1, bitmap2);
+
+ UNIT_ASSERT_EQUAL(bitmap1.NextNonZeroBit(63), bitmap1.Size());
+ UNIT_ASSERT_EQUAL(bitmap1.NextNonZeroBit(64), bitmap1.Size());
+ UNIT_ASSERT_EQUAL(bitmap1.NextNonZeroBit(65), bitmap1.Size());
+ UNIT_ASSERT_EQUAL(bitmap1.NextNonZeroBit(127), bitmap1.Size());
+ UNIT_ASSERT_EQUAL(bitmap1.NextNonZeroBit(533), bitmap1.Size());
+
+ TBitMap<128, ui8> bitmap3;
+ bitmap1.Clear();
+ INIT_BITMAP(bitmap1, initBits1);
+ for (size_t i = bitmap1.FirstNonZeroBit(); i != bitmap1.Size(); i = bitmap1.NextNonZeroBit(i)) {
+ bitmap3.Set(i);
+ }
+ CHECK_BITMAP(bitmap3, initBits1);
+ UNIT_ASSERT_EQUAL(bitmap3, bitmap1);
+
+ TBitMap<18> bitmap4;
+ bitmap4.Set(15);
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(0), 15);
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(15), bitmap4.Size());
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(63), bitmap4.Size());
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(64), bitmap4.Size());
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(65), bitmap4.Size());
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(127), bitmap4.Size());
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(533), bitmap4.Size());
+
+ bitmap4.Clear().Flip();
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(0), 1);
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(15), 16);
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(17), bitmap4.Size());
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(18), bitmap4.Size());
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(63), bitmap4.Size());
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(64), bitmap4.Size());
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(65), bitmap4.Size());
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(127), bitmap4.Size());
+ UNIT_ASSERT_EQUAL(bitmap4.NextNonZeroBit(533), bitmap4.Size());
+ }
Y_UNIT_TEST(TestHashFixed) {
TBitMap<32, ui8> bitmap32;
@@ -550,27 +550,27 @@ Y_UNIT_TEST_SUITE(TBitMapTest) {
}
Y_UNIT_TEST(TestSetResetRange) {
- // Single chunk
+ // Single chunk
using TBitMap1Chunk = TBitMap<64>;
- UNIT_ASSERT_EQUAL(TBitMap1Chunk().Flip().Reset(10, 50), TBitMap1Chunk().Set(0, 10).Set(50, 64));
- UNIT_ASSERT_EQUAL(TBitMap1Chunk().Flip().Reset(0, 10), TBitMap1Chunk().Set(10, 64));
- UNIT_ASSERT_EQUAL(TBitMap1Chunk().Flip().Reset(50, 64), TBitMap1Chunk().Set(0, 50));
- UNIT_ASSERT_EQUAL(TBitMap1Chunk().Flip().Reset(0, 10).Reset(50, 64), TBitMap1Chunk().Set(10, 50));
-
- // Two chunks
+ UNIT_ASSERT_EQUAL(TBitMap1Chunk().Flip().Reset(10, 50), TBitMap1Chunk().Set(0, 10).Set(50, 64));
+ UNIT_ASSERT_EQUAL(TBitMap1Chunk().Flip().Reset(0, 10), TBitMap1Chunk().Set(10, 64));
+ UNIT_ASSERT_EQUAL(TBitMap1Chunk().Flip().Reset(50, 64), TBitMap1Chunk().Set(0, 50));
+ UNIT_ASSERT_EQUAL(TBitMap1Chunk().Flip().Reset(0, 10).Reset(50, 64), TBitMap1Chunk().Set(10, 50));
+
+ // Two chunks
using TBitMap2Chunks = TBitMap<64, ui32>;
- UNIT_ASSERT_EQUAL(TBitMap2Chunks().Flip().Reset(10, 50), TBitMap2Chunks().Set(0, 10).Set(50, 64));
- UNIT_ASSERT_EQUAL(TBitMap2Chunks().Flip().Reset(0, 10), TBitMap2Chunks().Set(10, 64));
- UNIT_ASSERT_EQUAL(TBitMap2Chunks().Flip().Reset(50, 64), TBitMap2Chunks().Set(0, 50));
- UNIT_ASSERT_EQUAL(TBitMap2Chunks().Flip().Reset(0, 10).Reset(50, 64), TBitMap2Chunks().Set(10, 50));
-
- // Many chunks
+ UNIT_ASSERT_EQUAL(TBitMap2Chunks().Flip().Reset(10, 50), TBitMap2Chunks().Set(0, 10).Set(50, 64));
+ UNIT_ASSERT_EQUAL(TBitMap2Chunks().Flip().Reset(0, 10), TBitMap2Chunks().Set(10, 64));
+ UNIT_ASSERT_EQUAL(TBitMap2Chunks().Flip().Reset(50, 64), TBitMap2Chunks().Set(0, 50));
+ UNIT_ASSERT_EQUAL(TBitMap2Chunks().Flip().Reset(0, 10).Reset(50, 64), TBitMap2Chunks().Set(10, 50));
+
+ // Many chunks
using TBitMap4Chunks = TBitMap<64, ui16>;
- UNIT_ASSERT_EQUAL(TBitMap4Chunks().Flip().Reset(10, 50), TBitMap4Chunks().Set(0, 10).Set(50, 64));
- UNIT_ASSERT_EQUAL(TBitMap4Chunks().Flip().Reset(0, 10), TBitMap4Chunks().Set(10, 64));
- UNIT_ASSERT_EQUAL(TBitMap4Chunks().Flip().Reset(50, 64), TBitMap4Chunks().Set(0, 50));
- UNIT_ASSERT_EQUAL(TBitMap4Chunks().Flip().Reset(0, 10).Reset(50, 64), TBitMap4Chunks().Set(10, 50));
- }
+ UNIT_ASSERT_EQUAL(TBitMap4Chunks().Flip().Reset(10, 50), TBitMap4Chunks().Set(0, 10).Set(50, 64));
+ UNIT_ASSERT_EQUAL(TBitMap4Chunks().Flip().Reset(0, 10), TBitMap4Chunks().Set(10, 64));
+ UNIT_ASSERT_EQUAL(TBitMap4Chunks().Flip().Reset(50, 64), TBitMap4Chunks().Set(0, 50));
+ UNIT_ASSERT_EQUAL(TBitMap4Chunks().Flip().Reset(0, 10).Reset(50, 64), TBitMap4Chunks().Set(10, 50));
+ }
Y_UNIT_TEST(TestSetRangeDyn) {
for (size_t start = 0; start < 192; ++start) {
@@ -594,4 +594,4 @@ Y_UNIT_TEST_SUITE(TBitMapTest) {
UNIT_ASSERT_VALUES_EQUAL(bm.Get(k), k >= 1 && k < 2048 ? 0 : 1);
}
}
-}
+}
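The shift tests above pin down two behaviours worth keeping in mind: TDynBitMap grows on a left shift only when a set bit actually needs an extra chunk, while a fixed-size TBitMap silently drops bits shifted past its declared size. A minimal sketch of both, assuming only util/generic/bitmap.h and util/system/yassert.h (the function name is illustrative, not from the source):

    #include <util/generic/bitmap.h>
    #include <util/system/yassert.h>

    void ShiftSemanticsSketch() {
        // TDynBitMap starts with one 64-bit chunk and keeps it while the
        // shifted bits still fit.
        TDynBitMap dyn;
        dyn.Set(62).LShift(1);         // highest set bit becomes 63, still in chunk 0
        Y_ASSERT(dyn.Size() == 64);

        dyn.Clear().Set(63).LShift(1); // bit 64 needs a second chunk
        Y_ASSERT(dyn.Size() == 128);

        // A fixed TBitMap cannot grow: bits shifted past its size vanish.
        TBitMap<64> fixed;
        fixed.Set(63).LShift(1);
        Y_ASSERT(fixed.Empty());
    }
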
diff --git a/util/generic/ptr.h b/util/generic/ptr.h
index 31282045ff9..19db0e3ec55 100644
--- a/util/generic/ptr.h
+++ b/util/generic/ptr.h
@@ -102,7 +102,7 @@ template <class Base, class T>
class TPointerCommon {
public:
using TValueType = T;
-
+
inline T* operator->() const noexcept {
T* ptr = AsT();
Y_ASSERT(ptr);
diff --git a/util/stream/debug.cpp b/util/stream/debug.cpp
index 6a43700e592..afd5b3e1c73 100644
--- a/util/stream/debug.cpp
+++ b/util/stream/debug.cpp
@@ -27,9 +27,9 @@ namespace {
}
} else {
Out = &Cnull;
- Level = 0;
- }
- }
+ Level = 0;
+ }
+ }
IOutputStream* Out;
int Level;
@@ -44,7 +44,7 @@ struct TSingletonTraits<TDbgSelector> {
IOutputStream& StdDbgStream() noexcept {
return *(Singleton<TDbgSelector>()->Out);
}
-
+
int StdDbgLevel() noexcept {
- return Singleton<TDbgSelector>()->Level;
-}
+ return Singleton<TDbgSelector>()->Level;
+}
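The selector above falls back to Cnull with level 0 when DBGOUT is not set, so callers can gate optional output on StdDbgLevel() and pay nothing by default. A small usage sketch (the function name and message are illustrative, not from the source):

    #include <util/stream/debug.h>

    void ReportCacheRebuild(size_t entries) {
        if (StdDbgLevel() > 0) { // stays 0 unless DBGOUT selects a stream and level
            StdDbgStream() << "rebuilt cache with " << entries << " entries" << Endl;
        }
    }
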
diff --git a/util/stream/debug.h b/util/stream/debug.h
index 123fdaefda1..92d6d4b42da 100644
--- a/util/stream/debug.h
+++ b/util/stream/debug.h
@@ -40,7 +40,7 @@ IOutputStream& StdDbgStream() noexcept;
* @see DBGTRACE
*/
int StdDbgLevel() noexcept;
-
+
/**
* Standard debug stream.
*
diff --git a/util/stream/trace.h b/util/stream/trace.h
index 15720416da2..e74b6ecf3e0 100644
--- a/util/stream/trace.h
+++ b/util/stream/trace.h
@@ -1,7 +1,7 @@
-#pragma once
-
-#include "debug.h"
-
+#pragma once
+
+#include "debug.h"
+
/**
* Debug level, as set via `DBGOUT` environment variable.
*/
@@ -13,14 +13,14 @@ enum ETraceLevel: ui8 {
TRACE_DEBUG = 5,
TRACE_DETAIL = 6,
TRACE_VERBOSE = 7
-};
-
+};
+
#if !defined(NDEBUG) && !defined(Y_ENABLE_TRACE)
#define Y_ENABLE_TRACE
-#endif
-
+#endif
+
#ifdef Y_ENABLE_TRACE
-
+
/**
* Writes the given data into standard debug stream if current debug level set
* via `DBGOUT` environment variable permits it.
@@ -47,14 +47,14 @@ enum ETraceLevel: ui8 {
StdDbgStream() << args << Endl; \
} \
while (false)
-
-#else
-
+
+#else
+
#define Y_DBGTRACE(elevel, args) \
do { \
} while (false)
#define Y_DBGTRACE0(level, args) \
do { \
} while (false)
-
-#endif
+
+#endif
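
With Y_ENABLE_TRACE defined, Y_DBGTRACE0 expands to a write to StdDbgStream() guarded by the level parsed from DBGOUT; without it, both macros collapse to empty do/while statements, so call sites cost nothing in builds with tracing disabled. A hedged usage sketch built from the declarations shown above (the function and values are illustrative):

    #include <util/stream/trace.h>

    void TraceProgress(size_t done, size_t total) {
        // Emitted only when the DBGOUT level permits TRACE_DEBUG (5).
        Y_DBGTRACE0(TRACE_DEBUG, "progress: " << done << "/" << total);
    }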