author    | nae202 <nae202@yandex-team.com> | 2024-11-20 12:52:01 +0300
committer | nae202 <nae202@yandex-team.com> | 2024-11-20 13:14:09 +0300
commit    | 0b9d91e900b52bccd6eabd033acbb57c4ee173fc (patch)
tree      | 6839ed69fbda4d5214eb418acc4554924bc64160 /util/generic
parent    | 878e26057d11cce46b7bc3a6c838209d4686e28b (diff)
download  | ydb-0b9d91e900b52bccd6eabd033acbb57c4ee173fc.tar.gz
Part of PR. Style
Part of a larger PR. REVIEW:7264088
commit_hash:0f5b03fbbed0ac30f734943309e3ef5cd4d7a30e
Diffstat (limited to 'util/generic')
-rw-r--r-- | util/generic/bitmap.h            |  97
-rw-r--r-- | util/generic/bitops.h            |  12
-rw-r--r-- | util/generic/function_ref_ut.cpp |   3
-rw-r--r-- | util/generic/hash_table.h        | 148
-rw-r--r-- | util/generic/ptr.h               |   9
-rw-r--r-- | util/generic/strbuf.h            |   3
-rw-r--r-- | util/generic/ymath.cpp           |   6
7 files changed, 181 insertions(+), 97 deletions(-)
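Every hunk below applies the same mechanical style rule: a single-statement if/else/for/while body gets braces, with no change in behavior. A minimal before/after sketch of the pattern (plain C++, mirroring the function_ref_ut.cpp hunk; the Before/After suffixes are illustrative only, not names from the diff):

// Before: unbraced single-statement body.
int F1Before(bool x) {
    if (x)
        throw 19;
    return 42;
}

// After: the same control flow, with the body braced.
int F1After(bool x) {
    if (x) {
        throw 19;
    }
    return 42;
}

The always-brace style mainly guards against a later edit adding a second statement under an unbraced condition and silently changing control flow.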
diff --git a/util/generic/bitmap.h b/util/generic/bitmap.h
index 929f23a883..3116c8dd19 100644
--- a/util/generic/bitmap.h
+++ b/util/generic/bitmap.h
@@ -350,19 +350,21 @@ public:
         ~TReference() = default;

         Y_FORCE_INLINE TReference& operator=(bool val) {
-            if (val)
+            if (val) {
                 *Chunk |= static_cast<TChunk>(1) << Offset;
-            else
+            } else {
                 *Chunk &= ~(static_cast<TChunk>(1) << Offset);
+            }

             return *this;
         }

         Y_FORCE_INLINE TReference& operator=(const TReference& ref) {
-            if (ref)
+            if (ref) {
                 *Chunk |= static_cast<TChunk>(1) << Offset;
-            else
+            } else {
                 *Chunk &= ~(static_cast<TChunk>(1) << Offset);
+            }

             return *this;
         }
@@ -407,8 +409,9 @@ private:
             TChunk updateMask = FullChunk << bitOffset;
             if (chunk == endChunk) {
                 updateMask ^= FullChunk << endBitOffset;
-                if (!updateMask)
+                if (!updateMask) {
                     break;
+                }
             }
             Mask.Data[chunk] = TUpdateOp::Op(Mask.Data[chunk], updateMask);
             bitOffset = 0;
@@ -570,16 +573,18 @@ public:
         static_assert(std::is_unsigned<TTo>::value, "expect std::is_unsigned<TTo>::value");
         to = 0;
         size_t chunkpos = pos >> DivCount;
-        if (chunkpos >= Mask.GetChunkCapacity())
+        if (chunkpos >= Mask.GetChunkCapacity()) {
             return;
+        }

         if ((pos & ModMask) == 0) {
-            if (sizeof(TChunk) >= sizeof(TTo))
+            if (sizeof(TChunk) >= sizeof(TTo)) {
                 to = (TTo)Mask.Data[chunkpos];
-            else // if (sizeof(TChunk) < sizeof(TTo))
+            } else { // if (sizeof(TChunk) < sizeof(TTo))
                 NBitMapPrivate::CopyData(&to, 1, Mask.Data + chunkpos, Min(((sizeof(TTo) * 8) >> DivCount), Mask.GetChunkCapacity() - chunkpos));
-        } else if ((pos & (sizeof(TTo) * 8 - 1)) == 0 && sizeof(TChunk) >= 2 * sizeof(TTo))
+            }
+        } else if ((pos & (sizeof(TTo) * 8 - 1)) == 0 && sizeof(TChunk) >= 2 * sizeof(TTo)) {
             to = (TTo)(Mask.Data[chunkpos] >> (pos & ModMask));
-        else {
+        } else {
             static constexpr size_t copyToSize = (sizeof(TChunk) >= sizeof(TTo)) ? (sizeof(TChunk) / sizeof(TTo)) + 2 : 3;
             TTo temp[copyToSize] = {0, 0}; // or use non defined by now TBitmap<copyToSize, TTo>::CopyData,RShift(pos & ModMask),Export(0,to)
@@ -621,17 +626,20 @@ public:
     Y_FORCE_INLINE size_t ValueBitCount() const {
         size_t nonZeroChunk = Mask.GetChunkCapacity() - 1;
-        while (nonZeroChunk != 0 && !Mask.Data[nonZeroChunk])
+        while (nonZeroChunk != 0 && !Mask.Data[nonZeroChunk]) {
             --nonZeroChunk;
+        }
         return nonZeroChunk || Mask.Data[nonZeroChunk] ? nonZeroChunk * BitsPerChunk + GetValueBitCount(TIntType(Mask.Data[nonZeroChunk])) : 0;
     }

     Y_PURE_FUNCTION Y_FORCE_INLINE bool Empty() const {
-        for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i)
-            if (Mask.Data[i])
+        for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i) {
+            if (Mask.Data[i]) {
                 return false;
+            }
+        }
         return true;
     }
@@ -679,11 +687,13 @@ public:
     TThis& And(const TThis& bitmap) {
         // Don't expand capacity here, because resulting bits in positions,
         // which are greater then size of one of these bitmaps, will be zero
-        for (size_t i = 0; i < Min(bitmap.Mask.GetChunkCapacity(), Mask.GetChunkCapacity()); ++i)
+        for (size_t i = 0; i < Min(bitmap.Mask.GetChunkCapacity(), Mask.GetChunkCapacity()); ++i) {
             Mask.Data[i] &= bitmap.Mask.Data[i];
+        }

         // Clear bits if current bitmap size is greater than AND-ed one
-        for (size_t i = bitmap.Mask.GetChunkCapacity(); i < Mask.GetChunkCapacity(); ++i)
+        for (size_t i = bitmap.Mask.GetChunkCapacity(); i < Mask.GetChunkCapacity(); ++i) {
             Mask.Data[i] = 0;
+        }
         return *this;
     }
@@ -694,8 +704,9 @@ public:
     Y_FORCE_INLINE TThis& And(const TChunk& val) {
         Mask.Data[0] &= val;

-        for (size_t i = 1; i < Mask.GetChunkCapacity(); ++i)
+        for (size_t i = 1; i < Mask.GetChunkCapacity(); ++i) {
             Mask.Data[i] = 0;
+        }

         return *this;
     }
@@ -704,8 +715,9 @@ public:
         if (valueBitCount) {
             // Memory optimization: expand size only for non-zero bits
             Reserve(valueBitCount);
-            for (size_t i = 0; i < Min(bitmap.Mask.GetChunkCapacity(), Mask.GetChunkCapacity()); ++i)
+            for (size_t i = 0; i < Min(bitmap.Mask.GetChunkCapacity(), Mask.GetChunkCapacity()); ++i) {
                 Mask.Data[i] |= bitmap.Mask.Data[i];
+            }
         }
         return *this;
     }
@@ -723,8 +735,9 @@ public:
     TThis& Xor(const TThis& bitmap) {
         Reserve(bitmap.Size());

-        for (size_t i = 0; i < bitmap.Mask.GetChunkCapacity(); ++i)
+        for (size_t i = 0; i < bitmap.Mask.GetChunkCapacity(); ++i) {
             Mask.Data[i] ^= bitmap.Mask.Data[i];
+        }

         return *this;
     }
@@ -740,8 +753,9 @@ public:
     }

     TThis& SetDifference(const TThis& bitmap) {
-        for (size_t i = 0; i < Min(bitmap.Mask.GetChunkCapacity(), Mask.GetChunkCapacity()); ++i)
+        for (size_t i = 0; i < Min(bitmap.Mask.GetChunkCapacity(), Mask.GetChunkCapacity()); ++i) {
             Mask.Data[i] &= ~bitmap.Mask.Data[i];
+        }

         return *this;
     }
@@ -756,8 +770,9 @@ public:
     }

     Y_FORCE_INLINE TThis& Flip() {
-        for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i)
+        for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i) {
             Mask.Data[i] = ~Mask.Data[i];
+        }
         Mask.Sanitize();
         return *this;
     }
@@ -779,13 +794,16 @@ public:
                     Mask.Data[i] = Mask.Data[i - eshift];
                 }
             } else {
-                for (size_t i = Mask.GetChunkCapacity() - 1; i > eshift; --i)
+                for (size_t i = Mask.GetChunkCapacity() - 1; i > eshift; --i) {
                     Mask.Data[i] = (Mask.Data[i - eshift] << offset) | (Mask.Data[i - eshift - 1] >> subOffset);
-                if (eshift < Mask.GetChunkCapacity())
+                }
+                if (eshift < Mask.GetChunkCapacity()) {
                     Mask.Data[eshift] = Mask.Data[0] << offset;
+                }
             }
-            for (size_t i = 0; i < Min(eshift, Mask.GetChunkCapacity()); ++i)
+            for (size_t i = 0; i < Min(eshift, Mask.GetChunkCapacity()); ++i) {
                 Mask.Data[i] = 0;
+            }

             // Cleanup extra high bits in the storage
             Mask.Sanitize();
@@ -810,13 +828,15 @@ public:
                     }
                 } else {
                     const size_t subOffset = BitsPerChunk - offset;
-                    for (size_t i = 0; i < limit; ++i)
+                    for (size_t i = 0; i < limit; ++i) {
                         Mask.Data[i] = (Mask.Data[i + eshift] >> offset) | (Mask.Data[i + eshift + 1] << subOffset);
+                    }
                     Mask.Data[limit] = Mask.Data[Mask.GetChunkCapacity() - 1] >> offset;
                 }
-                for (size_t i = limit + 1; i < Mask.GetChunkCapacity(); ++i)
+                for (size_t i = limit + 1; i < Mask.GetChunkCapacity(); ++i) {
                     Mask.Data[i] = 0;
+                }
             }
         }
         return *this;
@@ -826,8 +846,9 @@ public:
     // This method is optimized combination of Or() and LShift(), which allows reducing memory allocation
    // when combining long dynamic bitmaps.
     TThis& Or(const TThis& bitmap, size_t offset) {
-        if (0 == offset)
+        if (0 == offset) {
             return Or(bitmap);
+        }

         const size_t otherValueBitCount = bitmap.ValueBitCount();
         // Continue only if OR-ed bitmap have non-zero bits
@@ -848,8 +869,9 @@ public:
                 for (; i < Min(bitmap.Mask.GetChunkCapacity() + chunkShift, Mask.GetChunkCapacity()); ++i) {
                     Mask.Data[i] |= (bitmap.Mask.Data[i - chunkShift] << subShift) | (bitmap.Mask.Data[i - chunkShift - 1] >> subOffset);
                 }
-                if (i < Mask.GetChunkCapacity())
+                if (i < Mask.GetChunkCapacity()) {
                     Mask.Data[i] |= bitmap.Mask.Data[i - chunkShift - 1] >> subOffset;
+                }
             }
         }
@@ -859,19 +881,22 @@ public:
     bool Equal(const TThis& bitmap) const {
         if (Mask.GetChunkCapacity() > bitmap.Mask.GetChunkCapacity()) {
             for (size_t i = bitmap.Mask.GetChunkCapacity(); i < Mask.GetChunkCapacity(); ++i) {
-                if (0 != Mask.Data[i])
+                if (0 != Mask.Data[i]) {
                     return false;
+                }
             }
         } else if (Mask.GetChunkCapacity() < bitmap.Mask.GetChunkCapacity()) {
             for (size_t i = Mask.GetChunkCapacity(); i < bitmap.Mask.GetChunkCapacity(); ++i) {
-                if (0 != bitmap.Mask.Data[i])
+                if (0 != bitmap.Mask.Data[i]) {
                     return false;
+                }
             }
         }
         size_t size = Min(Mask.GetChunkCapacity(), bitmap.Mask.GetChunkCapacity());
         for (size_t i = 0; i < size; ++i) {
-            if (Mask.Data[i] != bitmap.Mask.Data[i])
+            if (Mask.Data[i] != bitmap.Mask.Data[i]) {
                 return false;
+            }
         }
         return true;
     }
@@ -884,18 +909,21 @@ public:
     int Compare(const TThis& bitmap) const {
         size_t size = Min(Mask.GetChunkCapacity(), bitmap.Mask.GetChunkCapacity());
         int res = ::memcmp(Mask.Data, bitmap.Mask.Data, size * sizeof(TChunk));
-        if (0 != res || Mask.GetChunkCapacity() == bitmap.Mask.GetChunkCapacity())
+        if (0 != res || Mask.GetChunkCapacity() == bitmap.Mask.GetChunkCapacity()) {
             return res;
+        }

         if (Mask.GetChunkCapacity() > bitmap.Mask.GetChunkCapacity()) {
             for (size_t i = bitmap.Mask.GetChunkCapacity(); i < Mask.GetChunkCapacity(); ++i) {
-                if (0 != Mask.Data[i])
+                if (0 != Mask.Data[i]) {
                     return 1;
+                }
             }
         } else {
             for (size_t i = Mask.GetChunkCapacity(); i < bitmap.Mask.GetChunkCapacity(); ++i) {
-                if (0 != bitmap.Mask.Data[i])
+                if (0 != bitmap.Mask.Data[i]) {
                     return -1;
+                }
             }
         }
         return 0;
@@ -953,8 +981,9 @@ public:
     Y_FORCE_INLINE size_t Count() const {
         size_t count = 0;
-        for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i)
+        for (size_t i = 0; i < Mask.GetChunkCapacity(); ++i) {
             count += ::NBitMapPrivate::CountBitsPrivate(Mask.Data[i]);
+        }
         return count;
     }
diff --git a/util/generic/bitops.h b/util/generic/bitops.h
index 0a2396bfee..601daf7a30 100644
--- a/util/generic/bitops.h
+++ b/util/generic/bitops.h
@@ -276,13 +276,16 @@ Y_FORCE_INLINE ui64 MostSignificantBit(ui64 v) {
     ui64 res = v ? (63 - __builtin_clzll(v)) : 0;
 #elif defined(_MSC_VER) && defined(_64_)
     unsigned long res = 0;
-    if (v)
+    if (v) {
         _BitScanReverse64(&res, v);
+    }
 #else
     ui64 res = 0;
-    if (v)
-        while (v >>= 1)
+    if (v) {
+        while (v >>= 1) {
             ++res;
+        }
+    }
 #endif
     return res;
 }
@@ -295,8 +298,9 @@ Y_FORCE_INLINE ui64 LeastSignificantBit(ui64 v) {
     ui64 res = v ? __builtin_ffsll(v) - 1 : 0;
 #elif defined(_MSC_VER) && defined(_64_)
     unsigned long res = 0;
-    if (v)
+    if (v) {
         _BitScanForward64(&res, v);
+    }
 #else
     ui64 res = 0;
     if (v) {
diff --git a/util/generic/function_ref_ut.cpp b/util/generic/function_ref_ut.cpp
index 45506beeeb..171671d7ea 100644
--- a/util/generic/function_ref_ut.cpp
+++ b/util/generic/function_ref_ut.cpp
@@ -20,8 +20,9 @@ Y_UNIT_TEST_SUITE(TestFunctionRef) {
     }

     int F1(bool x) {
-        if (x)
+        if (x) {
             throw 19;
+        }
         return 42;
     }
diff --git a/util/generic/hash_table.h b/util/generic/hash_table.h
index 5976881a71..b33ad4f596 100644
--- a/util/generic/hash_table.h
+++ b/util/generic/hash_table.h
@@ -634,9 +634,11 @@ public:
     }

     iterator begin() {
-        for (size_type n = 0; n < buckets.size(); ++n) /*y*/
-            if (buckets[n])
+        for (size_type n = 0; n < buckets.size(); ++n) { /*y*/
+            if (buckets[n]) {
                 return iterator(buckets[n]); /*y*/
+            }
+        }
         return end();
     }
@@ -645,9 +647,11 @@ public:
     } /*y*/

     const_iterator begin() const {
-        for (size_type n = 0; n < buckets.size(); ++n) /*y*/
-            if (buckets[n])
+        for (size_type n = 0; n < buckets.size(); ++n) { /*y*/
+            if (buckets[n]) {
                 return const_iterator(buckets[n]); /*y*/
+            }
+        }
         return end();
     }
@@ -662,9 +666,11 @@ public:
     size_type bucket_size(size_type bucket) const {
         size_type result = 0;

-        if (const node* cur = buckets[bucket])
-            for (; !((uintptr_t)cur & 1); cur = cur->next)
+        if (const node* cur = buckets[bucket]) {
+            for (; !((uintptr_t)cur & 1); cur = cur->next) {
                 result += 1;
+            }
+        }

         return result;
     }
@@ -731,14 +737,16 @@ public:
     template <class InputIterator>
     void insert_unique(InputIterator f, InputIterator l, std::input_iterator_tag) {
-        for (; f != l; ++f)
+        for (; f != l; ++f) {
             insert_unique(*f);
+        }
     }

     template <class InputIterator>
     void insert_equal(InputIterator f, InputIterator l, std::input_iterator_tag) {
-        for (; f != l; ++f)
+        for (; f != l; ++f) {
             insert_equal(*f);
+        }
     }
@@ -746,8 +754,9 @@ public:
         difference_type n = std::distance(f, l);

         reserve(num_elements + n);
-        for (; n > 0; --n, ++f)
+        for (; n > 0; --n, ++f) {
             insert_unique_noresize(*f);
+        }
     }

     template <class ForwardIterator>
@@ -755,8 +764,9 @@ public:
         difference_type n = std::distance(f, l);

         reserve(num_elements + n);
-        for (; n > 0; --n, ++f)
+        for (; n > 0; --n, ++f) {
             emplace_equal_noresize(*f);
+        }
     }

     template <class OtherValue>
@@ -794,10 +804,13 @@ public:
         const size_type n = bkt_num_key(key);
         size_type result = 0;

-        if (const node* cur = buckets[n])
-            for (; !((uintptr_t)cur & 1); cur = cur->next)
-                if (equals(get_key(cur->val), key))
+        if (const node* cur = buckets[n]) {
+            for (; !((uintptr_t)cur & 1); cur = cur->next) {
+                if (equals(get_key(cur->val), key)) {
                     ++result;
+                }
+            }
+        }

         return result;
     }
@@ -834,8 +847,9 @@ public:
      * the nodes at once. */
     void release_nodes() {
-        if (empty())
+        if (empty()) {
             return; /* Need this check because empty buckets may reside in read-only memory. */
+        }

         clear_buckets(buckets);
         num_elements = 0;
@@ -877,8 +891,9 @@ public:
      * downsizing. */
     Y_REINITIALIZES_OBJECT void clear() {
-        if (num_elements)
+        if (num_elements) {
             clear((num_elements * 2 + buckets.size()) / 3);
+        }
     }

 private:
@@ -964,8 +979,9 @@ __yhashtable_iterator<V>& __yhashtable_iterator<V>::operator++() {
     cur = cur->next;
     if ((uintptr_t)cur & 1) {
         node** bucket = (node**)((uintptr_t)cur & ~1);
-        while (*bucket == nullptr)
+        while (*bucket == nullptr) {
             ++bucket;
+        }
         Y_ASSERT(*bucket != nullptr);
         cur = (node*)((uintptr_t)*bucket & ~1);
     }
@@ -985,8 +1001,9 @@ __yhashtable_const_iterator<V>& __yhashtable_const_iterator<V>::operator++() {
     cur = cur->next;
     if ((uintptr_t)cur & 1) {
         node** bucket = (node**)((uintptr_t)cur & ~1);
-        while (*bucket == nullptr)
+        while (*bucket == nullptr) {
             ++bucket;
+        }
         Y_ASSERT(*bucket != nullptr);
         cur = (node*)((uintptr_t)*bucket & ~1);
     }
@@ -1010,10 +1027,13 @@ std::pair<typename THashTable<V, K, HF, Ex, Eq, A>::iterator, bool> THashTable<V
     const size_type n = bkt_num(tmp->val);
     node* first = buckets[n];

-    if (first) /*y*/
-        for (node* cur = first; !((uintptr_t)cur & 1); cur = cur->next) /*y*/
-            if (equals(get_key(cur->val), get_key(tmp->val)))
+    if (first) { /*y*/
+        for (node* cur = first; !((uintptr_t)cur & 1); cur = cur->next) { /*y*/
+            if (equals(get_key(cur->val), get_key(tmp->val))) {
                 return std::pair<iterator, bool>(iterator(cur), false); /*y*/
+            }
+        }
+    }

     guard.release();
     tmp->next = first ? first : (node*)((uintptr_t)&buckets[n + 1] | 1); /*y*/
@@ -1028,10 +1048,13 @@ std::pair<typename THashTable<V, K, HF, Ex, Eq, A>::iterator, bool> THashTable<V
     const size_type n = bkt_num(obj);
     node* first = buckets[n];

-    if (first) /*y*/
-        for (node* cur = first; !((uintptr_t)cur & 1); cur = cur->next) /*y*/
-            if (equals(get_key(cur->val), get_key(obj)))
+    if (first) { /*y*/
+        for (node* cur = first; !((uintptr_t)cur & 1); cur = cur->next) { /*y*/
+            if (equals(get_key(cur->val), get_key(obj))) {
                 return std::pair<iterator, bool>(iterator(cur), false); /*y*/
+            }
+        }
+    }

     node* tmp = new_node(obj);
     tmp->next = first ? first : (node*)((uintptr_t)&buckets[n + 1] | 1); /*y*/
@@ -1049,8 +1072,8 @@ __yhashtable_iterator<V> THashTable<V, K, HF, Ex, Eq, A>::emplace_equal_noresize
     const size_type n = bkt_num(tmp->val);
     node* first = buckets[n];

-    if (first) /*y*/
-        for (node* cur = first; !((uintptr_t)cur & 1); cur = cur->next) /*y*/
+    if (first) { /*y*/
+        for (node* cur = first; !((uintptr_t)cur & 1); cur = cur->next) { /*y*/
             if (equals(get_key(cur->val), get_key(tmp->val))) {
                 guard.release();
                 tmp->next = cur->next;
@@ -1058,6 +1081,8 @@ __yhashtable_iterator<V> THashTable<V, K, HF, Ex, Eq, A>::emplace_equal_noresize
                 ++num_elements;
                 return iterator(tmp); /*y*/
             }
+        }
+    }

     guard.release();
     tmp->next = first ? first : (node*)((uintptr_t)&buckets[n + 1] | 1); /*y*/
@@ -1074,10 +1099,13 @@ typename THashTable<V, K, HF, Ex, Eq, A>::reference THashTable<V, K, HF, Ex, Eq,
     size_type n = bkt_num_key(get_key(v));
     node* first = buckets[n];

-    if (first) /*y*/
-        for (node* cur = first; !((uintptr_t)cur & 1); cur = cur->next) /*y*/
-            if (equals(get_key(cur->val), get_key(v)))
+    if (first) { /*y*/
+        for (node* cur = first; !((uintptr_t)cur & 1); cur = cur->next) { /*y*/
+            if (equals(get_key(cur->val), get_key(v))) {
                 return cur->val;
+            }
+        }
+    }

     node* tmp = new_node(v);
     tmp->next = first ? first : (node*)((uintptr_t)&buckets[n + 1] | 1); /*y*/
@@ -1093,10 +1121,13 @@ __yhashtable_iterator<V> THashTable<V, K, HF, Ex, Eq, A>::find_i(const OtherKey&
     ins = &buckets[n];
     node* first = buckets[n];

-    if (first) /*y*/
-        for (node* cur = first; !((uintptr_t)cur & 1); cur = cur->next) /*y*/
-            if (equals(get_key(cur->val), key))
+    if (first) { /*y*/
+        for (node* cur = first; !((uintptr_t)cur & 1); cur = cur->next) { /*y*/
+            if (equals(get_key(cur->val), key)) {
                 return iterator(cur); /*y*/
+            }
+        }
+    }

     return end();
 }
@@ -1115,19 +1146,24 @@ std::pair<__yhashtable_iterator<V>, __yhashtable_iterator<V>> THashTable<V, K, H
     ins = &buckets[n];
     node* first = buckets[n];

-    if (first) /*y*/
+    if (first) { /*y*/
         for (; !((uintptr_t)first & 1); first = first->next) { /*y*/
             if (equals(get_key(first->val), key)) {
-                for (node* cur = first->next; !((uintptr_t)cur & 1); cur = cur->next)
-                    if (!equals(get_key(cur->val), key))
+                for (node* cur = first->next; !((uintptr_t)cur & 1); cur = cur->next) {
+                    if (!equals(get_key(cur->val), key)) {
                         return pii(iterator(first), iterator(cur)); /*y*/
-                for (size_type m = n + 1; m < buckets.size(); ++m) /*y*/
-                    if (buckets[m])
+                    }
+                }
+                for (size_type m = n + 1; m < buckets.size(); ++m) { /*y*/
+                    if (buckets[m]) {
                         return pii(iterator(first), /*y*/
                                    iterator(buckets[m])); /*y*/
-                return pii(iterator(first), end()); /*y*/
+                    }
+                }
+                return pii(iterator(first), end()); /*y*/
             }
         }
+    }

     return pii(end(), end());
 }
@@ -1138,20 +1174,25 @@ std::pair<__yhashtable_const_iterator<V>, __yhashtable_const_iterator<V>> THashT
     const size_type n = bkt_num_key(key);
     const node* first = buckets[n];

-    if (first) /*y*/
+    if (first) { /*y*/
         for (; !((uintptr_t)first & 1); first = first->next) { /*y*/
             if (equals(get_key(first->val), key)) {
-                for (const node* cur = first->next; !((uintptr_t)cur & 1); cur = cur->next)
-                    if (!equals(get_key(cur->val), key))
-                        return pii(const_iterator(first), /*y*/
-                                   const_iterator(cur)); /*y*/
-                for (size_type m = n + 1; m < buckets.size(); ++m) /*y*/
-                    if (buckets[m])
+                for (const node* cur = first->next; !((uintptr_t)cur & 1); cur = cur->next) {
+                    if (!equals(get_key(cur->val), key)) {
+                        return pii(const_iterator(first), /*y*/
+                                   const_iterator(cur)); /*y*/
+                    }
+                }
+                for (size_type m = n + 1; m < buckets.size(); ++m) { /*y*/
+                    if (buckets[m]) {
                         return pii(const_iterator(first /*y*/), const_iterator(buckets[m] /*y*/));
+                    }
+                }
                 return pii(const_iterator(first /*y*/), end());
             }
         }
+    }

     return pii(end(), end());
 }
@@ -1249,16 +1290,18 @@ void THashTable<V, K, HF, Ex, Eq, A>::erase(iterator first, iterator last) {
     size_type f_bucket = first.cur ? bkt_num(first.cur->val) : buckets.size(); /*y*/
     size_type l_bucket = last.cur ? bkt_num(last.cur->val) : buckets.size(); /*y*/

-    if (first.cur == last.cur)
+    if (first.cur == last.cur) {
         return;
-    else if (f_bucket == l_bucket)
+    } else if (f_bucket == l_bucket) {
         erase_bucket(f_bucket, first.cur, last.cur);
-    else {
+    } else {
         erase_bucket(f_bucket, first.cur, nullptr);
-        for (size_type n = f_bucket + 1; n < l_bucket; ++n)
+        for (size_type n = f_bucket + 1; n < l_bucket; ++n) {
             erase_bucket(n, nullptr);
-        if (l_bucket != buckets.size()) /*y*/
+        }
+        if (l_bucket != buckets.size()) { /*y*/
             erase_bucket(l_bucket, last.cur);
+        }
     }
 }
@@ -1277,8 +1320,9 @@ template <class V, class K, class HF, class Ex, class Eq, class A>
 bool THashTable<V, K, HF, Ex, Eq, A>::reserve(size_type num_elements_hint) {
     const size_type old_n = buckets.size(); /*y*/
     if (num_elements_hint + 1 > old_n) {
-        if (old_n != 1 && num_elements_hint <= old_n) // TODO: this if is for backwards compatibility down to order-in-buckets level. Can be safely removed.
+        if (old_n != 1 && num_elements_hint <= old_n) { // TODO: this if is for backwards compatibility down to order-in-buckets level. Can be safely removed.
             return false;
+        }
         const TBucketDivisor n = HashBucketCountExt(num_elements_hint + 1, buckets.BucketDivisorHint() + 1);
         if (n() > old_n) {
@@ -1325,9 +1369,9 @@ bool THashTable<V, K, HF, Ex, Eq, A>::reserve(size_type num_elements_hint) {
 template <class V, class K, class HF, class Ex, class Eq, class A>
 void THashTable<V, K, HF, Ex, Eq, A>::erase_bucket(const size_type n, node* first, node* last) {
     node* cur = buckets[n];
-    if (cur == first)
+    if (cur == first) {
         erase_bucket(n, last);
-    else {
+    } else {
         node* next;
         for (next = cur->next; next != first; cur = next, next = cur->next)
             ;
diff --git a/util/generic/ptr.h b/util/generic/ptr.h
index 703e61e68e..7057949bc4 100644
--- a/util/generic/ptr.h
+++ b/util/generic/ptr.h
@@ -1035,8 +1035,9 @@ class TCopyClone {
 public:
     template <class T>
     static inline T* Copy(T* t) {
-        if (t)
+        if (t) {
             return t->Clone();
+        }
         return nullptr;
     }
 };
@@ -1045,8 +1046,9 @@ class TCopyNew {
 public:
     template <class T>
     static inline T* Copy(T* t) {
-        if (t)
+        if (t) {
             return new T(*t);
+        }
         return nullptr;
     }
 };
@@ -1115,8 +1117,9 @@ public:
 #endif

 private:
     inline void DoDestroy() noexcept {
-        if (T_)
+        if (T_) {
             D::Destroy(T_);
+        }
     }

 private:
diff --git a/util/generic/strbuf.h b/util/generic/strbuf.h
index 3f946775dc..6806950dc3 100644
--- a/util/generic/strbuf.h
+++ b/util/generic/strbuf.h
@@ -282,8 +282,9 @@ public:
     // s.TrySplitOn(s.find('z'), ...) is false, but s.TrySplitOn(100500, ...) is true.
     bool TrySplitOn(size_t pos, TdSelf& l, TdSelf& r, size_t len = 1) const noexcept {
-        if (TBase::npos == pos)
+        if (TBase::npos == pos) {
             return false;
+        }

         DoSplitOn(pos, l, r, len);
         return true;
diff --git a/util/generic/ymath.cpp b/util/generic/ymath.cpp
index 31270728f4..a28a71b0e0 100644
--- a/util/generic/ymath.cpp
+++ b/util/generic/ymath.cpp
@@ -13,15 +13,17 @@ float Exp2f(float x) {
 double Erf(double x) {
     static constexpr double _M_2_SQRTPI = 1.12837916709551257390;
     static constexpr double eps = 1.0e-7;
-    if (fabs(x) >= 3.75)
+    if (fabs(x) >= 3.75) {
         return x > 0 ? 1.0 : -1.0;
+    }
     double r = _M_2_SQRTPI * x;
     double f = r;
     for (int i = 1;; ++i) {
         r *= -x * x / i;
         f += r / (2 * i + 1);
-        if (fabs(r) < eps * (2 * i + 1))
+        if (fabs(r) < eps * (2 * i + 1)) {
             break;
+        }
     }
     return f;
 }