path: root/contrib/libs/jemalloc/src/bitmap.c
Diffstat (limited to 'contrib/libs/jemalloc/src/bitmap.c')
-rw-r--r--	contrib/libs/jemalloc/src/bitmap.c	98
1 file changed, 49 insertions(+), 49 deletions(-)
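
For orientation, the tree-based bitmap code touched below builds its levels by repeatedly dividing the bit count by the group size until a single group remains. The standalone sketch that follows mirrors the level-construction loop of bitmap_info_init() shown in the diff; it is not jemalloc code, and GROUP_NBITS / BITS2GROUPS are simplified stand-ins assumed here for jemalloc's BITMAP_GROUP_NBITS / BITMAP_BITS2GROUPS macros (a 64-bit group and plain ceiling division).

/* Sketch only: mirrors the level loop in bitmap_info_init(), with
 * simplified stand-ins for jemalloc's group macros. */
#include <stdio.h>
#include <stddef.h>

#define GROUP_NBITS 64
#define BITS2GROUPS(n) (((n) + GROUP_NBITS - 1) / GROUP_NBITS)

int main(void) {
	size_t nbits = 100000;              /* arbitrary example bitmap size */
	size_t group_offset = 0;            /* corresponds to levels[0].group_offset */
	size_t group_count = BITS2GROUPS(nbits);
	unsigned i;

	printf("level 0: group_offset=0, groups=%zu\n", group_count);
	/* Work upward through the levels until one level fits in a single
	 * group, accumulating each level's starting offset as the loop in
	 * bitmap_info_init() does. */
	for (i = 1; group_count > 1; i++) {
		group_offset += group_count;
		group_count = BITS2GROUPS(group_count);
		printf("level %u: group_offset=%zu, groups=%zu\n",
		    i, group_offset, group_count);
	}
	group_offset += group_count;
	/* The final offset is the total group count, which is what
	 * bitmap_info_ngroups() in the diff returns via
	 * levels[nlevels].group_offset. */
	printf("nlevels=%u, total groups=%zu\n", i, group_offset);
	return 0;
}
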
diff --git a/contrib/libs/jemalloc/src/bitmap.c b/contrib/libs/jemalloc/src/bitmap.c
index 468b3178eb..1981e8ea17 100644
--- a/contrib/libs/jemalloc/src/bitmap.c
+++ b/contrib/libs/jemalloc/src/bitmap.c
@@ -1,55 +1,55 @@
#define JEMALLOC_BITMAP_C_
#include "jemalloc/internal/jemalloc_preamble.h"
#include "jemalloc/internal/jemalloc_internal_includes.h"
-
+
#include "jemalloc/internal/assert.h"
-
-/******************************************************************************/
-
+
+/******************************************************************************/
+
#ifdef BITMAP_USE_TREE
-
-void
+
+void
bitmap_info_init(bitmap_info_t *binfo, size_t nbits) {
- unsigned i;
- size_t group_count;
-
- assert(nbits > 0);
- assert(nbits <= (ZU(1) << LG_BITMAP_MAXBITS));
-
- /*
- * Compute the number of groups necessary to store nbits bits, and
- * progressively work upward through the levels until reaching a level
- * that requires only one group.
- */
- binfo->levels[0].group_offset = 0;
+ unsigned i;
+ size_t group_count;
+
+ assert(nbits > 0);
+ assert(nbits <= (ZU(1) << LG_BITMAP_MAXBITS));
+
+ /*
+ * Compute the number of groups necessary to store nbits bits, and
+ * progressively work upward through the levels until reaching a level
+ * that requires only one group.
+ */
+ binfo->levels[0].group_offset = 0;
group_count = BITMAP_BITS2GROUPS(nbits);
- for (i = 1; group_count > 1; i++) {
- assert(i < BITMAP_MAX_LEVELS);
- binfo->levels[i].group_offset = binfo->levels[i-1].group_offset
- + group_count;
+ for (i = 1; group_count > 1; i++) {
+ assert(i < BITMAP_MAX_LEVELS);
+ binfo->levels[i].group_offset = binfo->levels[i-1].group_offset
+ + group_count;
group_count = BITMAP_BITS2GROUPS(group_count);
- }
- binfo->levels[i].group_offset = binfo->levels[i-1].group_offset
- + group_count;
+ }
+ binfo->levels[i].group_offset = binfo->levels[i-1].group_offset
+ + group_count;
assert(binfo->levels[i].group_offset <= BITMAP_GROUPS_MAX);
- binfo->nlevels = i;
- binfo->nbits = nbits;
-}
-
+ binfo->nlevels = i;
+ binfo->nbits = nbits;
+}
+
static size_t
bitmap_info_ngroups(const bitmap_info_t *binfo) {
return binfo->levels[binfo->nlevels].group_offset;
-}
-
-void
+}
+
+void
bitmap_init(bitmap_t *bitmap, const bitmap_info_t *binfo, bool fill) {
- size_t extra;
- unsigned i;
-
- /*
- * Bits are actually inverted with regard to the external bitmap
+ size_t extra;
+ unsigned i;
+
+ /*
+ * Bits are actually inverted with regard to the external bitmap
* interface.
- */
+ */
if (fill) {
/* The "filled" bitmap starts out with all 0 bits. */
@@ -64,21 +64,21 @@ bitmap_init(bitmap_t *bitmap, const bitmap_info_t *binfo, bool fill) {
* significant bits of the last group.
*/
memset(bitmap, 0xffU, bitmap_size(binfo));
- extra = (BITMAP_GROUP_NBITS - (binfo->nbits & BITMAP_GROUP_NBITS_MASK))
- & BITMAP_GROUP_NBITS_MASK;
+ extra = (BITMAP_GROUP_NBITS - (binfo->nbits & BITMAP_GROUP_NBITS_MASK))
+ & BITMAP_GROUP_NBITS_MASK;
if (extra != 0) {
- bitmap[binfo->levels[1].group_offset - 1] >>= extra;
+ bitmap[binfo->levels[1].group_offset - 1] >>= extra;
}
- for (i = 1; i < binfo->nlevels; i++) {
- size_t group_count = binfo->levels[i].group_offset -
- binfo->levels[i-1].group_offset;
- extra = (BITMAP_GROUP_NBITS - (group_count &
- BITMAP_GROUP_NBITS_MASK)) & BITMAP_GROUP_NBITS_MASK;
+ for (i = 1; i < binfo->nlevels; i++) {
+ size_t group_count = binfo->levels[i].group_offset -
+ binfo->levels[i-1].group_offset;
+ extra = (BITMAP_GROUP_NBITS - (group_count &
+ BITMAP_GROUP_NBITS_MASK)) & BITMAP_GROUP_NBITS_MASK;
if (extra != 0) {
- bitmap[binfo->levels[i+1].group_offset - 1] >>= extra;
+ bitmap[binfo->levels[i+1].group_offset - 1] >>= extra;
}
- }
-}
+ }
+}
#else /* BITMAP_USE_TREE */