server-skynet-source-3rd-je.../include/jemalloc/internal/bitmap_inlines.h
Jason Evans 5e12223925 Fix BITMAP_USE_TREE version of bitmap_ffu().
This fixes an extent searching regression on 32-bit systems, caused by the
initial bitmap_ffu() implementation in c8021d01f6 (Implement bitmap_ffu(),
which finds the first unset bit.), as first used in 5d33233a5e (Use a bitmap
in extents_t to speed up search.).
2017-03-25 23:29:32 -07:00


#ifndef JEMALLOC_INTERNAL_BITMAP_INLINES_H
#define JEMALLOC_INTERNAL_BITMAP_INLINES_H
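
/*
 * Inline bitmap operations. The representation is inverted with respect to
 * the logical interface: a physical 1 bit means the logical bit is *unset*
 * (free), and a physical 0 means it is *set* (in use). A full bitmap is
 * therefore all physical zeros, and "find first unset" reduces to a
 * find-first-one scan (ffs_lu()) over bitmap_t groups.
 *
 * With BITMAP_USE_TREE, the flat group array is followed by summary levels:
 * a 1 bit in a level-i group means the corresponding level-(i-1) group still
 * contains at least one physical 1, so searches descend from the root in
 * O(lg nbits) steps instead of scanning linearly. For example, with 64-bit
 * groups and nbits == 512, level 0 has 8 groups and the root level has 1.
 *
 * Types and helpers used here (bitmap_t, bitmap_info_t, ZU(), ffs_lu()) come
 * from other internal headers; like the rest of jemalloc's internal headers,
 * this file is not meant to be included standalone.
 *
 * Illustrative usage (a sketch; bitmap_info_init() and bitmap_init() are
 * declared alongside this header rather than in it, and map/binfo/b are
 * hypothetical local names):
 *
 *	bitmap_info_t binfo;
 *	bitmap_t map[BITMAP_GROUPS_MAX];
 *	bitmap_info_init(&binfo, 512);
 *	bitmap_init(map, &binfo);		(all 512 bits logically unset)
 *	size_t b = bitmap_sfu(map, &binfo);	(claims bit 0; b == 0)
 *	assert(bitmap_get(map, &binfo, b));
 *	bitmap_unset(map, &binfo, b);		(bit 0 is free again)
 */
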
#ifndef JEMALLOC_ENABLE_INLINE
bool bitmap_full(bitmap_t *bitmap, const bitmap_info_t *binfo);
bool bitmap_get(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit);
void bitmap_set(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit);
size_t bitmap_ffu(const bitmap_t *bitmap, const bitmap_info_t *binfo,
size_t min_bit);
size_t bitmap_sfu(bitmap_t *bitmap, const bitmap_info_t *binfo);
void bitmap_unset(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit);
#endif

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_BITMAP_C_))
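/*
 * bitmap_full(): return true iff all logical bits are set (all physical bits
 * are 0). With BITMAP_USE_TREE, the root summary group alone decides this;
 * otherwise every group is scanned.
 */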
JEMALLOC_INLINE bool
bitmap_full(bitmap_t *bitmap, const bitmap_info_t *binfo) {
#ifdef BITMAP_USE_TREE
size_t rgoff = binfo->levels[binfo->nlevels].group_offset - 1;
bitmap_t rg = bitmap[rgoff];
/* The bitmap is full iff the root group is 0. */
return (rg == 0);
#else
size_t i;
for (i = 0; i < binfo->ngroups; i++) {
if (bitmap[i] != 0) {
return false;
}
}
return true;
#endif
}
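
/*
 * bitmap_get(): return true iff logical bit `bit` is set. The physical bit
 * is inverted, hence the negation.
 */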
JEMALLOC_INLINE bool
bitmap_get(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
size_t goff;
bitmap_t g;
assert(bit < binfo->nbits);
goff = bit >> LG_BITMAP_GROUP_NBITS;
g = bitmap[goff];
return !(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));
}
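
/*
 * bitmap_set(): set logical bit `bit` by clearing its physical bit. With
 * BITMAP_USE_TREE, a group that transitions to 0 (no free bits left in it)
 * has its summary bit cleared at successively higher levels until some group
 * remains nonzero.
 */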
JEMALLOC_INLINE void
bitmap_set(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
size_t goff;
bitmap_t *gp;
bitmap_t g;
assert(bit < binfo->nbits);
assert(!bitmap_get(bitmap, binfo, bit));
goff = bit >> LG_BITMAP_GROUP_NBITS;
gp = &bitmap[goff];
g = *gp;
assert(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));
g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
*gp = g;
assert(bitmap_get(bitmap, binfo, bit));
#ifdef BITMAP_USE_TREE
/* Propagate group state transitions up the tree. */
if (g == 0) {
unsigned i;
for (i = 1; i < binfo->nlevels; i++) {
bit = goff;
goff = bit >> LG_BITMAP_GROUP_NBITS;
gp = &bitmap[binfo->levels[i].group_offset + goff];
g = *gp;
assert(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));
g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
*gp = g;
if (g != 0) {
break;
}
}
}
#endif
}

/*
 * bitmap_ffu(): return the index of the first (lowest) logically unset bit
 * that is >= min_bit, or binfo->nbits if no such bit exists.
 */
JEMALLOC_INLINE size_t
bitmap_ffu(const bitmap_t *bitmap, const bitmap_info_t *binfo, size_t min_bit) {
assert(min_bit < binfo->nbits);
#ifdef BITMAP_USE_TREE
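	/*
	 * Start at the root summary group. group_nmask counts the root's
	 * summary bits whose entire subtree lies below min_bit; group_mask
	 * clears them so the search cannot start below min_bit.
	 */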
unsigned level = binfo->nlevels - 1;
size_t lg_bits_per_group = (LG_BITMAP_GROUP_NBITS * (level+1));
size_t bits_per_group = 1LU << lg_bits_per_group;
size_t bits_per_group_mask = bits_per_group - 1;
unsigned group_nmask = (min_bit & bits_per_group_mask) >> (level *
LG_BITMAP_GROUP_NBITS);
bitmap_t group_mask = ~((1LU << group_nmask) - 1);
bitmap_t group = bitmap[binfo->levels[level].group_offset] & group_mask;
if (group == 0LU) {
return binfo->nbits;
}
size_t bit = ffs_lu(group) - 1;
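
	/*
	 * Descend toward level 0 one level at a time. While the current
	 * subtree's base index (cur_base) is still below min_bit, re-apply a
	 * mask so bits below min_bit cannot be selected; once the search has
	 * moved past min_bit's subtree, no masking is needed.
	 */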
while (level > 0) {
level--;
lg_bits_per_group = (LG_BITMAP_GROUP_NBITS * (level+1));
bits_per_group = 1LU << lg_bits_per_group;
bits_per_group_mask = bits_per_group - 1;
group = bitmap[binfo->levels[level].group_offset + bit];
size_t cur_base = bit << lg_bits_per_group;
if (cur_base < min_bit) {
group_nmask = (min_bit & bits_per_group_mask) >> (level
* LG_BITMAP_GROUP_NBITS);
group_mask = ~((1LU << group_nmask) - 1);
group &= group_mask;
}
if (group == 0LU) {
/*
* If min_bit is not the first bit in its group, try
* again starting at the first bit of the next group.
* This will only recurse at most once, since on
* recursion, min_bit will be the first bit in its
* group.
*/
size_t ceil_min_bit = (min_bit +
BITMAP_GROUP_NBITS_MASK) & ~BITMAP_GROUP_NBITS_MASK;
if (ceil_min_bit != min_bit && ceil_min_bit <
binfo->nbits) {
return bitmap_ffu(bitmap, binfo, ceil_min_bit);
}
return binfo->nbits;
}
bit = (bit << LG_BITMAP_GROUP_NBITS) + (ffs_lu(group) - 1);
}
assert(bit < binfo->nbits);
return bit;
#else
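	/*
	 * No summary tree: start in min_bit's group with the low bits masked
	 * off, then scan groups upward for the first physical 1.
	 */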
size_t i = min_bit >> LG_BITMAP_GROUP_NBITS;
bitmap_t g = bitmap[i] & ~((1LU << (min_bit & BITMAP_GROUP_NBITS_MASK))
- 1);
size_t bit;
	do {
		bit = ffs_lu(g);
		if (bit != 0) {
			return (i << LG_BITMAP_GROUP_NBITS) + (bit - 1);
		}
		i++;
		if (i == binfo->ngroups) {
			/* Don't read one group past the end of the bitmap. */
			break;
		}
		g = bitmap[i];
	} while (true);
return binfo->nbits;
#endif
}

/*
 * bitmap_sfu(): find the first (lowest) logically unset bit, set it, and
 * return its index. The bitmap must not be full.
 */
JEMALLOC_INLINE size_t
bitmap_sfu(bitmap_t *bitmap, const bitmap_info_t *binfo) {
size_t bit;
bitmap_t g;
unsigned i;
assert(!bitmap_full(bitmap, binfo));
#ifdef BITMAP_USE_TREE
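	/*
	 * Descend from the root summary group; a 1 bit at level i marks a
	 * group at level i-1 that still contains an unset (free) bit.
	 */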
i = binfo->nlevels - 1;
g = bitmap[binfo->levels[i].group_offset];
bit = ffs_lu(g) - 1;
while (i > 0) {
i--;
g = bitmap[binfo->levels[i].group_offset + bit];
bit = (bit << LG_BITMAP_GROUP_NBITS) + (ffs_lu(g) - 1);
}
#else
i = 0;
g = bitmap[0];
while ((bit = ffs_lu(g)) == 0) {
i++;
g = bitmap[i];
}
bit = (i << LG_BITMAP_GROUP_NBITS) + (bit - 1);
#endif
bitmap_set(bitmap, binfo, bit);
return bit;
}
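
/*
 * bitmap_unset(): clear logical bit `bit` by setting its physical bit. With
 * BITMAP_USE_TREE, a group that transitions from 0 to nonzero (a free bit
 * reappearing in a previously exhausted group) has its summary bit set at
 * successively higher levels, stopping at the first level that was already
 * nonzero.
 */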
JEMALLOC_INLINE void
bitmap_unset(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
size_t goff;
bitmap_t *gp;
bitmap_t g;
UNUSED bool propagate;
assert(bit < binfo->nbits);
assert(bitmap_get(bitmap, binfo, bit));
goff = bit >> LG_BITMAP_GROUP_NBITS;
gp = &bitmap[goff];
g = *gp;
propagate = (g == 0);
assert((g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK))) == 0);
g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
*gp = g;
assert(!bitmap_get(bitmap, binfo, bit));
#ifdef BITMAP_USE_TREE
/* Propagate group state transitions up the tree. */
if (propagate) {
unsigned i;
for (i = 1; i < binfo->nlevels; i++) {
bit = goff;
goff = bit >> LG_BITMAP_GROUP_NBITS;
gp = &bitmap[binfo->levels[i].group_offset + goff];
g = *gp;
propagate = (g == 0);
assert((g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)))
== 0);
g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
*gp = g;
if (!propagate) {
break;
}
}
}
#endif /* BITMAP_USE_TREE */
}
#endif

#endif /* JEMALLOC_INTERNAL_BITMAP_INLINES_H */