Add flat_bitmap.

The flat_bitmap module offers an extended API, at the cost of decreased
performance in the case of very large bitmaps.
This commit is contained in:
David Goldblatt 2020-07-17 18:42:50 -07:00 committed by David Goldblatt
parent 7fde6ac490
commit ceee823519
3 changed files with 536 additions and 0 deletions

View File

@ -203,6 +203,7 @@ TESTS_UNIT := \
$(srcroot)test/unit/edata_cache.c \
$(srcroot)test/unit/emitter.c \
$(srcroot)test/unit/extent_quantize.c \
$(srcroot)test/unit/flat_bitmap.c \
$(srcroot)test/unit/fork.c \
$(srcroot)test/unit/hash.c \
$(srcroot)test/unit/hook.c \

View File

@ -0,0 +1,222 @@
#ifndef JEMALLOC_INTERNAL_FB_H
#define JEMALLOC_INTERNAL_FB_H
/*
* The flat bitmap module. This has a larger API relative to the bitmap module
* (supporting things like backwards searches, and searching for both set and
* unset bits), at the cost of slower operations for very large bitmaps.
*
* Initialized flat bitmaps start at all-zeros (all bits unset).
*/
/* Bits are stored in groups of one unsigned long each. */
typedef unsigned long fb_group_t;
/* Number of bits in one group: 8 * sizeof(long). */
#define FB_GROUP_BITS (ZU(1) << (LG_SIZEOF_LONG + 3))
/* Number of groups needed to back an nbits-sized bitmap, rounding up. */
#define FB_NGROUPS(nbits) ((nbits) / FB_GROUP_BITS \
    + ((nbits) % FB_GROUP_BITS == 0 ? 0 : 1))
/* Initialize the bitmap to all-zeros (every bit unset). */
static inline void
fb_init(fb_group_t *fb, size_t nbits) {
	memset(fb, 0, FB_NGROUPS(nbits) * sizeof(fb_group_t));
}
/* Returns true iff no bit in the bitmap is set. */
static inline bool
fb_empty(fb_group_t *fb, size_t nbits) {
	size_t ngroups = FB_NGROUPS(nbits);
	for (size_t group = 0; group < ngroups; group++) {
		if (fb[group] != 0) {
			return false;
		}
	}
	return true;
}
/* Returns true iff every one of the nbits bits is set. */
static inline bool
fb_full(fb_group_t *fb, size_t nbits) {
	size_t ngroups = FB_NGROUPS(nbits);
	size_t partial_bits = nbits % FB_GROUP_BITS;
	/* Every group except a trailing partial one must be all-ones. */
	size_t nfull = (partial_bits == 0 ? ngroups : ngroups - 1);
	for (size_t i = 0; i < nfull; i++) {
		if (~fb[i] != 0) {
			return false;
		}
	}
	if (partial_bits == 0) {
		return true;
	}
	/* The last group is valid only in its low partial_bits bits. */
	fb_group_t mask = ((fb_group_t)1 << partial_bits) - 1;
	return fb[ngroups - 1] == mask;
}
/* Returns true iff the bit at index `bit` is set. */
static inline bool
fb_get(fb_group_t *fb, size_t nbits, size_t bit) {
	assert(bit < nbits);
	fb_group_t group = fb[bit / FB_GROUP_BITS];
	fb_group_t mask = (fb_group_t)1 << (bit % FB_GROUP_BITS);
	return (group & mask) != 0;
}
/* Sets the bit at index `bit`. */
static inline void
fb_set(fb_group_t *fb, size_t nbits, size_t bit) {
	assert(bit < nbits);
	fb[bit / FB_GROUP_BITS] |= (fb_group_t)1 << (bit % FB_GROUP_BITS);
}
/* Clears the bit at index `bit`. */
static inline void
fb_unset(fb_group_t *fb, size_t nbits, size_t bit) {
	assert(bit < nbits);
	fb[bit / FB_GROUP_BITS] &= ~((fb_group_t)1 << (bit % FB_GROUP_BITS));
}
/*
 * Sets (val == true) or clears (val == false) the cnt bits starting at bit
 * index `start`, entirely within the single group *fb.
 */
JEMALLOC_ALWAYS_INLINE void
fb_assign_group_impl(fb_group_t *fb, size_t start, size_t cnt, bool val) {
	assert(cnt > 0);
	assert(start + cnt - 1 < FB_GROUP_BITS);
	/*
	 * Build cnt ones and shift them into position.  Computing the ones by
	 * right-shifting all-ones keeps cnt == FB_GROUP_BITS legal (a left
	 * shift by FB_GROUP_BITS would be undefined).
	 */
	fb_group_t mask = ((~(fb_group_t)0) >> (FB_GROUP_BITS - cnt)) << start;
	if (val) {
		*fb |= mask;
	} else {
		*fb &= ~mask;
	}
}
/*
 * Assigns val to the cnt bits starting at position start.  cnt must be
 * nonzero (the first-group accounting below always consumes at least one
 * bit), and the range must lie within the bitmap.
 */
JEMALLOC_ALWAYS_INLINE void
fb_assign_impl(fb_group_t *fb, size_t nbits, size_t start, size_t cnt,
    bool val) {
	assert(start + cnt - 1 < nbits);
	size_t group_ind = start / FB_GROUP_BITS;
	size_t start_bit_ind = start % FB_GROUP_BITS;
	/*
	 * The first group is special; it's the only one we don't start writing
	 * to from bit 0.
	 */
	size_t first_group_cnt =
	    (start_bit_ind + cnt > FB_GROUP_BITS
		? FB_GROUP_BITS - start_bit_ind
		: cnt);
	/*
	 * We can basically split affected words into:
	 * - The first group, where we touch only the high bits
	 * - The last group, where we touch only the low bits
	 * - The middle, where we set all the bits to the same thing.
	 * We treat each case individually.  The last two could be merged, but
	 * this can lead to bad codegen for those middle words.
	 */
	/* First group */
	fb_assign_group_impl(&fb[group_ind], start_bit_ind, first_group_cnt,
	    val);
	cnt -= first_group_cnt;
	group_ind++;
	/* Middle groups */
	while (cnt > FB_GROUP_BITS) {
		fb_assign_group_impl(&fb[group_ind], 0, FB_GROUP_BITS, val);
		cnt -= FB_GROUP_BITS;
		group_ind++;
	}
	/* Last group */
	if (cnt != 0) {
		fb_assign_group_impl(&fb[group_ind], 0, cnt, val);
	}
}
/*
 * Sets the cnt bits starting at position start.  Must not have a 0 count;
 * the range [start, start + cnt) must lie within the bitmap.
 */
static inline void
fb_set_range(fb_group_t *fb, size_t nbits, size_t start, size_t cnt) {
	fb_assign_impl(fb, nbits, start, cnt, true);
}
/*
 * Unsets the cnt bits starting at position start.  Must not have a 0 count;
 * the range [start, start + cnt) must lie within the bitmap.
 */
static inline void
fb_unset_range(fb_group_t *fb, size_t nbits, size_t start, size_t cnt) {
	fb_assign_impl(fb, nbits, start, cnt, false);
}
/*
 * An implementation detail; search from `start` for the nearest bit with the
 * value val: forward (indices >= start) when forward is true, backward
 * (indices <= start) otherwise.
 *
 * Returns the number of bits in the bitmap if no such bit exists in the
 * forward direction, or -1 in the backward direction.
 */
JEMALLOC_ALWAYS_INLINE ssize_t
fb_find_impl(fb_group_t *fb, size_t nbits, size_t start, bool val,
    bool forward) {
	assert(start < nbits);
	size_t ngroups = FB_NGROUPS(nbits);
	ssize_t group_ind = start / FB_GROUP_BITS;
	size_t bit_ind = start % FB_GROUP_BITS;
	/* XOR-ing with all-ones turns a search for 0s into a search for 1s. */
	fb_group_t maybe_invert = (val ? 0 : (fb_group_t)-1);
	fb_group_t group = fb[group_ind];
	group ^= maybe_invert;
	if (forward) {
		/* Only keep ones in bits bit_ind and above. */
		group &= ~((1LU << bit_ind) - 1);
	} else {
		/*
		 * Only keep ones in bits bit_ind and below.  You might more
		 * naturally express this as (1 << (bit_ind + 1)) - 1, but
		 * that shifts by an invalid amount if bit_ind is one less than
		 * FB_GROUP_BITS.
		 */
		group &= ((2LU << bit_ind) - 1);
	}
	/* Walk group-by-group until a (possibly inverted) one bit survives. */
	ssize_t group_ind_bound = forward ? (ssize_t)ngroups : -1;
	while (group == 0) {
		group_ind += forward ? 1 : -1;
		if (group_ind == group_ind_bound) {
			return forward ? (ssize_t)nbits : (ssize_t)-1;
		}
		group = fb[group_ind];
		group ^= maybe_invert;
	}
	assert(group != 0);
	size_t bit = forward ? ffs_lu(group) : fls_lu(group);
	size_t pos = group_ind * FB_GROUP_BITS + bit;
	/*
	 * The high bits of a partially filled last group are zeros, so if
	 * we're looking for zeros we don't want to report an invalid result.
	 */
	if (forward && !val && pos > nbits) {
		return nbits;
	}
	return pos;
}
/*
 * Find the first unset bit in the bitmap with an index >= min_bit.  Returns
 * the number of bits in the bitmap if no such bit exists.  (Note: val is
 * false here — this searches for zeros, matching the "u" in the name.)
 */
static inline size_t
fb_ffu(fb_group_t *fb, size_t nbits, size_t min_bit) {
	return (size_t)fb_find_impl(fb, nbits, min_bit, /* val */ false,
	    /* forward */ true);
}
/*
 * Find the first set bit in the bitmap with an index >= min_bit.  Returns the
 * number of bits in the bitmap if no such bit exists.
 */
static inline size_t
fb_ffs(fb_group_t *fb, size_t nbits, size_t min_bit) {
	return (size_t)fb_find_impl(fb, nbits, min_bit, /* val */ true,
	    /* forward */ true);
}
/*
 * Find the last unset bit in the bitmap with an index <= max_bit.  Returns -1
 * if no such bit exists.  (val is false — a search for zeros.)
 */
static inline ssize_t
fb_flu(fb_group_t *fb, size_t nbits, size_t max_bit) {
	return fb_find_impl(fb, nbits, max_bit, /* val */ false,
	    /* forward */ false);
}
/*
 * Find the last set bit in the bitmap with an index <= max_bit.  Returns -1
 * if no such bit exists.
 */
static inline ssize_t
fb_fls(fb_group_t *fb, size_t nbits, size_t max_bit) {
	return fb_find_impl(fb, nbits, max_bit, /* val */ true,
	    /* forward */ false);
}
#endif /* JEMALLOC_INTERNAL_FB_H */

313
test/unit/flat_bitmap.c Normal file
View File

@ -0,0 +1,313 @@
#include "test/jemalloc_test.h"
#include "jemalloc/internal/flat_bitmap.h"
#include "test/nbits.h"
/* Verify that fb_init zeroes every bit, even over junk-filled memory. */
static void
do_test_init(size_t nbits) {
	size_t sz = FB_NGROUPS(nbits) * sizeof(fb_group_t);
	fb_group_t *fb = malloc(sz);
	/* Fill with garbage so that init has real work to do. */
	memset(fb, 99, sz);
	fb_init(fb, nbits);
	for (size_t bit = 0; bit < nbits; bit++) {
		expect_false(fb_get(fb, nbits, bit),
		    "bitmap should start empty");
	}
	free(fb);
}
/* Run do_test_init over every size in NBITS_TAB. */
TEST_BEGIN(test_fb_init) {
#define NB(nbits) \
	do_test_init(nbits);
	NBITS_TAB
#undef NB
}
TEST_END
/* Exercise fb_get/fb_set/fb_unset across every position of the bitmap. */
static void
do_test_get_set_unset(size_t nbits) {
	size_t sz = FB_NGROUPS(nbits) * sizeof(fb_group_t);
	fb_group_t *fb = malloc(sz);
	fb_init(fb, nbits);
	/* Set the bits divisible by 3. */
	for (size_t pos = 0; pos < nbits; pos++) {
		if (pos % 3 == 0) {
			fb_set(fb, nbits, pos);
		}
	}
	/* Check them. */
	for (size_t pos = 0; pos < nbits; pos++) {
		expect_b_eq(pos % 3 == 0, fb_get(fb, nbits, pos),
		    "Unexpected bit at position %zu", pos);
	}
	/* Unset those divisible by 5. */
	for (size_t pos = 0; pos < nbits; pos++) {
		if (pos % 5 == 0) {
			fb_unset(fb, nbits, pos);
		}
	}
	/* Check them: set iff divisible by 3 but not 5. */
	for (size_t pos = 0; pos < nbits; pos++) {
		expect_b_eq(pos % 3 == 0 && pos % 5 != 0,
		    fb_get(fb, nbits, pos),
		    "Unexpected bit at position %zu", pos);
	}
	free(fb);
}
/* Run do_test_get_set_unset over every size in NBITS_TAB. */
TEST_BEGIN(test_get_set_unset) {
#define NB(nbits) \
	do_test_get_set_unset(nbits);
	NBITS_TAB
#undef NB
}
TEST_END
/*
 * Reference implementation: scan linearly from i (forward or backward) for
 * the first index whose bit value (set iff divisible by 3 or 5) equals bit.
 * Returns nbits (forward) or -1 (backward) if the scan runs off the end.
 */
static ssize_t
find_3_5_compute(ssize_t i, size_t nbits, bool bit, bool forward) {
	ssize_t step = forward ? 1 : -1;
	while (i >= 0 && i < (ssize_t)nbits) {
		bool set_here = (i % 3 == 0) || (i % 5 == 0);
		if (set_here == bit) {
			return i;
		}
		i += step;
	}
	return forward ? (ssize_t)nbits : (ssize_t)-1;
}
/*
 * Compare every fb_ffs/fb_fls/fb_ffu/fb_flu result against the brute-force
 * reference find_3_5_compute, on a bitmap whose set bits are the multiples
 * of 3 or 5.
 */
static void
do_test_search_simple(size_t nbits) {
	size_t sz = FB_NGROUPS(nbits) * sizeof(fb_group_t);
	fb_group_t *fb = malloc(sz);
	fb_init(fb, nbits);
	/* We pick multiples of 3 or 5. */
	for (size_t i = 0; i < nbits; i++) {
		if (i % 3 == 0) {
			fb_set(fb, nbits, i);
		}
		/* This tests double-setting a little, too. */
		if (i % 5 == 0) {
			fb_set(fb, nbits, i);
		}
	}
	for (size_t i = 0; i < nbits; i++) {
		size_t ffs_compute = find_3_5_compute(i, nbits, true, true);
		size_t ffs_search = fb_ffs(fb, nbits, i);
		expect_zu_eq(ffs_compute, ffs_search, "ffs mismatch at %zu", i);
		/*
		 * The backward searches return ssize_t (-1 on failure), so
		 * keep them signed and use the signed expecters; comparing
		 * them as size_t would hide sign errors and print garbage on
		 * failure.
		 */
		ssize_t fls_compute = find_3_5_compute(i, nbits, true, false);
		ssize_t fls_search = fb_fls(fb, nbits, i);
		expect_zd_eq(fls_compute, fls_search, "fls mismatch at %zu", i);
		size_t ffu_compute = find_3_5_compute(i, nbits, false, true);
		size_t ffu_search = fb_ffu(fb, nbits, i);
		expect_zu_eq(ffu_compute, ffu_search, "ffu mismatch at %zu", i);
		ssize_t flu_compute = find_3_5_compute(i, nbits, false, false);
		ssize_t flu_search = fb_flu(fb, nbits, i);
		expect_zd_eq(flu_compute, flu_search, "flu mismatch at %zu", i);
	}
	free(fb);
}
/* Run do_test_search_simple over every size in NBITS_TAB. */
TEST_BEGIN(test_search_simple) {
#define NB(nbits) \
	do_test_search_simple(nbits);
	NBITS_TAB
#undef NB
}
TEST_END
/*
 * mostly_empty has exactly one set bit (at special_bit); mostly_full has
 * exactly one unset bit (also at special_bit).  Check all four search
 * functions starting from `position` against the answers implied by that
 * structure, split by where position sits relative to special_bit.
 */
static void
expect_exhaustive_results(fb_group_t *mostly_full, fb_group_t *mostly_empty,
    size_t nbits, size_t special_bit, size_t position) {
	if (position < special_bit) {
		/* Searching from below the special bit. */
		expect_zu_eq(special_bit, fb_ffs(mostly_empty, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zd_eq(-1, fb_fls(mostly_empty, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zu_eq(position, fb_ffu(mostly_empty, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zd_eq(position, fb_flu(mostly_empty, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zu_eq(position, fb_ffs(mostly_full, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zd_eq(position, fb_fls(mostly_full, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zu_eq(special_bit, fb_ffu(mostly_full, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zd_eq(-1, fb_flu(mostly_full, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
	} else if (position == special_bit) {
		/* Searching from exactly the special bit. */
		expect_zu_eq(special_bit, fb_ffs(mostly_empty, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zd_eq(special_bit, fb_fls(mostly_empty, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zu_eq(position + 1, fb_ffu(mostly_empty, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zd_eq(position - 1, fb_flu(mostly_empty, nbits,
		    position), "mismatch at %zu, %zu", position, special_bit);
		expect_zu_eq(position + 1, fb_ffs(mostly_full, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zd_eq(position - 1, fb_fls(mostly_full, nbits,
		    position), "mismatch at %zu, %zu", position, special_bit);
		expect_zu_eq(position, fb_ffu(mostly_full, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zd_eq(position, fb_flu(mostly_full, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
	} else {
		/* position > special_bit. */
		expect_zu_eq(nbits, fb_ffs(mostly_empty, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zd_eq(special_bit, fb_fls(mostly_empty, nbits,
		    position), "mismatch at %zu, %zu", position, special_bit);
		expect_zu_eq(position, fb_ffu(mostly_empty, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zd_eq(position, fb_flu(mostly_empty, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zu_eq(position, fb_ffs(mostly_full, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zd_eq(position, fb_fls(mostly_full, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zu_eq(nbits, fb_ffu(mostly_full, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
		expect_zd_eq(special_bit, fb_flu(mostly_full, nbits, position),
		    "mismatch at %zu, %zu", position, special_bit);
	}
}
/*
 * For every (special bit, search position) pair, check search results on a
 * bitmap that is all-zero except one bit and one that is all-one except one.
 */
static void
do_test_search_exhaustive(size_t nbits) {
	/* This test is quadratic; let's not get too big. */
	if (nbits > 1000) {
		return;
	}
	size_t sz = FB_NGROUPS(nbits) * sizeof(fb_group_t);
	fb_group_t *empty = malloc(sz);
	fb_init(empty, nbits);
	fb_group_t *full = malloc(sz);
	fb_init(full, nbits);
	fb_set_range(full, nbits, 0, nbits);

	for (size_t special = 0; special < nbits; special++) {
		/* Flip exactly one bit in each bitmap... */
		fb_set(empty, nbits, special);
		fb_unset(full, nbits, special);
		for (size_t pos = 0; pos < nbits; pos++) {
			expect_exhaustive_results(full, empty, nbits, special,
			    pos);
		}
		/* ...and restore it before the next round. */
		fb_unset(empty, nbits, special);
		fb_set(full, nbits, special);
	}
	free(empty);
	free(full);
}
/* Run do_test_search_exhaustive over every size in NBITS_TAB. */
TEST_BEGIN(test_search_exhaustive) {
#define NB(nbits) \
	do_test_search_exhaustive(nbits);
	NBITS_TAB
#undef NB
}
TEST_END
TEST_BEGIN(test_range_simple) {
	/*
	 * Just pick a constant big enough to have nontrivial middle sizes, and
	 * big enough that usages of things like weirdnum (below) near the
	 * beginning fit comfortably into the beginning of the bitmap.
	 */
	size_t nbits = 64 * 10;
	size_t ngroups = FB_NGROUPS(nbits);
	fb_group_t *fb = malloc(sizeof(fb_group_t) * ngroups);
	fb_init(fb, nbits);
	/* Set all the even bits via single-bit ranges. */
	for (size_t i = 0; i < nbits; i++) {
		if (i % 2 == 0) {
			fb_set_range(fb, nbits, i, 1);
		}
	}
	for (size_t i = 0; i < nbits; i++) {
		expect_b_eq(i % 2 == 0, fb_get(fb, nbits, i),
		    "mismatch at position %zu", i);
	}
	/* Set the low half, clear the high half. */
	fb_set_range(fb, nbits, 0, nbits / 2);
	fb_unset_range(fb, nbits, nbits / 2, nbits / 2);
	for (size_t i = 0; i < nbits; i++) {
		expect_b_eq(i < nbits / 2, fb_get(fb, nbits, i),
		    "mismatch at position %zu", i);
	}
	/* A group-straddling unset range at an odd offset. */
	static const size_t weirdnum = 7;
	fb_set_range(fb, nbits, 0, nbits);
	fb_unset_range(fb, nbits, weirdnum, FB_GROUP_BITS + weirdnum);
	for (size_t i = 0; i < nbits; i++) {
		/*
		 * The unset range is [weirdnum, weirdnum + FB_GROUP_BITS +
		 * weirdnum).  Use weirdnum here instead of a magic 7 so the
		 * check stays correct if the constant changes.
		 */
		expect_b_eq(weirdnum <= i
		    && i <= 2 * weirdnum + FB_GROUP_BITS - 1,
		    !fb_get(fb, nbits, i), "mismatch at position %zu", i);
	}
	free(fb);
}
TEST_END
/*
 * Flip each bit in turn in an otherwise-empty and an otherwise-full bitmap,
 * checking fb_empty/fb_full after every flip.
 */
static void
do_test_empty_full_exhaustive(size_t nbits) {
	size_t sz = FB_NGROUPS(nbits) * sizeof(fb_group_t);
	fb_group_t *empty = malloc(sz);
	fb_init(empty, nbits);
	fb_group_t *full = malloc(sz);
	fb_init(full, nbits);
	fb_set_range(full, nbits, 0, nbits);

	expect_true(fb_full(full, nbits), "");
	expect_false(fb_empty(full, nbits), "");
	expect_false(fb_full(empty, nbits), "");
	expect_true(fb_empty(empty, nbits), "");

	for (size_t bit = 0; bit < nbits; bit++) {
		fb_set(empty, nbits, bit);
		fb_unset(full, nbits, bit);
		expect_false(fb_empty(empty, nbits), "error at bit %zu", bit);
		if (nbits != 1) {
			expect_false(fb_full(empty, nbits),
			    "error at bit %zu", bit);
			expect_false(fb_empty(full, nbits),
			    "error at bit %zu", bit);
		} else {
			/* With one bit, a single flip inverts everything. */
			expect_true(fb_full(empty, nbits),
			    "error at bit %zu", bit);
			expect_true(fb_empty(full, nbits),
			    "error at bit %zu", bit);
		}
		expect_false(fb_full(full, nbits), "error at bit %zu", bit);
		/* Restore the flipped bit before moving on. */
		fb_unset(empty, nbits, bit);
		fb_set(full, nbits, bit);
	}
	free(empty);
	free(full);
}
/* Run do_test_empty_full_exhaustive over every size in NBITS_TAB. */
TEST_BEGIN(test_empty_full) {
#define NB(nbits) \
	do_test_empty_full_exhaustive(nbits);
	NBITS_TAB
#undef NB
}
TEST_END
/* Entry point: run every flat-bitmap unit test via the test harness. */
int
main(void) {
	return test_no_reentrancy(
	    test_fb_init,
	    test_get_set_unset,
	    test_search_simple,
	    test_search_exhaustive,
	    test_range_simple,
	    test_empty_full);
}