Simplify arena_slab_regind().
Rewrite arena_slab_regind() to provide sufficient constant data for the compiler to perform division strength reduction. This replaces more general manual strength reduction that was implemented before arena_bin_info was compile-time-constant. It would be possible to slightly improve on the compiler-generated division code by taking advantage of range limits that the compiler doesn't know about.
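The core of the simplification is that the divisor becomes a compile-time constant for each bin, so the compiler can strength-reduce each division (typically into a multiply-and-shift) rather than relying on hand-rolled reciprocal arithmetic. The following is an illustrative sketch only, not the jemalloc implementation: example_slab_regind and EXAMPLE_BIN_SIZES are hypothetical names, and the region sizes are made up; in jemalloc the per-bin constants would come from its size class definitions.

/*
 * Illustrative sketch only (not jemalloc code): per-bin constant divisors
 * let the compiler strength-reduce each division into multiply-and-shift.
 * EXAMPLE_BIN_SIZES and its region sizes are hypothetical.
 */
#include <assert.h>
#include <stddef.h>
#include <stdint.h>

#define EXAMPLE_BIN_SIZES(X)						\
	X(0, 8) X(1, 16) X(2, 32) X(3, 48) X(4, 64) X(5, 80)

static size_t
example_slab_regind(size_t binind, uintptr_t slab_addr, const void *ptr)
{
	size_t diff = (size_t)((uintptr_t)ptr - slab_addr);

	switch (binind) {
#define	X(index, reg_size)						\
	case index:							\
		/* Constant divisor: no runtime division instruction. */ \
		return (diff / (reg_size));
	EXAMPLE_BIN_SIZES(X)
#undef X
	default:
		assert(0);	/* Unknown bin index. */
		return (0);
	}
}

Whether the reduction actually happens can be verified by inspecting the generated assembly; as the commit message notes, range limits the compiler does not know about could still allow slightly better code than what it emits on its own.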
test/unit/slab.c (new file, 35 lines)
@@ -0,0 +1,35 @@
#include "test/jemalloc_test.h"

TEST_BEGIN(test_arena_slab_regind)
{
	szind_t binind;

	for (binind = 0; binind < NBINS; binind++) {
		size_t regind;
		extent_t slab;
		const arena_bin_info_t *bin_info = &arena_bin_info[binind];
		extent_init(&slab, NULL, mallocx(bin_info->slab_size,
		    MALLOCX_LG_ALIGN(LG_PAGE)), bin_info->slab_size, 0, 0, true,
		    false, true, true);
		assert_ptr_not_null(extent_addr_get(&slab),
		    "Unexpected malloc() failure");
		for (regind = 0; regind < bin_info->nregs; regind++) {
			void *reg = (void *)((uintptr_t)extent_addr_get(&slab) +
			    (bin_info->reg_size * regind));
			assert_zu_eq(arena_slab_regind(&slab, binind, reg),
			    regind,
			    "Incorrect region index computed for size %zu",
			    bin_info->reg_size);
		}
		free(extent_addr_get(&slab));
	}
}
TEST_END

int
main(void)
{

	return (test(
	    test_arena_slab_regind));
}