Extents: Break extent-struct/arena interactions

Specifically, the extent_arena_[g|s]et functions and the address randomization.

These are the only things that tie the extent struct itself to the arena code.
This commit is contained in:
David T. Goldblatt 2019-09-20 18:20:22 -07:00 committed by David Goldblatt
parent 529cfe2abc
commit 41187bdfb0
9 changed files with 106 additions and 83 deletions

View File

@ -178,7 +178,8 @@ arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero,
/*
 * Return the arena that owns the allocation at ptr, by looking up the
 * extent covering ptr and reading its arena index.
 */
JEMALLOC_ALWAYS_INLINE arena_t *
arena_aalloc(tsdn_t *tsdn, const void *ptr) {
/*
 * NOTE(review): two consecutive return statements — this looks like a
 * rendered-diff artifact (old line followed by its replacement); as
 * written, the first return makes the second unreachable.  Verify
 * against the upstream commit before treating this as compilable code.
 */
return extent_arena_get(iealloc(tsdn, ptr));
return (arena_t *)atomic_load_p(&arenas[extent_arena_ind_get(
iealloc(tsdn, ptr))], ATOMIC_RELAXED);
}
JEMALLOC_ALWAYS_INLINE size_t

View File

@ -44,13 +44,6 @@ extent_arena_ind_get(const extent_t *extent) {
return arena_ind;
}
static inline arena_t *
extent_arena_get(const extent_t *extent) {
unsigned arena_ind = extent_arena_ind_get(extent);
return (arena_t *)atomic_load_p(&arenas[arena_ind], ATOMIC_ACQUIRE);
}
static inline szind_t
extent_szind_get_maybe_invalid(const extent_t *extent) {
szind_t szind = (szind_t)((extent->e_bits & EXTENT_BITS_SZIND_MASK) >>
@ -192,9 +185,7 @@ extent_prof_alloc_time_get(const extent_t *extent) {
}
/*
 * Store an arena index into the extent's packed e_bits field.
 *
 * NOTE(review): this span interleaves the old and new versions of the
 * function (rendered-diff artifact): lines mentioning extent_arena_set
 * and the NULL-arena sentinel computation belong to the removed
 * variant, while the extent_arena_ind_set signature plus the e_bits
 * mask/shift body is the replacement.  As written it is not valid C;
 * consult the upstream commit for the real before/after text.
 */
static inline void
extent_arena_set(extent_t *extent, arena_t *arena) {
unsigned arena_ind = (arena != NULL) ? arena_ind_get(arena) : ((1U <<
MALLOCX_ARENA_BITS) - 1);
extent_arena_ind_set(extent_t *extent, unsigned arena_ind) {
extent->e_bits = (extent->e_bits & ~EXTENT_BITS_ARENA_MASK) |
((uint64_t)arena_ind << EXTENT_BITS_ARENA_SHIFT);
}
@ -212,32 +203,6 @@ extent_addr_set(extent_t *extent, void *addr) {
extent->e_addr = addr;
}
/*
 * For sub-page alignments, perturb the extent's address by a random,
 * cacheline-aligned offset within the page.  The offset comes from the
 * thread's PRNG state when a tsd is available, otherwise from the
 * owning arena's offset_state.  Page-aligned (or larger) requests are
 * left untouched.
 */
static inline void
extent_addr_randomize(tsdn_t *tsdn, extent_t *extent, size_t alignment) {
	assert(extent_base_get(extent) == extent_addr_get(extent));
	if (alignment >= PAGE) {
		/* Nothing to randomize at page alignment or above. */
		return;
	}
	unsigned lg_range = LG_PAGE -
	    lg_floor(CACHELINE_CEILING(alignment));
	size_t rand_bits;
	if (tsdn_null(tsdn)) {
		/* No tsd: fall back to the owning arena's PRNG state. */
		rand_bits = prng_lg_range_zu(
		    &extent_arena_get(extent)->offset_state, lg_range, true);
	} else {
		rand_bits = (size_t)prng_lg_range_u64(
		    tsd_offset_statep_get(tsdn_tsd(tsdn)), lg_range);
	}
	uintptr_t offset = (uintptr_t)rand_bits << (LG_PAGE - lg_range);
	extent->e_addr = (void *)((uintptr_t)extent->e_addr + offset);
	assert(ALIGNMENT_ADDR2BASE(extent->e_addr, alignment) ==
	    extent->e_addr);
}
static inline void
extent_size_set(extent_t *extent, size_t size) {
assert((size & ~EXTENT_SIZE_MASK) == 0);
@ -364,12 +329,12 @@ extent_is_head_set(extent_t *extent, bool is_head) {
}
static inline void
extent_init(extent_t *extent, arena_t *arena, void *addr, size_t size,
extent_init(extent_t *extent, unsigned arena_ind, void *addr, size_t size,
bool slab, szind_t szind, size_t sn, extent_state_t state, bool zeroed,
bool committed, bool dumpable, extent_head_state_t is_head) {
assert(addr == PAGE_ADDR2BASE(addr) || !slab);
extent_arena_set(extent, arena);
extent_arena_ind_set(extent, arena_ind);
extent_addr_set(extent, addr);
extent_size_set(extent, size);
extent_slab_set(extent, slab);
@ -391,7 +356,7 @@ extent_init(extent_t *extent, arena_t *arena, void *addr, size_t size,
static inline void
extent_binit(extent_t *extent, void *addr, size_t bsize, size_t sn) {
extent_arena_set(extent, NULL);
extent_arena_ind_set(extent, (1U << MALLOCX_ARENA_BITS) - 1);
extent_addr_set(extent, addr);
extent_bsize_set(extent, bsize);
extent_slab_set(extent, false);

View File

@ -1566,7 +1566,8 @@ arena_prof_promote(tsdn_t *tsdn, void *ptr, size_t usize) {
extent_t *extent = rtree_extent_read(tsdn, &extents_rtree, rtree_ctx,
(uintptr_t)ptr, true);
arena_t *arena = extent_arena_get(extent);
arena_t *arena = atomic_load_p(&arenas[extent_arena_ind_get(extent)],
ATOMIC_RELAXED);
szind_t szind = sz_size2index(usize);
extent_szind_set(extent, szind);
@ -1731,7 +1732,8 @@ arena_dalloc_bin(tsdn_t *tsdn, arena_t *arena, extent_t *extent, void *ptr) {
void
arena_dalloc_small(tsdn_t *tsdn, void *ptr) {
extent_t *extent = iealloc(tsdn, ptr);
arena_t *arena = extent_arena_get(extent);
arena_t *arena = atomic_load_p(&arenas[extent_arena_ind_get(extent)],
ATOMIC_RELAXED);
arena_dalloc_bin(tsdn, arena, extent, ptr);
arena_decay_tick(tsdn, arena);
@ -1767,7 +1769,9 @@ arena_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
goto done;
}
arena_decay_tick(tsdn, extent_arena_get(extent));
arena_t *arena = atomic_load_p(
&arenas[extent_arena_ind_get(extent)], ATOMIC_RELAXED);
arena_decay_tick(tsdn, arena);
ret = false;
} else if (oldsize >= SC_LARGE_MINCLASS
&& usize_max >= SC_LARGE_MINCLASS) {

View File

@ -2612,7 +2612,8 @@ arenas_lookup_ctl(tsd_t *tsd, const size_t *mib,
if (extent == NULL)
goto label_return;
arena = extent_arena_get(extent);
arena = atomic_load_p(&arenas[extent_arena_ind_get(extent)],
ATOMIC_RELAXED);
if (arena == NULL)
goto label_return;

View File

@ -176,6 +176,32 @@ extent_lock_from_addr(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx, void *addr,
return ret;
}
/*
 * For sub-page alignments, perturb the extent's address by a random,
 * cacheline-aligned offset within the page.  The offset comes from the
 * thread's PRNG state when a tsd is available, otherwise from the given
 * arena's offset_state (the arena is now passed explicitly rather than
 * read back out of the extent).  Page-aligned (or larger) requests are
 * left untouched.
 */
static void
extent_addr_randomize(tsdn_t *tsdn, arena_t *arena, extent_t *extent,
    size_t alignment) {
	assert(extent_base_get(extent) == extent_addr_get(extent));
	if (alignment >= PAGE) {
		/* Nothing to randomize at page alignment or above. */
		return;
	}
	unsigned lg_range = LG_PAGE -
	    lg_floor(CACHELINE_CEILING(alignment));
	size_t rand_bits;
	if (tsdn_null(tsdn)) {
		/* No tsd: fall back to the caller-supplied arena's PRNG. */
		rand_bits = prng_lg_range_zu(&arena->offset_state, lg_range,
		    true);
	} else {
		rand_bits = (size_t)prng_lg_range_u64(
		    tsd_offset_statep_get(tsdn_tsd(tsdn)), lg_range);
	}
	uintptr_t offset = (uintptr_t)rand_bits << (LG_PAGE - lg_range);
	extent->e_addr = (void *)((uintptr_t)extent->e_addr + offset);
	assert(ALIGNMENT_ADDR2BASE(extent->e_addr, alignment) ==
	    extent->e_addr);
}
extent_t *
extent_alloc(tsdn_t *tsdn, arena_t *arena) {
malloc_mutex_lock(tsdn, &arena->extent_avail_mtx);
@ -671,7 +697,7 @@ extents_postfork_child(tsdn_t *tsdn, extents_t *extents) {
static void
extent_deactivate_locked(tsdn_t *tsdn, arena_t *arena, extents_t *extents,
extent_t *extent) {
assert(extent_arena_get(extent) == arena);
assert(extent_arena_ind_get(extent) == arena_ind_get(arena));
assert(extent_state_get(extent) == extent_state_active);
extent_state_set(extent, extents_state_get(extents));
@ -689,7 +715,7 @@ extent_deactivate(tsdn_t *tsdn, arena_t *arena, extents_t *extents,
static void
extent_activate_locked(tsdn_t *tsdn, arena_t *arena, extents_t *extents,
extent_t *extent) {
assert(extent_arena_get(extent) == arena);
assert(extent_arena_ind_get(extent) == arena_ind_get(arena));
assert(extent_state_get(extent) == extents_state_get(extents));
extents_remove_locked(tsdn, extents, extent);
@ -927,7 +953,8 @@ extent_recycle_extract(tsdn_t *tsdn, arena_t *arena,
*/
extent_t *unlock_extent = extent;
assert(extent_base_get(extent) == new_addr);
if (extent_arena_get(extent) != arena ||
if (extent_arena_ind_get(extent)
!= arena_ind_get(arena) ||
extent_size_get(extent) < esize ||
extent_state_get(extent) !=
extents_state_get(extents)) {
@ -1172,7 +1199,7 @@ extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
}
if (pad != 0) {
extent_addr_randomize(tsdn, extent, alignment);
extent_addr_randomize(tsdn, arena, extent, alignment);
}
assert(extent_state_get(extent) == extent_state_active);
if (slab) {
@ -1342,8 +1369,8 @@ extent_grow_retained(tsdn_t *tsdn, arena_t *arena,
extent_hook_post_reentrancy(tsdn);
}
extent_init(extent, arena, ptr, alloc_size, false, SC_NSIZES,
arena_extent_sn_next(arena), extent_state_active, zeroed,
extent_init(extent, arena_ind_get(arena), ptr, alloc_size, false,
SC_NSIZES, arena_extent_sn_next(arena), extent_state_active, zeroed,
committed, true, EXTENT_IS_HEAD);
if (ptr == NULL) {
extent_dalloc(tsdn, arena, extent);
@ -1434,7 +1461,7 @@ extent_grow_retained(tsdn_t *tsdn, arena_t *arena,
extent_gdump_add(tsdn, extent);
}
if (pad != 0) {
extent_addr_randomize(tsdn, extent, alignment);
extent_addr_randomize(tsdn, arena, extent, alignment);
}
if (slab) {
rtree_ctx_t rtree_ctx_fallback;
@ -1513,11 +1540,11 @@ extent_alloc_wrapper_hard(tsdn_t *tsdn, arena_t *arena,
extent_dalloc(tsdn, arena, extent);
return NULL;
}
extent_init(extent, arena, addr, esize, slab, szind,
extent_init(extent, arena_ind_get(arena), addr, esize, slab, szind,
arena_extent_sn_next(arena), extent_state_active, *zero, *commit,
true, EXTENT_NOT_HEAD);
if (pad != 0) {
extent_addr_randomize(tsdn, extent, alignment);
extent_addr_randomize(tsdn, arena, extent, alignment);
}
if (extent_register(tsdn, extent)) {
extent_dalloc(tsdn, arena, extent);
@ -1559,8 +1586,8 @@ extent_alloc_wrapper(tsdn_t *tsdn, arena_t *arena,
static bool
extent_can_coalesce(arena_t *arena, extents_t *extents, const extent_t *inner,
const extent_t *outer) {
assert(extent_arena_get(inner) == arena);
if (extent_arena_get(outer) != arena) {
assert(extent_arena_ind_get(inner) == arena_ind_get(arena));
if (extent_arena_ind_get(outer) != arena_ind_get(arena)) {
return false;
}
@ -2105,11 +2132,11 @@ extent_split_impl(tsdn_t *tsdn, arena_t *arena,
goto label_error_a;
}
extent_init(trail, arena, (void *)((uintptr_t)extent_base_get(extent) +
size_a), size_b, slab_b, szind_b, extent_sn_get(extent),
extent_state_get(extent), extent_zeroed_get(extent),
extent_committed_get(extent), extent_dumpable_get(extent),
EXTENT_NOT_HEAD);
extent_init(trail, arena_ind_get(arena),
(void *)((uintptr_t)extent_base_get(extent) + size_a), size_b,
slab_b, szind_b, extent_sn_get(extent), extent_state_get(extent),
extent_zeroed_get(extent), extent_committed_get(extent),
extent_dumpable_get(extent), EXTENT_NOT_HEAD);
rtree_ctx_t rtree_ctx_fallback;
rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
@ -2117,7 +2144,8 @@ extent_split_impl(tsdn_t *tsdn, arena_t *arena,
{
extent_t lead;
extent_init(&lead, arena, extent_addr_get(extent), size_a,
extent_init(&lead, arena_ind_get(arena),
extent_addr_get(extent), size_a,
slab_a, szind_a, extent_sn_get(extent),
extent_state_get(extent), extent_zeroed_get(extent),
extent_committed_get(extent), extent_dumpable_get(extent),
@ -2304,7 +2332,12 @@ extent_merge_impl(tsdn_t *tsdn, arena_t *arena,
extent_unlock2(tsdn, a, b);
extent_dalloc(tsdn, extent_arena_get(b), b);
/*
* If we got here, we merged the extents; so they must be from the same
* arena (i.e. this one).
*/
assert(extent_arena_ind_get(b) == arena_ind_get(arena));
extent_dalloc(tsdn, arena, b);
return false;
}
@ -2384,7 +2417,8 @@ extent_util_stats_verbose_get(tsdn_t *tsdn, const void *ptr,
assert(*nfree <= *nregs);
assert(*nfree * extent_usize_get(extent) <= *size);
const arena_t *arena = extent_arena_get(extent);
const arena_t *arena = (arena_t *)atomic_load_p(
&arenas[extent_arena_ind_get(extent)], ATOMIC_RELAXED);
assert(arena != NULL);
const unsigned binshard = extent_binshard_get(extent);
bin_t *bin = &arena->bins[szind].bin_shards[binshard];

View File

@ -153,9 +153,9 @@ extent_alloc_dss(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size,
size_t gap_size_page = (uintptr_t)ret -
(uintptr_t)gap_addr_page;
if (gap_size_page != 0) {
extent_init(gap, arena, gap_addr_page,
gap_size_page, false, SC_NSIZES,
arena_extent_sn_next(arena),
extent_init(gap, arena_ind_get(arena),
gap_addr_page, gap_size_page, false,
SC_NSIZES, arena_extent_sn_next(arena),
extent_state_active, false, true, true,
EXTENT_NOT_HEAD);
}
@ -198,7 +198,8 @@ extent_alloc_dss(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size,
EXTENT_HOOKS_INITIALIZER;
extent_t extent;
extent_init(&extent, arena, ret, size,
extent_init(&extent,
arena_ind_get(arena), ret, size,
size, false, SC_NSIZES,
extent_state_active, false, true,
true, EXTENT_NOT_HEAD);

View File

@ -94,7 +94,8 @@ large_dalloc_maybe_junk_t *JET_MUTABLE large_dalloc_maybe_junk =
static bool
large_ralloc_no_move_shrink(tsdn_t *tsdn, extent_t *extent, size_t usize) {
arena_t *arena = extent_arena_get(extent);
arena_t *arena = atomic_load_p(&arenas[extent_arena_ind_get(extent)],
ATOMIC_RELAXED);
size_t oldusize = extent_usize_get(extent);
extent_hooks_t *extent_hooks = extent_hooks_get(arena);
size_t diff = extent_size_get(extent) - (usize + sz_large_pad);
@ -130,7 +131,8 @@ large_ralloc_no_move_shrink(tsdn_t *tsdn, extent_t *extent, size_t usize) {
static bool
large_ralloc_no_move_expand(tsdn_t *tsdn, extent_t *extent, size_t usize,
bool zero) {
arena_t *arena = extent_arena_get(extent);
arena_t *arena = atomic_load_p(&arenas[extent_arena_ind_get(extent)],
ATOMIC_RELAXED);
size_t oldusize = extent_usize_get(extent);
extent_hooks_t *extent_hooks = extent_hooks_get(arena);
size_t trailsize = usize - oldusize;
@ -230,14 +232,18 @@ large_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, size_t usize_min,
/* Attempt to expand the allocation in-place. */
if (!large_ralloc_no_move_expand(tsdn, extent, usize_max,
zero)) {
arena_decay_tick(tsdn, extent_arena_get(extent));
arena_decay_tick(tsdn,
atomic_load_p(&arenas[extent_arena_ind_get(extent)],
ATOMIC_RELAXED));
return false;
}
/* Try again, this time with usize_min. */
if (usize_min < usize_max && usize_min > oldusize &&
large_ralloc_no_move_expand(tsdn, extent, usize_min,
zero)) {
arena_decay_tick(tsdn, extent_arena_get(extent));
arena_decay_tick(tsdn, atomic_load_p(
&arenas[extent_arena_ind_get(extent)],
ATOMIC_RELAXED));
return false;
}
}
@ -247,14 +253,17 @@ large_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, size_t usize_min,
* the new size.
*/
if (oldusize >= usize_min && oldusize <= usize_max) {
arena_decay_tick(tsdn, extent_arena_get(extent));
arena_decay_tick(tsdn, atomic_load_p(
&arenas[extent_arena_ind_get(extent)], ATOMIC_RELAXED));
return false;
}
/* Attempt to shrink the allocation in-place. */
if (oldusize > usize_max) {
if (!large_ralloc_no_move_shrink(tsdn, extent, usize_max)) {
arena_decay_tick(tsdn, extent_arena_get(extent));
arena_decay_tick(tsdn, atomic_load_p(
&arenas[extent_arena_ind_get(extent)],
ATOMIC_RELAXED));
return false;
}
}
@ -348,17 +357,20 @@ large_dalloc_finish_impl(tsdn_t *tsdn, arena_t *arena, extent_t *extent) {
void
large_dalloc_prep_junked_locked(tsdn_t *tsdn, extent_t *extent) {
large_dalloc_prep_impl(tsdn, extent_arena_get(extent), extent, true);
large_dalloc_prep_impl(tsdn, atomic_load_p(
&arenas[extent_arena_ind_get(extent)], ATOMIC_RELAXED), extent, true);
}
void
large_dalloc_finish(tsdn_t *tsdn, extent_t *extent) {
large_dalloc_finish_impl(tsdn, extent_arena_get(extent), extent);
large_dalloc_finish_impl(tsdn, atomic_load_p(
&arenas[extent_arena_ind_get(extent)], ATOMIC_RELAXED), extent);
}
void
large_dalloc(tsdn_t *tsdn, extent_t *extent) {
arena_t *arena = extent_arena_get(extent);
arena_t *arena = atomic_load_p(
&arenas[extent_arena_ind_get(extent)], ATOMIC_RELAXED);
large_dalloc_prep_impl(tsdn, arena, extent, false);
large_dalloc_finish_impl(tsdn, arena, extent);
arena_decay_tick(tsdn, arena);

View File

@ -2,6 +2,8 @@
#include "jemalloc/internal/rtree.h"
#define INVALID_ARENA_IND ((1U << MALLOCX_ARENA_BITS) - 1)
rtree_node_alloc_t *rtree_node_alloc_orig;
rtree_node_dalloc_t *rtree_node_dalloc_orig;
rtree_leaf_alloc_t *rtree_leaf_alloc_orig;
@ -85,10 +87,10 @@ TEST_END
TEST_BEGIN(test_rtree_extrema) {
extent_t extent_a, extent_b;
extent_init(&extent_a, NULL, NULL, SC_LARGE_MINCLASS, false,
sz_size2index(SC_LARGE_MINCLASS), 0,
extent_init(&extent_a, INVALID_ARENA_IND, NULL, SC_LARGE_MINCLASS,
false, sz_size2index(SC_LARGE_MINCLASS), 0,
extent_state_active, false, false, true, EXTENT_NOT_HEAD);
extent_init(&extent_b, NULL, NULL, 0, false, SC_NSIZES, 0,
extent_init(&extent_b, INVALID_ARENA_IND, NULL, 0, false, SC_NSIZES, 0,
extent_state_active, false, false, true, EXTENT_NOT_HEAD);
tsdn_t *tsdn = tsdn_fetch();
@ -125,7 +127,7 @@ TEST_BEGIN(test_rtree_bits) {
PAGE + (((uintptr_t)1) << LG_PAGE) - 1};
extent_t extent;
extent_init(&extent, NULL, NULL, 0, false, SC_NSIZES, 0,
extent_init(&extent, INVALID_ARENA_IND, NULL, 0, false, SC_NSIZES, 0,
extent_state_active, false, false, true, EXTENT_NOT_HEAD);
rtree_t *rtree = &test_rtree;
@ -166,7 +168,7 @@ TEST_BEGIN(test_rtree_random) {
rtree_ctx_data_init(&rtree_ctx);
extent_t extent;
extent_init(&extent, NULL, NULL, 0, false, SC_NSIZES, 0,
extent_init(&extent, INVALID_ARENA_IND, NULL, 0, false, SC_NSIZES, 0,
extent_state_active, false, false, true, EXTENT_NOT_HEAD);
assert_false(rtree_new(rtree, false), "Unexpected rtree_new() failure");

View File

@ -1,5 +1,7 @@
#include "test/jemalloc_test.h"
#define INVALID_ARENA_IND ((1U << MALLOCX_ARENA_BITS) - 1)
TEST_BEGIN(test_arena_slab_regind) {
szind_t binind;
@ -7,8 +9,9 @@ TEST_BEGIN(test_arena_slab_regind) {
size_t regind;
extent_t slab;
const bin_info_t *bin_info = &bin_infos[binind];
extent_init(&slab, NULL, mallocx(bin_info->slab_size,
MALLOCX_LG_ALIGN(LG_PAGE)), bin_info->slab_size, true,
extent_init(&slab, INVALID_ARENA_IND,
mallocx(bin_info->slab_size, MALLOCX_LG_ALIGN(LG_PAGE)),
bin_info->slab_size, true,
binind, 0, extent_state_active, false, true, true,
EXTENT_NOT_HEAD);
assert_ptr_not_null(extent_addr_get(&slab),