From 9b5d105fc36e719869f3e113d0d2dc16cf24a60c Mon Sep 17 00:00:00 2001
From: David Goldblatt
Date: Wed, 5 Feb 2020 14:50:34 -0800
Subject: [PATCH] Emap: Move in iealloc.

This is logically scoped to the emap.
---
 include/jemalloc/internal/arena_inlines_b.h  | 22 +++++++++++--------
 include/jemalloc/internal/emap.h             |  9 ++++++++
 .../internal/jemalloc_internal_inlines_b.h   | 11 ----------
 src/arena.c                                  |  8 +++----
 src/ctl.c                                    |  2 +-
 src/ehooks.c                                 |  8 +++----
 src/inspect.c                                |  4 ++--
 src/jemalloc.c                               |  4 ++--
 src/large.c                                  |  2 +-
 src/prof.c                                   |  2 +-
 src/tcache.c                                 |  6 +++--
 test/unit/binshard.c                         |  4 ++--
 test/unit/prof_recent.c                      |  2 +-
 13 files changed, 44 insertions(+), 40 deletions(-)

diff --git a/include/jemalloc/internal/arena_inlines_b.h b/include/jemalloc/internal/arena_inlines_b.h
index b39578c9..79478136 100644
--- a/include/jemalloc/internal/arena_inlines_b.h
+++ b/include/jemalloc/internal/arena_inlines_b.h
@@ -1,6 +1,7 @@
 #ifndef JEMALLOC_INTERNAL_ARENA_INLINES_B_H
 #define JEMALLOC_INTERNAL_ARENA_INLINES_B_H
 
+#include "jemalloc/internal/emap.h"
 #include "jemalloc/internal/jemalloc_internal_types.h"
 #include "jemalloc/internal/mutex.h"
 #include "jemalloc/internal/rtree.h"
@@ -47,10 +48,10 @@ arena_prof_info_get(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx,
 
 	/* Static check. */
 	if (alloc_ctx == NULL) {
-		edata = iealloc(tsd_tsdn(tsd), ptr);
+		edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
 		is_slab = edata_slab_get(edata);
 	} else if (unlikely(!(is_slab = alloc_ctx->slab))) {
-		edata = iealloc(tsd_tsdn(tsd), ptr);
+		edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
 	}
 
 	if (unlikely(!is_slab)) {
@@ -73,13 +74,15 @@ arena_prof_tctx_reset(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx) {
 
 	/* Static check. */
 	if (alloc_ctx == NULL) {
-		edata_t *edata = iealloc(tsd_tsdn(tsd), ptr);
+		edata_t *edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
 		if (unlikely(!edata_slab_get(edata))) {
 			large_prof_tctx_reset(edata);
 		}
 	} else {
 		if (unlikely(!alloc_ctx->slab)) {
-			large_prof_tctx_reset(iealloc(tsd_tsdn(tsd), ptr));
+			edata_t *edata = emap_lookup(tsd_tsdn(tsd),
+			    &emap_global, ptr);
+			large_prof_tctx_reset(edata);
 		}
 	}
 }
@@ -89,7 +92,7 @@ arena_prof_tctx_reset_sampled(tsd_t *tsd, const void *ptr) {
 	cassert(config_prof);
 	assert(ptr != NULL);
 
-	edata_t *edata = iealloc(tsd_tsdn(tsd), ptr);
+	edata_t *edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
 	assert(!edata_slab_get(edata));
 
 	large_prof_tctx_reset(edata);
@@ -177,8 +180,9 @@ arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero,
 
 JEMALLOC_ALWAYS_INLINE arena_t *
 arena_aalloc(tsdn_t *tsdn, const void *ptr) {
-	return (arena_t *)atomic_load_p(&arenas[edata_arena_ind_get(
-	    iealloc(tsdn, ptr))], ATOMIC_RELAXED);
+	edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+	unsigned arena_ind = edata_arena_ind_get(edata);
+	return (arena_t *)atomic_load_p(&arenas[arena_ind], ATOMIC_RELAXED);
 }
 
 JEMALLOC_ALWAYS_INLINE size_t
@@ -233,7 +237,7 @@ arena_dalloc_large_no_tcache(tsdn_t *tsdn, void *ptr, szind_t szind) {
 	if (config_prof && unlikely(szind < SC_NBINS)) {
 		arena_dalloc_promoted(tsdn, ptr, NULL, true);
 	} else {
-		edata_t *edata = iealloc(tsdn, ptr);
+		edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
 		large_dalloc(tsdn, edata);
 	}
 }
@@ -277,7 +281,7 @@ arena_dalloc_large(tsdn_t *tsdn, void *ptr, tcache_t *tcache, szind_t szind,
 			    slow_path);
 		}
 	} else {
-		edata_t *edata = iealloc(tsdn, ptr);
+		edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
 		large_dalloc(tsdn, edata);
 	}
 }
diff --git a/include/jemalloc/internal/emap.h b/include/jemalloc/internal/emap.h
index 70163942..a6aadbc5 100644
--- a/include/jemalloc/internal/emap.h
+++ b/include/jemalloc/internal/emap.h
@@ -112,4 +112,13 @@ emap_assert_mapped(tsdn_t *tsdn, emap_t *emap, edata_t *edata) {
 	}
 }
 
+JEMALLOC_ALWAYS_INLINE edata_t *
+emap_lookup(tsdn_t *tsdn, emap_t *emap, const void *ptr) {
+	rtree_ctx_t rtree_ctx_fallback;
+	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
+
+	return rtree_edata_read(tsdn, &emap->rtree, rtree_ctx, (uintptr_t)ptr,
+	    true);
+}
+
 #endif /* JEMALLOC_INTERNAL_EMAP_H */
diff --git a/include/jemalloc/internal/jemalloc_internal_inlines_b.h b/include/jemalloc/internal/jemalloc_internal_inlines_b.h
index 00fb6042..fc526c4b 100644
--- a/include/jemalloc/internal/jemalloc_internal_inlines_b.h
+++ b/include/jemalloc/internal/jemalloc_internal_inlines_b.h
@@ -1,9 +1,7 @@
 #ifndef JEMALLOC_INTERNAL_INLINES_B_H
 #define JEMALLOC_INTERNAL_INLINES_B_H
 
-#include "jemalloc/internal/emap.h"
 #include "jemalloc/internal/extent.h"
-#include "jemalloc/internal/rtree.h"
 
 /* Choose an arena based on a per-thread value. */
 static inline arena_t *
@@ -77,13 +75,4 @@ arena_is_auto(arena_t *arena) {
 	return (arena_ind_get(arena) < manual_arena_base);
 }
 
-JEMALLOC_ALWAYS_INLINE edata_t *
-iealloc(tsdn_t *tsdn, const void *ptr) {
-	rtree_ctx_t rtree_ctx_fallback;
-	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
-
-	return rtree_edata_read(tsdn, &emap_global.rtree, rtree_ctx,
-	    (uintptr_t)ptr, true);
-}
-
 #endif /* JEMALLOC_INTERNAL_INLINES_B_H */
diff --git a/src/arena.c b/src/arena.c
index 3206a9a6..f7f3ee5c 100644
--- a/src/arena.c
+++ b/src/arena.c
@@ -1637,7 +1637,7 @@ arena_dalloc_promoted(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
 	cassert(config_prof);
 	assert(opt_prof);
 
-	edata_t *edata = iealloc(tsdn, ptr);
+	edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
 	size_t usize = edata_usize_get(edata);
 	size_t bumped_usize = arena_prof_demote(tsdn, edata, ptr);
 	if (config_opt_safety_checks && usize < SC_LARGE_MINCLASS) {
@@ -1769,7 +1769,7 @@ arena_dalloc_bin(tsdn_t *tsdn, arena_t *arena, edata_t *edata, void *ptr) {
 
 void
 arena_dalloc_small(tsdn_t *tsdn, void *ptr) {
-	edata_t *edata = iealloc(tsdn, ptr);
+	edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
 	arena_t *arena = arena_get_from_edata(edata);
 
 	arena_dalloc_bin(tsdn, arena, edata, ptr);
@@ -1783,7 +1783,7 @@ arena_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
 	/* Calls with non-zero extra had to clamp extra. */
 	assert(extra == 0 || size + extra <= SC_LARGE_MAXCLASS);
-	edata_t *edata = iealloc(tsdn, ptr);
+	edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
 	if (unlikely(size > SC_LARGE_MAXCLASS)) {
 		ret = true;
 		goto done;
 	}
@@ -1817,7 +1817,7 @@ arena_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
 		ret = true;
 	}
 done:
-	assert(edata == iealloc(tsdn, ptr));
+	assert(edata == emap_lookup(tsdn, &emap_global, ptr));
 	*newsize = edata_usize_get(edata);
 
 	return ret;
diff --git a/src/ctl.c b/src/ctl.c
index 29909dfb..3f30ef0c 100644
--- a/src/ctl.c
+++ b/src/ctl.c
@@ -2667,7 +2667,7 @@ arenas_lookup_ctl(tsd_t *tsd, const size_t *mib,
 	ret = EINVAL;
 	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
 	WRITE(ptr, void *);
-	edata = iealloc(tsd_tsdn(tsd), ptr);
+	edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
 	if (edata == NULL)
 		goto label_return;
 
diff --git a/src/ehooks.c b/src/ehooks.c
index 5ea73e3e..13d9ab0c 100644
--- a/src/ehooks.c
+++ b/src/ehooks.c
@@ -189,8 +189,8 @@ ehooks_default_split(extent_hooks_t *extent_hooks, void *addr, size_t size,
 
 static inline bool
 ehooks_same_sn(tsdn_t *tsdn, void *addr_a, void *addr_b) {
-	edata_t *a = iealloc(tsdn, addr_a);
-	edata_t *b = iealloc(tsdn, addr_b);
+	edata_t *a = emap_lookup(tsdn, &emap_global, addr_a);
+	edata_t *b = emap_lookup(tsdn, &emap_global, addr_b);
 	return edata_sn_comp(a, b) == 0;
 }
 
@@ -253,9 +253,9 @@ bool
 ehooks_default_merge(extent_hooks_t *extent_hooks, void *addr_a, size_t size_a,
     void *addr_b, size_t size_b, bool committed, unsigned arena_ind) {
 	tsdn_t *tsdn = tsdn_fetch();
-	edata_t *a = iealloc(tsdn, addr_a);
+	edata_t *a = emap_lookup(tsdn, &emap_global, addr_a);
 	bool head_a = edata_is_head_get(a);
-	edata_t *b = iealloc(tsdn, addr_b);
+	edata_t *b = emap_lookup(tsdn, &emap_global, addr_b);
 	bool head_b = edata_is_head_get(b);
 	return ehooks_default_merge_impl(tsdn, addr_a, head_a, addr_b, head_b);
 }
diff --git a/src/inspect.c b/src/inspect.c
index 5ad23a0e..1be3429a 100644
--- a/src/inspect.c
+++ b/src/inspect.c
@@ -6,7 +6,7 @@ inspect_extent_util_stats_get(tsdn_t *tsdn, const void *ptr, size_t *nfree,
     size_t *nregs, size_t *size) {
 	assert(ptr != NULL && nfree != NULL && nregs != NULL && size != NULL);
 
-	const edata_t *edata = iealloc(tsdn, ptr);
+	const edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
 	if (unlikely(edata == NULL)) {
 		*nfree = *nregs = *size = 0;
 		return;
@@ -31,7 +31,7 @@ inspect_extent_util_stats_verbose_get(tsdn_t *tsdn, const void *ptr,
 	assert(ptr != NULL && nfree != NULL && nregs != NULL && size != NULL
 	    && bin_nfree != NULL && bin_nregs != NULL && slabcur_addr != NULL);
 
-	const edata_t *edata = iealloc(tsdn, ptr);
+	const edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
 	if (unlikely(edata == NULL)) {
 		*nfree = *nregs = *size = *bin_nfree = *bin_nregs = 0;
 		*slabcur_addr = NULL;
diff --git a/src/jemalloc.c b/src/jemalloc.c
index 8f34989a..2b4cd277 100644
--- a/src/jemalloc.c
+++ b/src/jemalloc.c
@@ -3423,7 +3423,7 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags) {
 	 * object associated with the ptr (though the content of the edata_t
 	 * object can be changed).
 	 */
-	edata_t *old_edata = iealloc(tsd_tsdn(tsd), ptr);
+	edata_t *old_edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
 
 	alloc_ctx_t alloc_ctx;
 	rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
@@ -3462,7 +3462,7 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags) {
 	 * xallocx() should keep using the same edata_t object (though its
 	 * content can be changed).
 	 */
-	assert(iealloc(tsd_tsdn(tsd), ptr) == old_edata);
+	assert(emap_lookup(tsd_tsdn(tsd), &emap_global, ptr) == old_edata);
 
 	if (unlikely(usize == old_usize)) {
 		te_alloc_rollback(tsd, usize);
diff --git a/src/large.c b/src/large.c
index 2e520981..d393c43c 100644
--- a/src/large.c
+++ b/src/large.c
@@ -272,7 +272,7 @@ void *
 large_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t usize,
     size_t alignment, bool zero, tcache_t *tcache,
     hook_ralloc_args_t *hook_args) {
-	edata_t *edata = iealloc(tsdn, ptr);
+	edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
 
 	size_t oldusize = edata_usize_get(edata);
 	/* The following should have been caught by callers. */
diff --git a/src/prof.c b/src/prof.c
index 248532e8..7b57dd26 100644
--- a/src/prof.c
+++ b/src/prof.c
@@ -148,7 +148,7 @@ prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated) {
 void
 prof_malloc_sample_object(tsd_t *tsd, const void *ptr, size_t size,
     size_t usize, prof_tctx_t *tctx) {
-	edata_t *edata = iealloc(tsd_tsdn(tsd), ptr);
+	edata_t *edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
 	prof_info_set(tsd, edata, tctx);
 
 	malloc_mutex_lock(tsd_tsdn(tsd), tctx->tdata->lock);
diff --git a/src/tcache.c b/src/tcache.c
index 9146f244..e9331d03 100644
--- a/src/tcache.c
+++ b/src/tcache.c
@@ -160,7 +160,8 @@ tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin,
 		    item_edata);
 	} else {
 		for (unsigned i = 0 ; i < nflush; i++) {
-			item_edata[i] = iealloc(tsdn, *(bottom_item - i));
+			item_edata[i] = emap_lookup(tsd_tsdn(tsd), &emap_global,
+			    *(bottom_item - i));
 		}
 	}
 
@@ -258,7 +259,8 @@ tcache_bin_flush_large(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin, szind_t
 #ifndef JEMALLOC_EXTRA_SIZE_CHECK
 	/* Look up edata once per item. */
 	for (unsigned i = 0 ; i < nflush; i++) {
-		item_edata[i] = iealloc(tsd_tsdn(tsd), *(bottom_item - i));
+		item_edata[i] = emap_lookup(tsd_tsdn(tsd), &emap_global,
+		    *(bottom_item - i));
 	}
 #else
 	tbin_extents_lookup_size_check(tsd_tsdn(tsd), tbin, binind, nflush,
diff --git a/test/unit/binshard.c b/test/unit/binshard.c
index d9a0d599..d5f43df1 100644
--- a/test/unit/binshard.c
+++ b/test/unit/binshard.c
@@ -62,12 +62,12 @@ thd_start(void *varg) {
 	ptr = mallocx(1, MALLOCX_TCACHE_NONE);
 	ptr2 = mallocx(129, MALLOCX_TCACHE_NONE);
 
-	edata = iealloc(tsdn, ptr);
+	edata = emap_lookup(tsdn, &emap_global, ptr);
 	shard1 = edata_binshard_get(edata);
 	dallocx(ptr, 0);
 	assert_u_lt(shard1, 16, "Unexpected bin shard used");
 
-	edata = iealloc(tsdn, ptr2);
+	edata = emap_lookup(tsdn, &emap_global, ptr2);
 	shard2 = edata_binshard_get(edata);
 	dallocx(ptr2, 0);
 	assert_u_lt(shard2, 4, "Unexpected bin shard used");
diff --git a/test/unit/prof_recent.c b/test/unit/prof_recent.c
index 3c10618f..a8761ca9 100644
--- a/test/unit/prof_recent.c
+++ b/test/unit/prof_recent.c
@@ -101,7 +101,7 @@ TEST_END
 static void
 confirm_malloc(tsd_t *tsd, void *p) {
 	assert_ptr_not_null(p, "malloc failed unexpectedly");
-	edata_t *e = iealloc(TSDN_NULL, p);
+	edata_t *e = emap_lookup(TSDN_NULL, &emap_global, p);
 	assert_ptr_not_null(e, "NULL edata for living pointer");
 	malloc_mutex_lock(tsd_tsdn(tsd), &prof_recent_alloc_mtx);
 	prof_recent_t *n = edata_prof_recent_alloc_get(tsd, e);
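
Note (not part of the patch): for reference, a minimal sketch of the call pattern that replaces iealloc() at every converted site above. It assumes the jemalloc-internal types and the global emap pulled in via "jemalloc/internal/emap.h"; the helper name usize_of_ptr is hypothetical and exists only for this illustration.

/*
 * Illustrative sketch only: look a pointer's extent up through the global
 * emap and read metadata off the returned edata_t, as the converted call
 * sites in this patch do.
 */
static inline size_t
usize_of_ptr(tsdn_t *tsdn, const void *ptr) {
	/* Replaces the old iealloc(tsdn, ptr) lookup. */
	edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
	/* Extent metadata accessors are unchanged. */
	return edata_usize_get(edata);
}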