Emap: Standardize naming.
Namespace everything under emap_, always specify what it is we're looking up (emap_lookup -> emap_edata_lookup), and use "ctx" over "info".
parent ac50c1e44b
commit 7e6c8a7286
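At a glance, the renames applied throughout this commit (a summary distilled from the hunks below, not part of the commit itself):

    alloc_ctx_t                     -> emap_alloc_ctx_t
    emap_lookup                     -> emap_edata_lookup
    emap_alloc_info_lookup          -> emap_alloc_ctx_lookup
    emap_alloc_info_try_lookup_fast -> emap_alloc_ctx_try_lookup_fast
    emap_full_alloc_info_lookup     -> emap_full_alloc_ctx_lookup
    emap_full_alloc_info_try_lookup -> emap_full_alloc_ctx_try_lookup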
@@ -37,7 +37,7 @@ arena_choose_maybe_huge(tsd_t *tsd, arena_t *arena, size_t size) {
 }
 
 JEMALLOC_ALWAYS_INLINE void
-arena_prof_info_get(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx,
+arena_prof_info_get(tsd_t *tsd, const void *ptr, emap_alloc_ctx_t *alloc_ctx,
     prof_info_t *prof_info, bool reset_recent) {
 	cassert(config_prof);
 	assert(ptr != NULL);
@@ -48,10 +48,10 @@ arena_prof_info_get(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx,
 
 	/* Static check. */
 	if (alloc_ctx == NULL) {
-		edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
+		edata = emap_edata_lookup(tsd_tsdn(tsd), &emap_global, ptr);
 		is_slab = edata_slab_get(edata);
 	} else if (unlikely(!(is_slab = alloc_ctx->slab))) {
-		edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
+		edata = emap_edata_lookup(tsd_tsdn(tsd), &emap_global, ptr);
 	}
 
 	if (unlikely(!is_slab)) {
@@ -68,19 +68,21 @@ arena_prof_info_get(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx,
 }
 
 JEMALLOC_ALWAYS_INLINE void
-arena_prof_tctx_reset(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx) {
+arena_prof_tctx_reset(tsd_t *tsd, const void *ptr,
+    emap_alloc_ctx_t *alloc_ctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);
 
 	/* Static check. */
 	if (alloc_ctx == NULL) {
-		edata_t *edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
+		edata_t *edata = emap_edata_lookup(tsd_tsdn(tsd), &emap_global,
+		    ptr);
 		if (unlikely(!edata_slab_get(edata))) {
 			large_prof_tctx_reset(edata);
 		}
 	} else {
 		if (unlikely(!alloc_ctx->slab)) {
-			edata_t *edata = emap_lookup(tsd_tsdn(tsd),
+			edata_t *edata = emap_edata_lookup(tsd_tsdn(tsd),
 			    &emap_global, ptr);
 			large_prof_tctx_reset(edata);
 		}
@@ -92,7 +94,7 @@ arena_prof_tctx_reset_sampled(tsd_t *tsd, const void *ptr) {
 	cassert(config_prof);
 	assert(ptr != NULL);
 
-	edata_t *edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
+	edata_t *edata = emap_edata_lookup(tsd_tsdn(tsd), &emap_global, ptr);
 	assert(!edata_slab_get(edata));
 
 	large_prof_tctx_reset(edata);
@@ -180,7 +182,7 @@ arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero,
 
 JEMALLOC_ALWAYS_INLINE arena_t *
 arena_aalloc(tsdn_t *tsdn, const void *ptr) {
-	edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+	edata_t *edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 	unsigned arena_ind = edata_arena_ind_get(edata);
 	return (arena_t *)atomic_load_p(&arenas[arena_ind], ATOMIC_RELAXED);
 }
@@ -188,8 +190,8 @@ arena_aalloc(tsdn_t *tsdn, const void *ptr) {
 JEMALLOC_ALWAYS_INLINE size_t
 arena_salloc(tsdn_t *tsdn, const void *ptr) {
 	assert(ptr != NULL);
-	alloc_ctx_t alloc_ctx;
-	emap_alloc_info_lookup(tsdn, &emap_global, ptr, &alloc_ctx);
+	emap_alloc_ctx_t alloc_ctx;
+	emap_alloc_ctx_lookup(tsdn, &emap_global, ptr, &alloc_ctx);
 	assert(alloc_ctx.szind != SC_NSIZES);
 
 	return sz_index2size(alloc_ctx.szind);
@@ -207,7 +209,7 @@ arena_vsalloc(tsdn_t *tsdn, const void *ptr) {
 	 */
 
 	emap_full_alloc_ctx_t full_alloc_ctx;
-	bool missing = emap_full_alloc_info_try_lookup(tsdn, &emap_global, ptr,
+	bool missing = emap_full_alloc_ctx_try_lookup(tsdn, &emap_global, ptr,
 	    &full_alloc_ctx);
 	if (missing) {
 		return 0;
@@ -231,7 +233,7 @@ arena_dalloc_large_no_tcache(tsdn_t *tsdn, void *ptr, szind_t szind) {
 	if (config_prof && unlikely(szind < SC_NBINS)) {
 		arena_dalloc_promoted(tsdn, ptr, NULL, true);
 	} else {
-		edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+		edata_t *edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 		large_dalloc(tsdn, edata);
 	}
 }
@@ -240,11 +242,11 @@ static inline void
 arena_dalloc_no_tcache(tsdn_t *tsdn, void *ptr) {
 	assert(ptr != NULL);
 
-	alloc_ctx_t alloc_ctx;
-	emap_alloc_info_lookup(tsdn, &emap_global, ptr, &alloc_ctx);
+	emap_alloc_ctx_t alloc_ctx;
+	emap_alloc_ctx_lookup(tsdn, &emap_global, ptr, &alloc_ctx);
 
 	if (config_debug) {
-		edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+		edata_t *edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 		assert(alloc_ctx.szind == edata_szind_get(edata));
 		assert(alloc_ctx.szind < SC_NSIZES);
 		assert(alloc_ctx.slab == edata_slab_get(edata));
@@ -269,14 +271,14 @@ arena_dalloc_large(tsdn_t *tsdn, void *ptr, tcache_t *tcache, szind_t szind,
 			    slow_path);
 		}
 	} else {
-		edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+		edata_t *edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 		large_dalloc(tsdn, edata);
 	}
 }
 
 JEMALLOC_ALWAYS_INLINE void
 arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
-    alloc_ctx_t *caller_alloc_ctx, bool slow_path) {
+    emap_alloc_ctx_t *caller_alloc_ctx, bool slow_path) {
 	assert(!tsdn_null(tsdn) || tcache == NULL);
 	assert(ptr != NULL);
 
@@ -285,16 +287,16 @@ arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
 		return;
 	}
 
-	alloc_ctx_t alloc_ctx;
+	emap_alloc_ctx_t alloc_ctx;
 	if (caller_alloc_ctx != NULL) {
 		alloc_ctx = *caller_alloc_ctx;
 	} else {
 		util_assume(!tsdn_null(tsdn));
-		emap_alloc_info_lookup(tsdn, &emap_global, ptr, &alloc_ctx);
+		emap_alloc_ctx_lookup(tsdn, &emap_global, ptr, &alloc_ctx);
 	}
 
 	if (config_debug) {
-		edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+		edata_t *edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 		assert(alloc_ctx.szind == edata_szind_get(edata));
 		assert(alloc_ctx.szind < SC_NSIZES);
 		assert(alloc_ctx.slab == edata_slab_get(edata));
@@ -315,7 +317,7 @@ arena_sdalloc_no_tcache(tsdn_t *tsdn, void *ptr, size_t size) {
 	assert(ptr != NULL);
 	assert(size <= SC_LARGE_MAXCLASS);
 
-	alloc_ctx_t alloc_ctx;
+	emap_alloc_ctx_t alloc_ctx;
 	if (!config_prof || !opt_prof) {
 		/*
 		 * There is no risk of being confused by a promoted sampled
@@ -326,14 +328,15 @@ arena_sdalloc_no_tcache(tsdn_t *tsdn, void *ptr, size_t size) {
 	}
 
 	if ((config_prof && opt_prof) || config_debug) {
-		emap_alloc_info_lookup(tsdn, &emap_global, ptr, &alloc_ctx);
+		emap_alloc_ctx_lookup(tsdn, &emap_global, ptr, &alloc_ctx);
 
 		assert(alloc_ctx.szind == sz_size2index(size));
 		assert((config_prof && opt_prof)
 		    || alloc_ctx.slab == (alloc_ctx.szind < SC_NBINS));
 
 		if (config_debug) {
-			edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+			edata_t *edata = emap_edata_lookup(tsdn, &emap_global,
+			    ptr);
 			assert(alloc_ctx.szind == edata_szind_get(edata));
 			assert(alloc_ctx.slab == edata_slab_get(edata));
 		}
@@ -349,7 +352,7 @@ arena_sdalloc_no_tcache(tsdn_t *tsdn, void *ptr, size_t size) {
 
 JEMALLOC_ALWAYS_INLINE void
 arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
-    alloc_ctx_t *caller_alloc_ctx, bool slow_path) {
+    emap_alloc_ctx_t *caller_alloc_ctx, bool slow_path) {
 	assert(!tsdn_null(tsdn) || tcache == NULL);
 	assert(ptr != NULL);
 	assert(size <= SC_LARGE_MAXCLASS);
@@ -359,11 +362,11 @@ arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
 		return;
 	}
 
-	alloc_ctx_t alloc_ctx;
+	emap_alloc_ctx_t alloc_ctx;
 	if (config_prof && opt_prof) {
 		if (caller_alloc_ctx == NULL) {
 			/* Uncommon case and should be a static check. */
-			emap_alloc_info_lookup(tsdn, &emap_global, ptr,
+			emap_alloc_ctx_lookup(tsdn, &emap_global, ptr,
 			    &alloc_ctx);
 			assert(alloc_ctx.szind == sz_size2index(size));
 		} else {
@@ -379,7 +382,7 @@ arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
 	}
 
 	if (config_debug) {
-		edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+		edata_t *edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 		assert(alloc_ctx.szind == edata_szind_get(edata));
 		assert(alloc_ctx.slab == edata_slab_get(edata));
 	}
@@ -12,8 +12,8 @@ struct emap_s {
 };
 
 /* Used to pass rtree lookup context down the path. */
-typedef struct alloc_ctx_t alloc_ctx_t;
-struct alloc_ctx_t {
+typedef struct emap_alloc_ctx_t emap_alloc_ctx_t;
+struct emap_alloc_ctx_t {
 	szind_t szind;
 	bool slab;
 };
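The struct is unchanged apart from the name; the caller-side pattern after the rename looks like the following minimal sketch (mirroring arena_salloc in the arena inlines above; the local variable names are illustrative):

	/* Look up the size-class index and slab bit for an allocated ptr. */
	emap_alloc_ctx_t alloc_ctx;
	emap_alloc_ctx_lookup(tsdn, &emap_global, ptr, &alloc_ctx);
	assert(alloc_ctx.szind != SC_NSIZES);	/* ptr must be mapped. */
	size_t usize = sz_index2size(alloc_ctx.szind);
	bool is_slab = alloc_ctx.slab;	/* Small allocations live in slabs. */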
@@ -133,7 +133,7 @@ emap_assert_mapped(tsdn_t *tsdn, emap_t *emap, edata_t *edata) {
 }
 
 JEMALLOC_ALWAYS_INLINE edata_t *
-emap_lookup(tsdn_t *tsdn, emap_t *emap, const void *ptr) {
+emap_edata_lookup(tsdn_t *tsdn, emap_t *emap, const void *ptr) {
 	rtree_ctx_t rtree_ctx_fallback;
 	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
 
@@ -143,8 +143,8 @@ emap_lookup(tsdn_t *tsdn, emap_t *emap, const void *ptr) {
 
 /* Fills in alloc_ctx with the info in the map. */
 JEMALLOC_ALWAYS_INLINE void
-emap_alloc_info_lookup(tsdn_t *tsdn, emap_t *emap, const void *ptr,
-    alloc_ctx_t *alloc_ctx) {
+emap_alloc_ctx_lookup(tsdn_t *tsdn, emap_t *emap, const void *ptr,
+    emap_alloc_ctx_t *alloc_ctx) {
 	rtree_ctx_t rtree_ctx_fallback;
 	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
 
@@ -154,7 +154,7 @@ emap_alloc_info_lookup(tsdn_t *tsdn, emap_t *emap, const void *ptr,
 
 /* The pointer must be mapped. */
 JEMALLOC_ALWAYS_INLINE void
-emap_full_alloc_info_lookup(tsdn_t *tsdn, emap_t *emap, const void *ptr,
+emap_full_alloc_ctx_lookup(tsdn_t *tsdn, emap_t *emap, const void *ptr,
     emap_full_alloc_ctx_t *full_alloc_ctx) {
 	rtree_ctx_t rtree_ctx_fallback;
 	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
@@ -170,7 +170,7 @@ emap_full_alloc_info_lookup(tsdn_t *tsdn, emap_t *emap, const void *ptr,
  * Returns true when the pointer is not present.
  */
 JEMALLOC_ALWAYS_INLINE bool
-emap_full_alloc_info_try_lookup(tsdn_t *tsdn, emap_t *emap, const void *ptr,
+emap_full_alloc_ctx_try_lookup(tsdn_t *tsdn, emap_t *emap, const void *ptr,
     emap_full_alloc_ctx_t *full_alloc_ctx) {
 	rtree_ctx_t rtree_ctx_fallback;
 	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
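The try-variant is the one to reach for when the pointer may not be mapped at all: it returns true on a miss rather than asserting. A caller-side sketch of the renamed API (the same pattern as the vsalloc test helper further down):

	emap_full_alloc_ctx_t full_alloc_ctx;
	bool missing = emap_full_alloc_ctx_try_lookup(tsdn, &emap_global, ptr,
	    &full_alloc_ctx);
	if (missing) {
		return 0;	/* ptr is not a live jemalloc allocation. */
	}
	/* Otherwise full_alloc_ctx.edata, .szind, and .slab are filled in. */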
@@ -187,8 +187,8 @@ emap_full_alloc_info_try_lookup(tsdn_t *tsdn, emap_t *emap, const void *ptr,
  * Returns whether or not alloc_ctx was filled in.
  */
 JEMALLOC_ALWAYS_INLINE bool
-emap_alloc_info_try_lookup_fast(tsd_t *tsd, emap_t *emap, const void *ptr,
-    alloc_ctx_t *alloc_ctx) {
+emap_alloc_ctx_try_lookup_fast(tsd_t *tsd, emap_t *emap, const void *ptr,
+    emap_alloc_ctx_t *alloc_ctx) {
 	rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
 	bool res = rtree_szind_slab_read_fast(tsd_tsdn(tsd), &emap->rtree,
 	    rtree_ctx, (uintptr_t)ptr, &alloc_ctx->szind, &alloc_ctx->slab);
@@ -101,8 +101,8 @@ ivsalloc(tsdn_t *tsdn, const void *ptr) {
 }
 
 JEMALLOC_ALWAYS_INLINE void
-idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, alloc_ctx_t *alloc_ctx,
-    bool is_internal, bool slow_path) {
+idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
+    emap_alloc_ctx_t *alloc_ctx, bool is_internal, bool slow_path) {
 	assert(ptr != NULL);
 	assert(!is_internal || tcache == NULL);
 	assert(!is_internal || arena_is_auto(iaalloc(tsdn, ptr)));
@@ -125,7 +125,7 @@ idalloc(tsd_t *tsd, void *ptr) {
 
 JEMALLOC_ALWAYS_INLINE void
 isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
-    alloc_ctx_t *alloc_ctx, bool slow_path) {
+    emap_alloc_ctx_t *alloc_ctx, bool slow_path) {
 	witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
 	    WITNESS_RANK_CORE, 0);
 	arena_sdalloc(tsdn, ptr, size, tcache, alloc_ctx, slow_path);
@@ -40,7 +40,7 @@ prof_tdata_get(tsd_t *tsd, bool create) {
 }
 
 JEMALLOC_ALWAYS_INLINE void
-prof_info_get(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx,
+prof_info_get(tsd_t *tsd, const void *ptr, emap_alloc_ctx_t *alloc_ctx,
     prof_info_t *prof_info) {
 	cassert(config_prof);
 	assert(ptr != NULL);
@@ -51,7 +51,7 @@ prof_info_get(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx,
 
 JEMALLOC_ALWAYS_INLINE void
 prof_info_get_and_reset_recent(tsd_t *tsd, const void *ptr,
-    alloc_ctx_t *alloc_ctx, prof_info_t *prof_info) {
+    emap_alloc_ctx_t *alloc_ctx, prof_info_t *prof_info) {
 	cassert(config_prof);
 	assert(ptr != NULL);
 	assert(prof_info != NULL);
@@ -60,7 +60,7 @@ prof_info_get_and_reset_recent(tsd_t *tsd, const void *ptr,
 }
 
 JEMALLOC_ALWAYS_INLINE void
-prof_tctx_reset(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx) {
+prof_tctx_reset(tsd_t *tsd, const void *ptr, emap_alloc_ctx_t *alloc_ctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);
 
@@ -127,7 +127,7 @@ prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update) {
 
 JEMALLOC_ALWAYS_INLINE void
 prof_malloc(tsd_t *tsd, const void *ptr, size_t size, size_t usize,
-    alloc_ctx_t *alloc_ctx, prof_tctx_t *tctx) {
+    emap_alloc_ctx_t *alloc_ctx, prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);
 	assert(usize == isalloc(tsd_tsdn(tsd), ptr));
@@ -214,7 +214,8 @@ prof_sample_aligned(const void *ptr) {
 }
 
 JEMALLOC_ALWAYS_INLINE void
-prof_free(tsd_t *tsd, const void *ptr, size_t usize, alloc_ctx_t *alloc_ctx) {
+prof_free(tsd_t *tsd, const void *ptr, size_t usize,
+    emap_alloc_ctx_t *alloc_ctx) {
 	prof_info_t prof_info;
 	prof_info_get_and_reset_recent(tsd, ptr, alloc_ctx, &prof_info);
 
src/arena.c
@@ -1110,8 +1110,8 @@ arena_reset(tsd_t *tsd, arena_t *arena) {
 			size_t usize;
 
 			malloc_mutex_unlock(tsd_tsdn(tsd), &arena->large_mtx);
-			alloc_ctx_t alloc_ctx;
-			emap_alloc_info_lookup(tsd_tsdn(tsd), &emap_global, ptr,
+			emap_alloc_ctx_t alloc_ctx;
+			emap_alloc_ctx_lookup(tsd_tsdn(tsd), &emap_global, ptr,
 			    &alloc_ctx);
 			assert(alloc_ctx.szind != SC_NSIZES);
 
@@ -1597,7 +1597,7 @@ arena_prof_promote(tsdn_t *tsdn, void *ptr, size_t usize) {
 		safety_check_set_redzone(ptr, usize, SC_LARGE_MINCLASS);
 	}
 
-	edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+	edata_t *edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 
 	szind_t szind = sz_size2index(usize);
 	emap_remap(tsdn, &emap_global, edata, szind, false);
@@ -1625,7 +1625,7 @@ arena_dalloc_promoted(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
 	cassert(config_prof);
 	assert(opt_prof);
 
-	edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+	edata_t *edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 	size_t usize = edata_usize_get(edata);
 	size_t bumped_usize = arena_prof_demote(tsdn, edata, ptr);
 	if (config_opt_safety_checks && usize < SC_LARGE_MINCLASS) {
@@ -1757,7 +1757,7 @@ arena_dalloc_bin(tsdn_t *tsdn, arena_t *arena, edata_t *edata, void *ptr) {
 
 void
 arena_dalloc_small(tsdn_t *tsdn, void *ptr) {
-	edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+	edata_t *edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 	arena_t *arena = arena_get_from_edata(edata);
 
 	arena_dalloc_bin(tsdn, arena, edata, ptr);
@@ -1771,7 +1771,7 @@ arena_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
 	/* Calls with non-zero extra had to clamp extra. */
 	assert(extra == 0 || size + extra <= SC_LARGE_MAXCLASS);
 
-	edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+	edata_t *edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 	if (unlikely(size > SC_LARGE_MAXCLASS)) {
 		ret = true;
 		goto done;
@@ -1805,7 +1805,7 @@ arena_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
 		ret = true;
 	}
 done:
-	assert(edata == emap_lookup(tsdn, &emap_global, ptr));
+	assert(edata == emap_edata_lookup(tsdn, &emap_global, ptr));
 	*newsize = edata_usize_get(edata);
 
 	return ret;
@@ -2667,7 +2667,7 @@ arenas_lookup_ctl(tsd_t *tsd, const size_t *mib,
 	ret = EINVAL;
 	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
 	WRITE(ptr, void *);
-	edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
+	edata = emap_edata_lookup(tsd_tsdn(tsd), &emap_global, ptr);
 	if (edata == NULL)
 		goto label_return;
 
@@ -189,8 +189,8 @@ ehooks_default_split(extent_hooks_t *extent_hooks, void *addr, size_t size,
 
 static inline bool
 ehooks_same_sn(tsdn_t *tsdn, void *addr_a, void *addr_b) {
-	edata_t *a = emap_lookup(tsdn, &emap_global, addr_a);
-	edata_t *b = emap_lookup(tsdn, &emap_global, addr_b);
+	edata_t *a = emap_edata_lookup(tsdn, &emap_global, addr_a);
+	edata_t *b = emap_edata_lookup(tsdn, &emap_global, addr_b);
 	return edata_sn_comp(a, b) == 0;
 }
 
@@ -253,9 +253,9 @@ bool
 ehooks_default_merge(extent_hooks_t *extent_hooks, void *addr_a, size_t size_a,
     void *addr_b, size_t size_b, bool committed, unsigned arena_ind) {
 	tsdn_t *tsdn = tsdn_fetch();
-	edata_t *a = emap_lookup(tsdn, &emap_global, addr_a);
+	edata_t *a = emap_edata_lookup(tsdn, &emap_global, addr_a);
 	bool head_a = edata_is_head_get(a);
-	edata_t *b = emap_lookup(tsdn, &emap_global, addr_b);
+	edata_t *b = emap_edata_lookup(tsdn, &emap_global, addr_b);
 	bool head_b = edata_is_head_get(b);
 	return ehooks_default_merge_impl(tsdn, addr_a, head_a, addr_b, head_b);
 }
@@ -6,7 +6,7 @@ inspect_extent_util_stats_get(tsdn_t *tsdn, const void *ptr, size_t *nfree,
     size_t *nregs, size_t *size) {
 	assert(ptr != NULL && nfree != NULL && nregs != NULL && size != NULL);
 
-	const edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+	const edata_t *edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 	if (unlikely(edata == NULL)) {
 		*nfree = *nregs = *size = 0;
 		return;
@@ -31,7 +31,7 @@ inspect_extent_util_stats_verbose_get(tsdn_t *tsdn, const void *ptr,
 	assert(ptr != NULL && nfree != NULL && nregs != NULL && size != NULL
 	    && bin_nfree != NULL && bin_nregs != NULL && slabcur_addr != NULL);
 
-	const edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+	const edata_t *edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 	if (unlikely(edata == NULL)) {
 		*nfree = *nregs = *size = *bin_nfree = *bin_nregs = 0;
 		*slabcur_addr = NULL;
@@ -2170,7 +2170,7 @@ imalloc_body(static_opts_t *sopts, dynamic_opts_t *dopts, tsd_t *tsd) {
 		prof_tctx_t *tctx = prof_alloc_prep(
 		    tsd, usize, prof_active_get_unlocked(), true);
 
-		alloc_ctx_t alloc_ctx;
+		emap_alloc_ctx_t alloc_ctx;
 		if (likely((uintptr_t)tctx == (uintptr_t)1U)) {
 			alloc_ctx.slab = (usize <= SC_SMALL_MAXCLASS);
 			allocation = imalloc_no_sample(
@@ -2567,8 +2567,8 @@ ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) {
 	assert(ptr != NULL);
 	assert(malloc_initialized() || IS_INITIALIZER);
 
-	alloc_ctx_t alloc_ctx;
-	emap_alloc_info_lookup(tsd_tsdn(tsd), &emap_global, ptr, &alloc_ctx);
+	emap_alloc_ctx_t alloc_ctx;
+	emap_alloc_ctx_lookup(tsd_tsdn(tsd), &emap_global, ptr, &alloc_ctx);
 	assert(alloc_ctx.szind != SC_NSIZES);
 
 	size_t usize = sz_index2size(alloc_ctx.szind);
@@ -2599,7 +2599,7 @@ isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) {
 	assert(ptr != NULL);
 	assert(malloc_initialized() || IS_INITIALIZER);
 
-	alloc_ctx_t alloc_ctx;
+	emap_alloc_ctx_t alloc_ctx;
 	if (!config_prof) {
 		alloc_ctx.szind = sz_size2index(usize);
 		alloc_ctx.slab = (alloc_ctx.szind < SC_NBINS);
@@ -2617,14 +2617,14 @@ isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) {
 			alloc_ctx.slab = true;
 		}
 		if (config_debug) {
-			alloc_ctx_t dbg_ctx;
-			emap_alloc_info_lookup(tsd_tsdn(tsd),
+			emap_alloc_ctx_t dbg_ctx;
+			emap_alloc_ctx_lookup(tsd_tsdn(tsd),
 			    &emap_global, ptr, &dbg_ctx);
 			assert(dbg_ctx.szind == alloc_ctx.szind);
 			assert(dbg_ctx.slab == alloc_ctx.slab);
 		}
 	} else if (opt_prof) {
-		emap_alloc_info_lookup(tsd_tsdn(tsd), &emap_global,
+		emap_alloc_ctx_lookup(tsd_tsdn(tsd), &emap_global,
 		    ptr, &alloc_ctx);
 
 		if (config_opt_safety_checks) {
@@ -2693,12 +2693,12 @@ JEMALLOC_ALWAYS_INLINE
 bool free_fastpath(void *ptr, size_t size, bool size_hint) {
 	tsd_t *tsd = tsd_get(false);
 
-	alloc_ctx_t alloc_ctx;
+	emap_alloc_ctx_t alloc_ctx;
 	if (!size_hint) {
 		if (unlikely(tsd == NULL || !tsd_fast(tsd))) {
 			return false;
 		}
-		bool res = emap_alloc_info_try_lookup_fast(tsd, &emap_global,
+		bool res = emap_alloc_ctx_try_lookup_fast(tsd, &emap_global,
 		    ptr, &alloc_ctx);
 
 		/* Note: profiled objects will have alloc_ctx.slab set */
@@ -3069,7 +3069,8 @@ irallocx_prof_sample(tsdn_t *tsdn, void *old_ptr, size_t old_usize,
 JEMALLOC_ALWAYS_INLINE void *
 irallocx_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t size,
     size_t alignment, size_t *usize, bool zero, tcache_t *tcache,
-    arena_t *arena, alloc_ctx_t *alloc_ctx, hook_ralloc_args_t *hook_args) {
+    arena_t *arena, emap_alloc_ctx_t *alloc_ctx,
+    hook_ralloc_args_t *hook_args) {
 	prof_info_t old_prof_info;
 	prof_info_get_and_reset_recent(tsd, old_ptr, alloc_ctx, &old_prof_info);
 	bool prof_active = prof_active_get_unlocked();
@@ -3141,8 +3142,8 @@ do_rallocx(void *ptr, size_t size, int flags, bool is_realloc) {
 		tcache = tcache_get(tsd);
 	}
 
-	alloc_ctx_t alloc_ctx;
-	emap_alloc_info_lookup(tsd_tsdn(tsd), &emap_global, ptr, &alloc_ctx);
+	emap_alloc_ctx_t alloc_ctx;
+	emap_alloc_ctx_lookup(tsd_tsdn(tsd), &emap_global, ptr, &alloc_ctx);
 	assert(alloc_ctx.szind != SC_NSIZES);
 	old_usize = sz_index2size(alloc_ctx.szind);
 	assert(old_usize == isalloc(tsd_tsdn(tsd), ptr));
@@ -3315,7 +3316,7 @@ ixallocx_prof_sample(tsdn_t *tsdn, void *ptr, size_t old_usize, size_t size,
 
 JEMALLOC_ALWAYS_INLINE size_t
 ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size,
-    size_t extra, size_t alignment, bool zero, alloc_ctx_t *alloc_ctx) {
+    size_t extra, size_t alignment, bool zero, emap_alloc_ctx_t *alloc_ctx) {
 	/*
 	 * old_prof_info is only used for asserting that the profiling info
 	 * isn't changed by the ixalloc() call.
@@ -3416,10 +3417,11 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags) {
 	 * object associated with the ptr (though the content of the edata_t
 	 * object can be changed).
 	 */
-	edata_t *old_edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
+	edata_t *old_edata = emap_edata_lookup(tsd_tsdn(tsd), &emap_global,
+	    ptr);
 
-	alloc_ctx_t alloc_ctx;
-	emap_alloc_info_lookup(tsd_tsdn(tsd), &emap_global, ptr, &alloc_ctx);
+	emap_alloc_ctx_t alloc_ctx;
+	emap_alloc_ctx_lookup(tsd_tsdn(tsd), &emap_global, ptr, &alloc_ctx);
 	assert(alloc_ctx.szind != SC_NSIZES);
 	old_usize = sz_index2size(alloc_ctx.szind);
 	assert(old_usize == isalloc(tsd_tsdn(tsd), ptr));
@@ -3453,7 +3455,8 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags) {
 	 * xallocx() should keep using the same edata_t object (though its
 	 * content can be changed).
 	 */
-	assert(emap_lookup(tsd_tsdn(tsd), &emap_global, ptr) == old_edata);
+	assert(emap_edata_lookup(tsd_tsdn(tsd), &emap_global, ptr)
+	    == old_edata);
 
 	if (unlikely(usize == old_usize)) {
 		te_alloc_rollback(tsd, usize);
@@ -269,7 +269,7 @@ void *
 large_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t usize,
     size_t alignment, bool zero, tcache_t *tcache,
     hook_ralloc_args_t *hook_args) {
-	edata_t *edata = emap_lookup(tsdn, &emap_global, ptr);
+	edata_t *edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 
 	size_t oldusize = edata_usize_get(edata);
 	/* The following should have been caught by callers. */
@@ -148,7 +148,7 @@ prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated) {
 void
 prof_malloc_sample_object(tsd_t *tsd, const void *ptr, size_t size,
     size_t usize, prof_tctx_t *tctx) {
-	edata_t *edata = emap_lookup(tsd_tsdn(tsd), &emap_global, ptr);
+	edata_t *edata = emap_edata_lookup(tsd_tsdn(tsd), &emap_global, ptr);
 	prof_info_set(tsd, edata, tctx);
 
 	malloc_mutex_lock(tsd_tsdn(tsd), tctx->tdata->lock);
@@ -129,7 +129,7 @@ tbin_edatas_lookup_size_check(tsd_t *tsd, cache_bin_t *tbin, szind_t binind,
 	void **bottom_item = cache_bin_bottom_item_get(tbin, binind);
 	for (unsigned i = 0 ; i < nflush; i++) {
 		emap_full_alloc_ctx_t full_alloc_ctx;
-		emap_full_alloc_info_lookup(tsd_tsdn(tsd), &emap_global,
+		emap_full_alloc_ctx_lookup(tsd_tsdn(tsd), &emap_global,
 		    *(bottom_item - i), &full_alloc_ctx);
 		edatas[i] = full_alloc_ctx.edata;
 		szind_sum -= full_alloc_ctx.szind;
@@ -160,8 +160,8 @@ tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin,
 		    item_edata);
 	} else {
 		for (unsigned i = 0 ; i < nflush; i++) {
-			item_edata[i] = emap_lookup(tsd_tsdn(tsd), &emap_global,
-			    *(bottom_item - i));
+			item_edata[i] = emap_edata_lookup(tsd_tsdn(tsd),
+			    &emap_global, *(bottom_item - i));
 		}
 	}
 
@@ -259,7 +259,7 @@ tcache_bin_flush_large(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin, szind_t
 #ifndef JEMALLOC_EXTRA_SIZE_CHECK
 	/* Look up edata once per item. */
 	for (unsigned i = 0 ; i < nflush; i++) {
-		item_edata[i] = emap_lookup(tsd_tsdn(tsd), &emap_global,
+		item_edata[i] = emap_edata_lookup(tsd_tsdn(tsd), &emap_global,
 		    *(bottom_item - i));
 	}
 #else
@@ -61,8 +61,8 @@ get_large_size(size_t ind) {
 static size_t
 vsalloc(tsdn_t *tsdn, const void *ptr) {
 	emap_full_alloc_ctx_t full_alloc_ctx;
-	bool missing = emap_full_alloc_info_try_lookup(tsdn, &emap_global,
-	    ptr, &full_alloc_ctx);
+	bool missing = emap_full_alloc_ctx_try_lookup(tsdn, &emap_global, ptr,
+	    &full_alloc_ctx);
 	if (missing) {
 		return 0;
 	}
@@ -62,12 +62,12 @@ thd_start(void *varg) {
 	ptr = mallocx(1, MALLOCX_TCACHE_NONE);
 	ptr2 = mallocx(129, MALLOCX_TCACHE_NONE);
 
-	edata = emap_lookup(tsdn, &emap_global, ptr);
+	edata = emap_edata_lookup(tsdn, &emap_global, ptr);
 	shard1 = edata_binshard_get(edata);
 	dallocx(ptr, 0);
 	assert_u_lt(shard1, 16, "Unexpected bin shard used");
 
-	edata = emap_lookup(tsdn, &emap_global, ptr2);
+	edata = emap_edata_lookup(tsdn, &emap_global, ptr2);
 	shard2 = edata_binshard_get(edata);
 	dallocx(ptr2, 0);
 	assert_u_lt(shard2, 4, "Unexpected bin shard used");
@@ -101,7 +101,7 @@ TEST_END
 
 static void confirm_malloc(tsd_t *tsd, void *p) {
 	assert_ptr_not_null(p, "malloc failed unexpectedly");
-	edata_t *e = emap_lookup(TSDN_NULL, &emap_global, p);
+	edata_t *e = emap_edata_lookup(TSDN_NULL, &emap_global, p);
 	assert_ptr_not_null(e, "NULL edata for living pointer");
 	malloc_mutex_lock(tsd_tsdn(tsd), &prof_recent_alloc_mtx);
 	prof_recent_t *n = edata_prof_recent_alloc_get(tsd, e);