Pass alloc_ctx down profiling path.
With this change, when profiling is enabled, we avoid redundant rtree lookups. Also renamed dalloc_ctx_t to alloc_ctx_t, as it's now used on the allocation path as well (to speed up profiling).
parent f35213bae4
commit ccfe68a916
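In outline: the deallocation path already read the extent's size class (szind) and slab bit out of the rtree in a single lookup; this commit threads that result through the profiling hooks too, instead of letting them re-derive it via iealloc(). Below is a minimal compilable sketch of the pattern — every fake_-prefixed name is a hypothetical stand-in for a jemalloc internal (rtree_szind_slab_read(), index2size(), prof_free(), idalloctm()), not the real API:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

typedef unsigned szind_t;

/* Renamed from dalloc_ctx_t: rtree lookup results, now shared by the
 * allocation/profiling path as well as the deallocation path. */
typedef struct {
	szind_t szind;	/* size class index */
	bool slab;	/* small (slab-backed) vs. large */
} alloc_ctx_t;

/* Stand-in for the (notionally expensive) rtree metadata read. */
static void
fake_rtree_szind_slab_read(const void *ptr, szind_t *szind, bool *slab) {
	(void)ptr;
	*szind = 3;
	*slab = true;
}

/* Toy size-class mapping; jemalloc's index2size() is table-driven. */
static size_t
fake_index2size(szind_t szind) {
	return (size_t)8 << szind;
}

static void
fake_prof_free(const void *ptr, size_t usize, alloc_ctx_t *alloc_ctx) {
	/* Before this commit, the prof path re-derived slab/szind itself. */
	(void)ptr;
	printf("prof_free: usize=%zu slab=%d\n", usize, (int)alloc_ctx->slab);
}

static void
fake_dalloc(void *ptr, alloc_ctx_t *alloc_ctx) {
	(void)ptr;
	printf("dalloc: szind=%u\n", alloc_ctx->szind);
}

/* Shape of ifree() after this commit: one lookup feeds the usize
 * computation, the profiling hook, and the deallocation fast path. */
static void
fake_ifree(void *ptr) {
	alloc_ctx_t alloc_ctx;
	fake_rtree_szind_slab_read(ptr, &alloc_ctx.szind, &alloc_ctx.slab);
	size_t usize = fake_index2size(alloc_ctx.szind);
	fake_prof_free(ptr, usize, &alloc_ctx);
	fake_dalloc(ptr, &alloc_ctx);
}

int
main(void) {
	int x;
	fake_ifree(&x);
	return 0;
}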
@@ -3,9 +3,10 @@
 #ifndef JEMALLOC_ENABLE_INLINE
 szind_t arena_bin_index(arena_t *arena, arena_bin_t *bin);
-prof_tctx_t *arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr);
+prof_tctx_t *arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr,
+    alloc_ctx_t *ctx);
 void arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
-    prof_tctx_t *tctx);
+    alloc_ctx_t *ctx, prof_tctx_t *tctx);
 void arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx);
 void arena_decay_ticks(tsdn_t *tsdn, arena_t *arena, unsigned nticks);
 void arena_decay_tick(tsdn_t *tsdn, arena_t *arena);
@@ -16,10 +17,10 @@ size_t arena_salloc(tsdn_t *tsdn, const void *ptr);
 size_t arena_vsalloc(tsdn_t *tsdn, const void *ptr);
 void arena_dalloc_no_tcache(tsdn_t *tsdn, void *ptr);
 void arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
-    dalloc_ctx_t *dalloc_ctx, bool slow_path);
+    alloc_ctx_t *alloc_ctx, bool slow_path);
 void arena_sdalloc_no_tcache(tsdn_t *tsdn, void *ptr, size_t size);
 void arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
-    dalloc_ctx_t *dalloc_ctx, bool slow_path);
+    alloc_ctx_t *alloc_ctx, bool slow_path);
 #endif

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
@@ -30,28 +31,42 @@ arena_bin_index(arena_t *arena, arena_bin_t *bin) {
 	return binind;
 }

-JEMALLOC_INLINE prof_tctx_t *
-arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr) {
+JEMALLOC_ALWAYS_INLINE prof_tctx_t *
+arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);

-	const extent_t *extent = iealloc(tsdn, ptr);
-	if (unlikely(!extent_slab_get(extent))) {
-		return large_prof_tctx_get(tsdn, extent);
+	/* Static check. */
+	if (alloc_ctx == NULL) {
+		const extent_t *extent = iealloc(tsdn, ptr);
+		if (unlikely(!extent_slab_get(extent))) {
+			return large_prof_tctx_get(tsdn, extent);
+		}
+	} else {
+		if (unlikely(!alloc_ctx->slab)) {
+			return large_prof_tctx_get(tsdn, iealloc(tsdn, ptr));
+		}
 	}
 	return (prof_tctx_t *)(uintptr_t)1U;
 }

-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
-    prof_tctx_t *tctx) {
+    alloc_ctx_t *alloc_ctx, prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);

-	extent_t *extent = iealloc(tsdn, ptr);
-	if (unlikely(!extent_slab_get(extent))) {
-		large_prof_tctx_set(tsdn, extent, tctx);
+	/* Static check. */
+	if (alloc_ctx == NULL) {
+		extent_t *extent = iealloc(tsdn, ptr);
+		if (unlikely(!extent_slab_get(extent))) {
+			large_prof_tctx_set(tsdn, extent, tctx);
+		}
+	} else {
+		if (unlikely(!alloc_ctx->slab)) {
+			large_prof_tctx_set(tsdn, iealloc(tsdn, ptr), tctx);
+		}
 	}
 }

 JEMALLOC_INLINE void
@@ -196,7 +211,7 @@ arena_dalloc_no_tcache(tsdn_t *tsdn, void *ptr) {

 JEMALLOC_ALWAYS_INLINE void
 arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
-    dalloc_ctx_t *dalloc_ctx, bool slow_path) {
+    alloc_ctx_t *alloc_ctx, bool slow_path) {
 	assert(!tsdn_null(tsdn) || tcache == NULL);
 	assert(ptr != NULL);

@@ -208,9 +223,9 @@ arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
 	szind_t szind;
 	bool slab;
 	rtree_ctx_t *rtree_ctx;
-	if (dalloc_ctx != NULL) {
-		szind = dalloc_ctx->szind;
-		slab = dalloc_ctx->slab;
+	if (alloc_ctx != NULL) {
+		szind = alloc_ctx->szind;
+		slab = alloc_ctx->slab;
 		assert(szind != NSIZES);
 	} else {
 		rtree_ctx = tsd_rtree_ctx(tsdn_tsd(tsdn));
@@ -293,7 +308,7 @@ arena_sdalloc_no_tcache(tsdn_t *tsdn, void *ptr, size_t size) {

 JEMALLOC_ALWAYS_INLINE void
 arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
-    dalloc_ctx_t *dalloc_ctx, bool slow_path) {
+    alloc_ctx_t *alloc_ctx, bool slow_path) {
 	assert(!tsdn_null(tsdn) || tcache == NULL);
 	assert(ptr != NULL);
 	assert(size <= LARGE_MAXCLASS);
@@ -305,9 +320,9 @@ arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,

 	szind_t szind;
 	bool slab;
-	UNUSED dalloc_ctx_t local_ctx;
+	UNUSED alloc_ctx_t local_ctx;
 	if (config_prof && opt_prof) {
-		if (dalloc_ctx == NULL) {
+		if (alloc_ctx == NULL) {
 			/* Uncommon case and should be a static check. */
 			rtree_ctx_t rtree_ctx_fallback;
 			rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn,
@@ -316,10 +331,10 @@ arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
 			    (uintptr_t)ptr, true, &local_ctx.szind,
 			    &local_ctx.slab);
 			assert(local_ctx.szind == size2index(size));
-			dalloc_ctx = &local_ctx;
+			alloc_ctx = &local_ctx;
 		}
-		slab = dalloc_ctx->slab;
-		szind = dalloc_ctx->szind;
+		slab = alloc_ctx->slab;
+		szind = alloc_ctx->szind;
 	} else {
 		/*
 		 * There is no risk of being confused by a promoted sampled
@@ -260,8 +260,8 @@ struct arena_tdata_s {
 	ticker_t decay_ticker;
 };

-/* Used to pass rtree lookup context down the deallocation path. */
-struct dalloc_ctx_s {
+/* Used to pass rtree lookup context down the path. */
+struct alloc_ctx_s {
 	szind_t szind;
 	bool slab;
 };
@@ -19,7 +19,7 @@ typedef struct arena_decay_s arena_decay_t;
 typedef struct arena_bin_s arena_bin_t;
 typedef struct arena_s arena_t;
 typedef struct arena_tdata_s arena_tdata_t;
-typedef struct dalloc_ctx_s dalloc_ctx_t;
+typedef struct alloc_ctx_s alloc_ctx_t;

 typedef enum {
 	percpu_arena_disabled = 0,
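A note on the /* Static check. */ branches above: alloc_ctx_t doubles as an optional argument. Callers that already hold the rtree result pass a pointer; the rest pass NULL, and because the consumers are JEMALLOC_ALWAYS_INLINE, a literal NULL at the call site lets the compiler resolve the branch and discard the unused side. A self-contained sketch of that pattern, with hypothetical names rather than jemalloc's real internals:

#include <stdbool.h>
#include <stddef.h>

typedef unsigned szind_t;

typedef struct {
	szind_t szind;
	bool slab;
} alloc_ctx_t;

/* Stand-in for the slow path (iealloc() + extent_slab_get()). */
static bool
slow_slab_lookup(const void *ptr) {
	(void)ptr;
	return true;
}

/* When inlined with a literal NULL (or a visibly non-NULL ctx), the
 * branch is decided at compile time and the untaken side disappears. */
static inline bool
is_slab(const void *ptr, alloc_ctx_t *alloc_ctx) {
	if (alloc_ctx == NULL) {
		return slow_slab_lookup(ptr);	/* no ctx: do the lookup */
	}
	return alloc_ctx->slab;			/* reuse the prior lookup */
}

int
main(void) {
	int x;
	alloc_ctx_t ctx = {3, true};
	/* e.g. prof_malloc_sample_object() passes NULL; ifree() passes &ctx. */
	return (is_slab(&x, NULL) && is_slab(&x, &ctx)) ? 0 : 1;
}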
@@ -15,10 +15,10 @@ void *ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
 void *ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero);
 size_t ivsalloc(tsdn_t *tsdn, const void *ptr);
 void idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
-    dalloc_ctx_t *dalloc_ctx, bool is_internal, bool slow_path);
+    alloc_ctx_t *alloc_ctx, bool is_internal, bool slow_path);
 void idalloc(tsd_t *tsd, void *ptr);
 void isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
-    dalloc_ctx_t *dalloc_ctx, bool slow_path);
+    alloc_ctx_t *alloc_ctx, bool slow_path);
 void *iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
     size_t extra, size_t alignment, bool zero, tcache_t *tcache,
     arena_t *arena);
@@ -107,7 +107,7 @@ ivsalloc(tsdn_t *tsdn, const void *ptr) {
 }

 JEMALLOC_ALWAYS_INLINE void
-idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, dalloc_ctx_t *dalloc_ctx,
+idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, alloc_ctx_t *alloc_ctx,
     bool is_internal, bool slow_path) {
 	assert(ptr != NULL);
 	assert(!is_internal || tcache == NULL);
@@ -120,7 +120,7 @@ idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, dalloc_ctx_t *dalloc_ctx,
 	if (!is_internal && *tsd_reentrancy_levelp_get(tsdn_tsd(tsdn)) != 0) {
 		tcache = NULL;
 	}
-	arena_dalloc(tsdn, ptr, tcache, dalloc_ctx, slow_path);
+	arena_dalloc(tsdn, ptr, tcache, alloc_ctx, slow_path);
 }

 JEMALLOC_ALWAYS_INLINE void
@@ -130,9 +130,9 @@ idalloc(tsd_t *tsd, void *ptr) {

 JEMALLOC_ALWAYS_INLINE void
 isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
-    dalloc_ctx_t *dalloc_ctx, bool slow_path) {
+    alloc_ctx_t *alloc_ctx, bool slow_path) {
 	witness_assert_depth_to_rank(tsdn, WITNESS_RANK_CORE, 0);
-	arena_sdalloc(tsdn, ptr, size, tcache, dalloc_ctx, slow_path);
+	arena_sdalloc(tsdn, ptr, size, tcache, alloc_ctx, slow_path);
 }

 JEMALLOC_ALWAYS_INLINE void *
@@ -5,20 +5,22 @@
 bool prof_active_get_unlocked(void);
 bool prof_gdump_get_unlocked(void);
 prof_tdata_t *prof_tdata_get(tsd_t *tsd, bool create);
-prof_tctx_t *prof_tctx_get(tsdn_t *tsdn, const void *ptr);
+prof_tctx_t *prof_tctx_get(tsdn_t *tsdn, const void *ptr,
+    alloc_ctx_t *alloc_ctx);
 void prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
-    prof_tctx_t *tctx);
+    alloc_ctx_t *alloc_ctx, prof_tctx_t *tctx);
 void prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx);
 bool prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
     prof_tdata_t **tdata_out);
 prof_tctx_t *prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active,
     bool update);
 void prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize,
-    prof_tctx_t *tctx);
+    alloc_ctx_t *alloc_ctx, prof_tctx_t *tctx);
 void prof_realloc(tsd_t *tsd, const void *ptr, size_t usize,
     prof_tctx_t *tctx, bool prof_active, bool updated, const void *old_ptr,
     size_t old_usize, prof_tctx_t *old_tctx);
-void prof_free(tsd_t *tsd, const void *ptr, size_t usize);
+void prof_free(tsd_t *tsd, const void *ptr, size_t usize,
+    alloc_ctx_t *alloc_ctx);
 #endif

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_PROF_C_))
@@ -67,19 +69,20 @@ prof_tdata_get(tsd_t *tsd, bool create) {
 }

 JEMALLOC_ALWAYS_INLINE prof_tctx_t *
-prof_tctx_get(tsdn_t *tsdn, const void *ptr) {
+prof_tctx_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);

-	return arena_prof_tctx_get(tsdn, ptr);
+	return arena_prof_tctx_get(tsdn, ptr, alloc_ctx);
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize, prof_tctx_t *tctx) {
+prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
+    alloc_ctx_t *alloc_ctx, prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);

-	arena_prof_tctx_set(tsdn, ptr, usize, tctx);
+	arena_prof_tctx_set(tsdn, ptr, usize, alloc_ctx, tctx);
 }

 JEMALLOC_ALWAYS_INLINE void
@@ -145,7 +148,8 @@ prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update) {
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize, prof_tctx_t *tctx) {
+prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize, alloc_ctx_t *alloc_ctx,
+    prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);
 	assert(usize == isalloc(tsdn, ptr));
@@ -153,7 +157,8 @@ prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize, prof_tctx_t *tctx) {
 	if (unlikely((uintptr_t)tctx > (uintptr_t)1U)) {
 		prof_malloc_sample_object(tsdn, ptr, usize, tctx);
 	} else {
-		prof_tctx_set(tsdn, ptr, usize, (prof_tctx_t *)(uintptr_t)1U);
+		prof_tctx_set(tsdn, ptr, usize, alloc_ctx,
+		    (prof_tctx_t *)(uintptr_t)1U);
 	}
 }

@@ -188,7 +193,7 @@ prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
 	if (unlikely(sampled)) {
 		prof_malloc_sample_object(tsd_tsdn(tsd), ptr, usize, tctx);
 	} else if (moved) {
-		prof_tctx_set(tsd_tsdn(tsd), ptr, usize,
+		prof_tctx_set(tsd_tsdn(tsd), ptr, usize, NULL,
 		    (prof_tctx_t *)(uintptr_t)1U);
 	} else if (unlikely(old_sampled)) {
 		/*
@@ -199,7 +204,7 @@ prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
 		 */
 		prof_tctx_reset(tsd_tsdn(tsd), ptr, tctx);
 	} else {
-		assert((uintptr_t)prof_tctx_get(tsd_tsdn(tsd), ptr) ==
+		assert((uintptr_t)prof_tctx_get(tsd_tsdn(tsd), ptr, NULL) ==
 		    (uintptr_t)1U);
 	}

@@ -216,8 +221,8 @@ prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_free(tsd_t *tsd, const void *ptr, size_t usize) {
-	prof_tctx_t *tctx = prof_tctx_get(tsd_tsdn(tsd), ptr);
+prof_free(tsd_t *tsd, const void *ptr, size_t usize, alloc_ctx_t *alloc_ctx) {
+	prof_tctx_t *tctx = prof_tctx_get(tsd_tsdn(tsd), ptr, alloc_ctx);

 	cassert(config_prof);
 	assert(usize == isalloc(tsd_tsdn(tsd), ptr));
src/arena.c
@@ -1064,12 +1064,19 @@ arena_reset(tsd_t *tsd, arena_t *arena) {
 		size_t usize;

 		malloc_mutex_unlock(tsd_tsdn(tsd), &arena->large_mtx);
+		alloc_ctx_t alloc_ctx;
+		rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
+		rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx,
+		    (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);
+		assert(alloc_ctx.szind != NSIZES);
+
 		if (config_stats || (config_prof && opt_prof)) {
-			usize = isalloc(tsd_tsdn(tsd), ptr);
+			usize = index2size(alloc_ctx.szind);
+			assert(usize == isalloc(tsd_tsdn(tsd), ptr));
 		}
 		/* Remove large allocation from prof sample set. */
 		if (config_prof && opt_prof) {
-			prof_free(tsd, ptr, usize);
+			prof_free(tsd, ptr, usize, &alloc_ctx);
 		}
 		large_dalloc(tsd_tsdn(tsd), extent);
 		malloc_mutex_lock(tsd_tsdn(tsd), &arena->large_mtx);
@@ -1749,7 +1749,14 @@ imalloc_body(static_opts_t *sopts, dynamic_opts_t *dopts) {
 		 */
 		prof_tctx_t *tctx = prof_alloc_prep(
 		    tsd, usize, prof_active_get_unlocked(), true);
+
+		alloc_ctx_t alloc_ctx;
 		if (likely((uintptr_t)tctx == (uintptr_t)1U)) {
+			if (usize > SMALL_MAXCLASS) {
+				alloc_ctx.slab = false;
+			} else {
+				alloc_ctx.slab = true;
+			}
 			allocation = imalloc_no_sample(
 			    sopts, dopts, tsd, usize, usize, ind);
 		} else if ((uintptr_t)tctx > (uintptr_t)1U) {
@@ -1759,6 +1766,7 @@ imalloc_body(static_opts_t *sopts, dynamic_opts_t *dopts) {
 			 */
 			allocation = imalloc_sample(
 			    sopts, dopts, tsd, usize, ind);
+			alloc_ctx.slab = false;
 		} else {
 			allocation = NULL;
 		}
@@ -1767,9 +1775,7 @@ imalloc_body(static_opts_t *sopts, dynamic_opts_t *dopts) {
 			prof_alloc_rollback(tsd, tctx, true);
 			goto label_oom;
 		}
-
-		prof_malloc(tsd_tsdn(tsd), allocation, usize, tctx);
-
+		prof_malloc(tsd_tsdn(tsd), allocation, usize, &alloc_ctx, tctx);
 	} else {
 		/*
 		 * If dopts->alignment > 0, then ind is still 0, but usize was
@@ -2016,13 +2022,14 @@ irealloc_prof_sample(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize,
 }

 JEMALLOC_ALWAYS_INLINE_C void *
-irealloc_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize) {
+irealloc_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize,
+    alloc_ctx_t *alloc_ctx) {
 	void *p;
 	bool prof_active;
 	prof_tctx_t *old_tctx, *tctx;

 	prof_active = prof_active_get_unlocked();
-	old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_ptr);
+	old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_ptr, alloc_ctx);
 	tctx = prof_alloc_prep(tsd, usize, prof_active, true);
 	if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
 		p = irealloc_prof_sample(tsd, old_ptr, old_usize, usize, tctx);
@@ -2048,28 +2055,28 @@ ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) {
 	assert(ptr != NULL);
 	assert(malloc_initialized() || IS_INITIALIZER);

-	dalloc_ctx_t dalloc_ctx;
+	alloc_ctx_t alloc_ctx;
 	rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
 	rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx,
-	    (uintptr_t)ptr, true, &dalloc_ctx.szind, &dalloc_ctx.slab);
-	assert(dalloc_ctx.szind != NSIZES);
+	    (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);
+	assert(alloc_ctx.szind != NSIZES);

 	size_t usize;
 	if (config_prof && opt_prof) {
-		usize = index2size(dalloc_ctx.szind);
-		prof_free(tsd, ptr, usize);
+		usize = index2size(alloc_ctx.szind);
+		prof_free(tsd, ptr, usize, &alloc_ctx);
 	} else if (config_stats) {
-		usize = index2size(dalloc_ctx.szind);
+		usize = index2size(alloc_ctx.szind);
 	}
 	if (config_stats) {
 		*tsd_thread_deallocatedp_get(tsd) += usize;
 	}

 	if (likely(!slow_path)) {
-		idalloctm(tsd_tsdn(tsd), ptr, tcache, &dalloc_ctx, false,
+		idalloctm(tsd_tsdn(tsd), ptr, tcache, &alloc_ctx, false,
 		    false);
 	} else {
-		idalloctm(tsd_tsdn(tsd), ptr, tcache, &dalloc_ctx, false,
+		idalloctm(tsd_tsdn(tsd), ptr, tcache, &alloc_ctx, false,
 		    true);
 	}
 }
@@ -2083,14 +2090,14 @@ isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) {
 	assert(ptr != NULL);
 	assert(malloc_initialized() || IS_INITIALIZER);

-	dalloc_ctx_t dalloc_ctx, *ctx;
+	alloc_ctx_t alloc_ctx, *ctx;
 	if (config_prof && opt_prof) {
 		rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
 		rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx,
-		    (uintptr_t)ptr, true, &dalloc_ctx.szind, &dalloc_ctx.slab);
-		assert(dalloc_ctx.szind == size2index(usize));
-		prof_free(tsd, ptr, usize);
-		ctx = &dalloc_ctx;
+		    (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);
+		assert(alloc_ctx.szind == size2index(usize));
+		ctx = &alloc_ctx;
+		prof_free(tsd, ptr, usize, ctx);
 	} else {
 		ctx = NULL;
 	}
@@ -2138,11 +2145,18 @@ je_realloc(void *ptr, size_t size) {

 		witness_assert_lockless(tsd_tsdn(tsd));

-		old_usize = isalloc(tsd_tsdn(tsd), ptr);
+		alloc_ctx_t alloc_ctx;
+		rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
+		rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx,
+		    (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);
+		assert(alloc_ctx.szind != NSIZES);
+		old_usize = index2size(alloc_ctx.szind);
+		assert(old_usize == isalloc(tsd_tsdn(tsd), ptr));
 		if (config_prof && opt_prof) {
 			usize = s2u(size);
 			ret = unlikely(usize == 0 || usize > LARGE_MAXCLASS) ?
-			    NULL : irealloc_prof(tsd, ptr, old_usize, usize);
+			    NULL : irealloc_prof(tsd, ptr, old_usize, usize,
+			    &alloc_ctx);
 		} else {
 			if (config_stats) {
 				usize = s2u(size);
@@ -2398,13 +2412,13 @@ irallocx_prof_sample(tsdn_t *tsdn, void *old_ptr, size_t old_usize,
 JEMALLOC_ALWAYS_INLINE_C void *
 irallocx_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t size,
     size_t alignment, size_t *usize, bool zero, tcache_t *tcache,
-    arena_t *arena) {
+    arena_t *arena, alloc_ctx_t *alloc_ctx) {
 	void *p;
 	bool prof_active;
 	prof_tctx_t *old_tctx, *tctx;

 	prof_active = prof_active_get_unlocked();
-	old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_ptr);
+	old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_ptr, alloc_ctx);
 	tctx = prof_alloc_prep(tsd, *usize, prof_active, false);
 	if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
 		p = irallocx_prof_sample(tsd_tsdn(tsd), old_ptr, old_usize,
@@ -2474,15 +2488,20 @@ je_rallocx(void *ptr, size_t size, int flags) {
 		tcache = tcache_get(tsd);
 	}

-	old_usize = isalloc(tsd_tsdn(tsd), ptr);
-
+	alloc_ctx_t alloc_ctx;
+	rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
+	rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx,
+	    (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);
+	assert(alloc_ctx.szind != NSIZES);
+	old_usize = index2size(alloc_ctx.szind);
+	assert(old_usize == isalloc(tsd_tsdn(tsd), ptr));
 	if (config_prof && opt_prof) {
 		usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
 		if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {
 			goto label_oom;
 		}
 		p = irallocx_prof(tsd, ptr, old_usize, size, alignment, &usize,
-		    zero, tcache, arena);
+		    zero, tcache, arena, &alloc_ctx);
 		if (unlikely(p == NULL)) {
 			goto label_oom;
 		}
@@ -2544,13 +2563,13 @@ ixallocx_prof_sample(tsdn_t *tsdn, void *ptr, size_t old_usize, size_t size,

 JEMALLOC_ALWAYS_INLINE_C size_t
 ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size,
-    size_t extra, size_t alignment, bool zero) {
+    size_t extra, size_t alignment, bool zero, alloc_ctx_t *alloc_ctx) {
 	size_t usize_max, usize;
 	bool prof_active;
 	prof_tctx_t *old_tctx, *tctx;

 	prof_active = prof_active_get_unlocked();
-	old_tctx = prof_tctx_get(tsd_tsdn(tsd), ptr);
+	old_tctx = prof_tctx_get(tsd_tsdn(tsd), ptr, alloc_ctx);
 	/*
 	 * usize isn't knowable before ixalloc() returns when extra is non-zero.
 	 * Therefore, compute its maximum possible value and use that in
@@ -2605,8 +2624,13 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags) {
 	tsd = tsd_fetch();
 	witness_assert_lockless(tsd_tsdn(tsd));

-	old_usize = isalloc(tsd_tsdn(tsd), ptr);
-
+	alloc_ctx_t alloc_ctx;
+	rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
+	rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx,
+	    (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);
+	assert(alloc_ctx.szind != NSIZES);
+	old_usize = index2size(alloc_ctx.szind);
+	assert(old_usize == isalloc(tsd_tsdn(tsd), ptr));
 	/*
 	 * The API explicitly absolves itself of protecting against (size +
 	 * extra) numerical overflow, but we may need to clamp extra to avoid
@@ -2626,7 +2650,7 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags) {

 	if (config_prof && opt_prof) {
 		usize = ixallocx_prof(tsd, ptr, old_usize, size, extra,
-		    alignment, zero);
+		    alignment, zero, &alloc_ctx);
 	} else {
 		usize = ixallocx_helper(tsd_tsdn(tsd), ptr, old_usize, size,
 		    extra, alignment, zero);
@@ -234,7 +234,7 @@ prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated) {
 void
 prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
     prof_tctx_t *tctx) {
-	prof_tctx_set(tsdn, ptr, usize, tctx);
+	prof_tctx_set(tsdn, ptr, usize, NULL, tctx);

 	malloc_mutex_lock(tsdn, tctx->tdata->lock);
 	tctx->cnts.curobjs++;
@@ -15,7 +15,7 @@ TEST_BEGIN(test_prof_realloc) {
 	prof_cnt_all(&curobjs_0, NULL, NULL, NULL);
 	p = mallocx(1024, flags);
 	assert_ptr_not_null(p, "Unexpected mallocx() failure");
-	tctx_p = prof_tctx_get(tsdn, p);
+	tctx_p = prof_tctx_get(tsdn, p, NULL);
 	assert_ptr_ne(tctx_p, (prof_tctx_t *)(uintptr_t)1U,
 	    "Expected valid tctx");
 	prof_cnt_all(&curobjs_1, NULL, NULL, NULL);
@@ -25,7 +25,7 @@ TEST_BEGIN(test_prof_realloc) {
 	q = rallocx(p, 2048, flags);
 	assert_ptr_ne(p, q, "Expected move");
 	assert_ptr_not_null(p, "Unexpected rmallocx() failure");
-	tctx_q = prof_tctx_get(tsdn, q);
+	tctx_q = prof_tctx_get(tsdn, q, NULL);
 	assert_ptr_ne(tctx_q, (prof_tctx_t *)(uintptr_t)1U,
 	    "Expected valid tctx");
 	prof_cnt_all(&curobjs_2, NULL, NULL, NULL);