Restructure setters for profiling info

Explicitly define three setters:

- `prof_tctx_reset()`: set `prof_tctx` to `1U`, if we don't know in
advance whether the allocation is large or not;
- `prof_tctx_reset_sampled()`: set `prof_tctx` to `1U`, if we already
know in advance that the allocation is large;
- `prof_info_set()`: set a real `prof_tctx`, and also set other
profiling info, e.g. the allocation time.

In terms of code structure, the prof level is kept as a thin wrapper,
the large level only provides low-level setter APIs, and the arena
level carries out the main logic.
This commit is contained in:
Yinan Zhang 2019-12-13 16:48:03 -08:00
parent 1d01e4c770
commit 4afd709d1f
5 changed files with 29 additions and 35 deletions

View File

@@ -68,8 +68,7 @@ arena_prof_info_get(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx,
} }
JEMALLOC_ALWAYS_INLINE void JEMALLOC_ALWAYS_INLINE void
arena_prof_tctx_set(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx, arena_prof_tctx_reset(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx) {
prof_tctx_t *tctx) {
cassert(config_prof); cassert(config_prof);
assert(ptr != NULL); assert(ptr != NULL);
@@ -77,17 +76,17 @@ arena_prof_tctx_set(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx,
if (alloc_ctx == NULL) { if (alloc_ctx == NULL) {
extent_t *extent = iealloc(tsd_tsdn(tsd), ptr); extent_t *extent = iealloc(tsd_tsdn(tsd), ptr);
if (unlikely(!extent_slab_get(extent))) { if (unlikely(!extent_slab_get(extent))) {
large_prof_tctx_set(extent, tctx); large_prof_tctx_reset(extent);
} }
} else { } else {
if (unlikely(!alloc_ctx->slab)) { if (unlikely(!alloc_ctx->slab)) {
large_prof_tctx_set(iealloc(tsd_tsdn(tsd), ptr), tctx); large_prof_tctx_reset(iealloc(tsd_tsdn(tsd), ptr));
} }
} }
} }
static inline void JEMALLOC_ALWAYS_INLINE void
arena_prof_tctx_reset(tsd_t *tsd, const void *ptr, prof_tctx_t *tctx) { arena_prof_tctx_reset_sampled(tsd_t *tsd, const void *ptr) {
cassert(config_prof); cassert(config_prof);
assert(ptr != NULL); assert(ptr != NULL);
@@ -98,13 +97,13 @@ arena_prof_tctx_reset(tsd_t *tsd, const void *ptr, prof_tctx_t *tctx) {
} }
JEMALLOC_ALWAYS_INLINE void JEMALLOC_ALWAYS_INLINE void
arena_prof_alloc_time_set(tsd_t *tsd, const void *ptr, nstime_t *t) { arena_prof_info_set(tsd_t *tsd, const void *ptr, prof_tctx_t *tctx) {
cassert(config_prof); cassert(config_prof);
assert(ptr != NULL); assert(ptr != NULL);
extent_t *extent = iealloc(tsd_tsdn(tsd), ptr); extent_t *extent = iealloc(tsd_tsdn(tsd), ptr);
assert(!extent_slab_get(extent)); assert(!extent_slab_get(extent));
large_prof_alloc_time_set(extent, t); large_prof_info_set(extent, tctx);
} }
JEMALLOC_ALWAYS_INLINE void JEMALLOC_ALWAYS_INLINE void

View File

@@ -23,8 +23,7 @@ void large_dalloc_finish(tsdn_t *tsdn, extent_t *extent);
void large_dalloc(tsdn_t *tsdn, extent_t *extent); void large_dalloc(tsdn_t *tsdn, extent_t *extent);
size_t large_salloc(tsdn_t *tsdn, const extent_t *extent); size_t large_salloc(tsdn_t *tsdn, const extent_t *extent);
void large_prof_info_get(const extent_t *extent, prof_info_t *prof_info); void large_prof_info_get(const extent_t *extent, prof_info_t *prof_info);
void large_prof_tctx_set(extent_t *extent, prof_tctx_t *tctx);
void large_prof_tctx_reset(extent_t *extent); void large_prof_tctx_reset(extent_t *extent);
void large_prof_alloc_time_set(extent_t *extent, nstime_t *time); void large_prof_info_set(extent_t *extent, prof_tctx_t *tctx);
#endif /* JEMALLOC_INTERNAL_LARGE_EXTERNS_H */ #endif /* JEMALLOC_INTERNAL_LARGE_EXTERNS_H */

View File

@@ -50,28 +50,28 @@ prof_info_get(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx,
} }
JEMALLOC_ALWAYS_INLINE void JEMALLOC_ALWAYS_INLINE void
prof_tctx_set(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx, prof_tctx_reset(tsd_t *tsd, const void *ptr, alloc_ctx_t *alloc_ctx) {
prof_tctx_t *tctx) {
cassert(config_prof); cassert(config_prof);
assert(ptr != NULL); assert(ptr != NULL);
arena_prof_tctx_set(tsd, ptr, alloc_ctx, tctx); arena_prof_tctx_reset(tsd, ptr, alloc_ctx);
} }
JEMALLOC_ALWAYS_INLINE void JEMALLOC_ALWAYS_INLINE void
prof_tctx_reset(tsd_t *tsd, const void *ptr, prof_tctx_t *tctx) { prof_tctx_reset_sampled(tsd_t *tsd, const void *ptr) {
cassert(config_prof); cassert(config_prof);
assert(ptr != NULL); assert(ptr != NULL);
arena_prof_tctx_reset(tsd, ptr, tctx); arena_prof_tctx_reset_sampled(tsd, ptr);
} }
JEMALLOC_ALWAYS_INLINE void JEMALLOC_ALWAYS_INLINE void
prof_alloc_time_set(tsd_t *tsd, const void *ptr, nstime_t *t) { prof_info_set(tsd_t *tsd, const void *ptr, prof_tctx_t *tctx) {
cassert(config_prof); cassert(config_prof);
assert(ptr != NULL); assert(ptr != NULL);
assert((uintptr_t)tctx > (uintptr_t)1U);
arena_prof_alloc_time_set(tsd, ptr, t); arena_prof_info_set(tsd, ptr, tctx);
} }
JEMALLOC_ALWAYS_INLINE bool JEMALLOC_ALWAYS_INLINE bool
@@ -125,8 +125,7 @@ prof_malloc(tsd_t *tsd, const void *ptr, size_t usize, alloc_ctx_t *alloc_ctx,
if (unlikely((uintptr_t)tctx > (uintptr_t)1U)) { if (unlikely((uintptr_t)tctx > (uintptr_t)1U)) {
prof_malloc_sample_object(tsd, ptr, usize, tctx); prof_malloc_sample_object(tsd, ptr, usize, tctx);
} else { } else {
prof_tctx_set(tsd, ptr, alloc_ctx, prof_tctx_reset(tsd, ptr, alloc_ctx);
(prof_tctx_t *)(uintptr_t)1U);
} }
} }
@@ -161,15 +160,15 @@ prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
if (unlikely(sampled)) { if (unlikely(sampled)) {
prof_malloc_sample_object(tsd, ptr, usize, tctx); prof_malloc_sample_object(tsd, ptr, usize, tctx);
} else if (moved) { } else if (moved) {
prof_tctx_set(tsd, ptr, NULL, (prof_tctx_t *)(uintptr_t)1U); prof_tctx_reset(tsd, ptr, NULL);
} else if (unlikely(old_sampled)) { } else if (unlikely(old_sampled)) {
/* /*
* prof_tctx_set() would work for the !moved case as well, but * prof_tctx_reset() would work for the !moved case as well,
* prof_tctx_reset() is slightly cheaper, and the proper thing * but prof_tctx_reset_sampled() is slightly cheaper, and the
* to do here in the presence of explicit knowledge re: moved * proper thing to do here in the presence of explicit
* state. * knowledge re: moved state.
*/ */
prof_tctx_reset(tsd, ptr, tctx); prof_tctx_reset_sampled(tsd, ptr);
} else { } else {
prof_info_t prof_info; prof_info_t prof_info;
prof_info_get(tsd, ptr, NULL, &prof_info); prof_info_get(tsd, ptr, NULL, &prof_info);

View File

@@ -372,7 +372,7 @@ large_prof_info_get(const extent_t *extent, prof_info_t *prof_info) {
extent_prof_info_get(extent, prof_info); extent_prof_info_get(extent, prof_info);
} }
void static void
large_prof_tctx_set(extent_t *extent, prof_tctx_t *tctx) { large_prof_tctx_set(extent_t *extent, prof_tctx_t *tctx) {
extent_prof_tctx_set(extent, tctx); extent_prof_tctx_set(extent, tctx);
} }
@@ -383,6 +383,9 @@ large_prof_tctx_reset(extent_t *extent) {
} }
void void
large_prof_alloc_time_set(extent_t *extent, nstime_t *t) { large_prof_info_set(extent_t *extent, prof_tctx_t *tctx) {
extent_prof_alloc_time_set(extent, t); large_prof_tctx_set(extent, tctx);
nstime_t t;
nstime_init_update(&t);
extent_prof_alloc_time_set(extent, &t);
} }

View File

@@ -162,13 +162,7 @@ prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated) {
void void
prof_malloc_sample_object(tsd_t *tsd, const void *ptr, size_t usize, prof_malloc_sample_object(tsd_t *tsd, const void *ptr, size_t usize,
prof_tctx_t *tctx) { prof_tctx_t *tctx) {
prof_tctx_set(tsd, ptr, NULL, tctx); prof_info_set(tsd, ptr, tctx);
/* Get the current time and set this in the extent_t. We'll read this
* when free() is called. */
nstime_t t;
nstime_init_update(&t);
prof_alloc_time_set(tsd, ptr, &t);
malloc_mutex_lock(tsd_tsdn(tsd), tctx->tdata->lock); malloc_mutex_lock(tsd_tsdn(tsd), tctx->tdata->lock);
tctx->cnts.curobjs++; tctx->cnts.curobjs++;