Push down iealloc() calls.

Call iealloc() as deep into call chains as possible without causing
redundant calls.
Author: Jason Evans
Date: 2017-03-20 11:00:07 -07:00
parent 51a2ec92a1
commit 5e67fbc367
9 changed files with 176 additions and 227 deletions
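
The shape of the change is easiest to see at a single call site: instead of each caller resolving the owning extent with iealloc() and threading an extent_t * through the profiling API, only the pointer is passed down, and the extent lookup happens at the deepest layer that actually needs it. A minimal sketch of the pattern, for illustration only: the arena_prof_tctx_get() body below is a simplification (the real function distinguishes small/slab from large extents), and extent_prof_tctx_get() stands in for the actual metadata accessor.

/* Before: each caller resolved the extent itself and passed it down. */
extent_t *extent = iealloc(tsdn, ptr);
prof_tctx_t *tctx = prof_tctx_get(tsdn, extent, ptr);

/* After: callers pass only the pointer; the iealloc() call moves into the
 * lowest layer that needs the extent. */
prof_tctx_t *tctx = prof_tctx_get(tsdn, ptr);

/* Sketch of that lowest layer (simplified, not the exact code from this
 * commit). */
JEMALLOC_ALWAYS_INLINE prof_tctx_t *
arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr) {
	/* Single extent lookup, shared by the metadata access that follows. */
	const extent_t *extent = iealloc(tsdn, ptr);
	return extent_prof_tctx_get(extent); /* placeholder accessor */
}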


@@ -5,24 +5,20 @@
 bool prof_active_get_unlocked(void);
 bool prof_gdump_get_unlocked(void);
 prof_tdata_t *prof_tdata_get(tsd_t *tsd, bool create);
-prof_tctx_t *prof_tctx_get(tsdn_t *tsdn, const extent_t *extent,
-    const void *ptr);
-void prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr,
-    size_t usize, prof_tctx_t *tctx);
-void prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr,
+prof_tctx_t *prof_tctx_get(tsdn_t *tsdn, const void *ptr);
+void prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
     prof_tctx_t *tctx);
+void prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx);
 bool prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
     prof_tdata_t **tdata_out);
 prof_tctx_t *prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active,
     bool update);
-void prof_malloc(tsdn_t *tsdn, extent_t *extent, const void *ptr,
-    size_t usize, prof_tctx_t *tctx);
-void prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr,
-    size_t usize, prof_tctx_t *tctx, bool prof_active, bool updated,
-    extent_t *old_extent, const void *old_ptr, size_t old_usize,
-    prof_tctx_t *old_tctx);
-void prof_free(tsd_t *tsd, const extent_t *extent, const void *ptr,
-    size_t usize);
+void prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize,
+    prof_tctx_t *tctx);
+void prof_realloc(tsd_t *tsd, const void *ptr, size_t usize,
+    prof_tctx_t *tctx, bool prof_active, bool updated, const void *old_ptr,
+    size_t old_usize, prof_tctx_t *old_tctx);
+void prof_free(tsd_t *tsd, const void *ptr, size_t usize);
 #endif

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_PROF_C_))
@@ -71,29 +67,27 @@ prof_tdata_get(tsd_t *tsd, bool create) {
 }

 JEMALLOC_ALWAYS_INLINE prof_tctx_t *
-prof_tctx_get(tsdn_t *tsdn, const extent_t *extent, const void *ptr) {
+prof_tctx_get(tsdn_t *tsdn, const void *ptr) {
 	cassert(config_prof);
 	assert(ptr != NULL);

-	return arena_prof_tctx_get(tsdn, extent, ptr);
+	return arena_prof_tctx_get(tsdn, ptr);
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize,
-    prof_tctx_t *tctx) {
+prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize, prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);

-	arena_prof_tctx_set(tsdn, extent, ptr, usize, tctx);
+	arena_prof_tctx_set(tsdn, ptr, usize, tctx);
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr,
-    prof_tctx_t *tctx) {
+prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);

-	arena_prof_tctx_reset(tsdn, extent, ptr, tctx);
+	arena_prof_tctx_reset(tsdn, ptr, tctx);
 }

 JEMALLOC_ALWAYS_INLINE bool
@@ -151,24 +145,22 @@ prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update) {
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_malloc(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize,
-    prof_tctx_t *tctx) {
+prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize, prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);
 	assert(usize == isalloc(tsdn, ptr));

 	if (unlikely((uintptr_t)tctx > (uintptr_t)1U)) {
-		prof_malloc_sample_object(tsdn, extent, ptr, usize, tctx);
+		prof_malloc_sample_object(tsdn, ptr, usize, tctx);
 	} else {
-		prof_tctx_set(tsdn, extent, ptr, usize,
-		    (prof_tctx_t *)(uintptr_t)1U);
+		prof_tctx_set(tsdn, ptr, usize, (prof_tctx_t *)(uintptr_t)1U);
 	}
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr, size_t usize,
-    prof_tctx_t *tctx, bool prof_active, bool updated, extent_t *old_extent,
-    const void *old_ptr, size_t old_usize, prof_tctx_t *old_tctx) {
+prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
+    bool prof_active, bool updated, const void *old_ptr, size_t old_usize,
+    prof_tctx_t *old_tctx) {
 	bool sampled, old_sampled, moved;

 	cassert(config_prof);
@@ -194,10 +186,9 @@ prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr, size_t usize,
 	moved = (ptr != old_ptr);

 	if (unlikely(sampled)) {
-		prof_malloc_sample_object(tsd_tsdn(tsd), extent, ptr, usize,
-		    tctx);
+		prof_malloc_sample_object(tsd_tsdn(tsd), ptr, usize, tctx);
 	} else if (moved) {
-		prof_tctx_set(tsd_tsdn(tsd), extent, ptr, usize,
+		prof_tctx_set(tsd_tsdn(tsd), ptr, usize,
 		    (prof_tctx_t *)(uintptr_t)1U);
 	} else if (unlikely(old_sampled)) {
 		/*
@@ -206,9 +197,9 @@ prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr, size_t usize,
 		 * to do here in the presence of explicit knowledge re: moved
 		 * state.
 		 */
-		prof_tctx_reset(tsd_tsdn(tsd), extent, ptr, tctx);
+		prof_tctx_reset(tsd_tsdn(tsd), ptr, tctx);
 	} else {
-		assert((uintptr_t)prof_tctx_get(tsd_tsdn(tsd), extent, ptr) ==
+		assert((uintptr_t)prof_tctx_get(tsd_tsdn(tsd), ptr) ==
 		    (uintptr_t)1U);
 	}

@@ -225,8 +216,8 @@ prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr, size_t usize,
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_free(tsd_t *tsd, const extent_t *extent, const void *ptr, size_t usize) {
-	prof_tctx_t *tctx = prof_tctx_get(tsd_tsdn(tsd), extent, ptr);
+prof_free(tsd_t *tsd, const void *ptr, size_t usize) {
+	prof_tctx_t *tctx = prof_tctx_get(tsd_tsdn(tsd), ptr);

 	cassert(config_prof);
 	assert(usize == isalloc(tsd_tsdn(tsd), ptr));