Push down iealloc() calls.

Call iealloc() as deep into call chains as possible without causing
redundant calls.
Author: Jason Evans
Date:   2017-03-20 11:00:07 -07:00
Parent: 51a2ec92a1
Commit: 5e67fbc367

9 changed files with 176 additions and 227 deletions
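
The pattern throughout the diff is the same: callers that previously performed the pointer-to-extent lookup and threaded an extent_t * through the call chain now pass only the pointer, and the callee that actually needs the extent calls iealloc() itself. The following is a minimal, self-contained sketch of that shape; tsdn_t, extent_t, and the iealloc() body below are simplified stand-ins for illustration only, not the real jemalloc definitions.

#include <stdio.h>

typedef struct { int dummy; } tsdn_t;
typedef struct { unsigned arena_ind; } extent_t;

/* Stand-in for the real lookup that maps a pointer to its owning extent. */
static extent_t *
iealloc(tsdn_t *tsdn, const void *ptr) {
	static extent_t e = {0};
	(void)tsdn; (void)ptr;
	return &e;
}

/* Before: the caller looks up the extent and threads it down the chain. */
static void
dalloc_old(tsdn_t *tsdn, extent_t *extent, void *ptr) {
	(void)tsdn;
	printf("dalloc %p (arena %u)\n", ptr, extent->arena_ind);
}

/*
 * After: the lookup is pushed into the callee, so it happens at the deepest
 * point where the extent is first needed, and callers that never touch the
 * extent no longer call iealloc() at all.
 */
static void
dalloc_new(tsdn_t *tsdn, void *ptr) {
	extent_t *extent = iealloc(tsdn, ptr);
	printf("dalloc %p (arena %u)\n", ptr, extent->arena_ind);
}

int
main(void) {
	tsdn_t tsdn = {0};
	int x;
	dalloc_old(&tsdn, iealloc(&tsdn, &x), &x);	/* lookup at the call site */
	dalloc_new(&tsdn, &x);				/* lookup pushed down */
	return 0;
}

Compiled standalone, both calls behave identically; only the location of the lookup changes, which is exactly the shape of the arena_dalloc_small(), arena_dalloc_promoted(), and prof_tctx_*() changes in the hunks below.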

@ -65,18 +65,16 @@ void *arena_malloc_hard(tsdn_t *tsdn, arena_t *arena, size_t size,
szind_t ind, bool zero);
void *arena_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize,
size_t alignment, bool zero, tcache_t *tcache);
void arena_prof_promote(tsdn_t *tsdn, extent_t *extent, const void *ptr,
size_t usize);
void arena_dalloc_promoted(tsdn_t *tsdn, extent_t *extent, void *ptr,
tcache_t *tcache, bool slow_path);
void arena_prof_promote(tsdn_t *tsdn, const void *ptr, size_t usize);
void arena_dalloc_promoted(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
bool slow_path);
void arena_dalloc_bin_junked_locked(tsdn_t *tsdn, arena_t *arena,
extent_t *extent, void *ptr);
void arena_dalloc_small(tsdn_t *tsdn, arena_t *arena, extent_t *extent,
void *ptr);
bool arena_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, void *ptr,
size_t oldsize, size_t size, size_t extra, bool zero);
void *arena_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, void *ptr,
size_t oldsize, size_t size, size_t alignment, bool zero, tcache_t *tcache);
void arena_dalloc_small(tsdn_t *tsdn, void *ptr);
bool arena_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
size_t extra, bool zero);
void *arena_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t oldsize,
size_t size, size_t alignment, bool zero, tcache_t *tcache);
dss_prec_t arena_dss_prec_get(arena_t *arena);
bool arena_dss_prec_set(arena_t *arena, dss_prec_t dss_prec);
ssize_t arena_dirty_decay_time_default_get(void);

@ -3,12 +3,10 @@
#ifndef JEMALLOC_ENABLE_INLINE
szind_t arena_bin_index(arena_t *arena, arena_bin_t *bin);
prof_tctx_t *arena_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent,
const void *ptr);
void arena_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr,
size_t usize, prof_tctx_t *tctx);
void arena_prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr,
prof_tctx_t *arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr);
void arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
prof_tctx_t *tctx);
void arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx);
void arena_decay_ticks(tsdn_t *tsdn, arena_t *arena, unsigned nticks);
void arena_decay_tick(tsdn_t *tsdn, arena_t *arena);
void *arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind,
@ -30,10 +28,11 @@ arena_bin_index(arena_t *arena, arena_bin_t *bin) {
}
JEMALLOC_INLINE prof_tctx_t *
arena_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent, const void *ptr) {
arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr) {
cassert(config_prof);
assert(ptr != NULL);
const extent_t *extent = iealloc(tsdn, ptr);
if (unlikely(!extent_slab_get(extent))) {
return large_prof_tctx_get(tsdn, extent);
}
@ -41,21 +40,23 @@ arena_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent, const void *ptr) {
}
JEMALLOC_INLINE void
arena_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr,
size_t usize, prof_tctx_t *tctx) {
arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
prof_tctx_t *tctx) {
cassert(config_prof);
assert(ptr != NULL);
extent_t *extent = iealloc(tsdn, ptr);
if (unlikely(!extent_slab_get(extent))) {
large_prof_tctx_set(tsdn, extent, tctx);
}
}
JEMALLOC_INLINE void
arena_prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr,
prof_tctx_t *tctx) {
arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx) {
cassert(config_prof);
assert(ptr != NULL);
extent_t *extent = iealloc(tsdn, ptr);
assert(!extent_slab_get(extent));
large_prof_tctx_reset(tsdn, extent);
@ -187,16 +188,13 @@ arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path) {
tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
slow_path);
} else {
extent_t *extent = iealloc(tsdn, ptr);
arena_dalloc_small(tsdn, extent_arena_get(extent),
extent, ptr);
arena_dalloc_small(tsdn, ptr);
}
} else {
if (likely(tcache != NULL) && szind < nhbins) {
if (config_prof && unlikely(szind < NBINS)) {
extent_t *extent = iealloc(tsdn, ptr);
arena_dalloc_promoted(tsdn, extent, ptr,
tcache, slow_path);
arena_dalloc_promoted(tsdn, ptr, tcache,
slow_path);
} else {
tcache_dalloc_large(tsdn_tsd(tsdn), tcache,
ptr, szind, slow_path);
@ -250,16 +248,13 @@ arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
slow_path);
} else {
extent_t *extent = iealloc(tsdn, ptr);
arena_dalloc_small(tsdn, extent_arena_get(extent),
extent, ptr);
arena_dalloc_small(tsdn, ptr);
}
} else {
if (likely(tcache != NULL) && szind < nhbins) {
if (config_prof && unlikely(szind < NBINS)) {
extent_t *extent = iealloc(tsdn, ptr);
arena_dalloc_promoted(tsdn, extent, ptr,
tcache, slow_path);
arena_dalloc_promoted(tsdn, ptr, tcache,
slow_path);
} else {
tcache_dalloc_large(tsdn_tsd(tsdn), tcache, ptr,
szind, slow_path);

@ -1007,32 +1007,32 @@ iealloc(tsdn_t *tsdn, const void *ptr) {
#include "jemalloc/internal/hash_inlines.h"
#ifndef JEMALLOC_ENABLE_INLINE
arena_t *iaalloc(tsdn_t *tsdn, const void *ptr);
size_t isalloc(tsdn_t *tsdn, const void *ptr);
void *iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero,
arena_t *iaalloc(tsdn_t *tsdn, const void *ptr);
size_t isalloc(tsdn_t *tsdn, const void *ptr);
void *iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero,
tcache_t *tcache, bool is_internal, arena_t *arena, bool slow_path);
void *ialloc(tsd_t *tsd, size_t size, szind_t ind, bool zero,
void *ialloc(tsd_t *tsd, size_t size, szind_t ind, bool zero,
bool slow_path);
void *ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
void *ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
tcache_t *tcache, bool is_internal, arena_t *arena);
void *ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
void *ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
tcache_t *tcache, arena_t *arena);
void *ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero);
size_t ivsalloc(tsdn_t *tsdn, const void *ptr);
void idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool is_internal,
void *ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero);
size_t ivsalloc(tsdn_t *tsdn, const void *ptr);
void idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool is_internal,
bool slow_path);
void idalloc(tsd_t *tsd, void *ptr);
void isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
void idalloc(tsd_t *tsd, void *ptr);
void isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
bool slow_path);
void *iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
void *iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
size_t extra, size_t alignment, bool zero, tcache_t *tcache,
arena_t *arena);
void *iralloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
size_t size, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena);
void *iralloc(tsd_t *tsd, extent_t *extent, void *ptr, size_t oldsize,
size_t size, size_t alignment, bool zero);
bool ixalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
size_t size, size_t extra, size_t alignment, bool zero);
void *iralloct(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
size_t alignment, bool zero, tcache_t *tcache, arena_t *arena);
void *iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
size_t alignment, bool zero);
bool ixalloc(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t extra,
size_t alignment, bool zero);
#endif
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
@ -1176,8 +1176,8 @@ iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
}
JEMALLOC_ALWAYS_INLINE void *
iralloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize, size_t size,
size_t alignment, bool zero, tcache_t *tcache, arena_t *arena) {
iralloct(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t alignment,
bool zero, tcache_t *tcache, arena_t *arena) {
assert(ptr != NULL);
assert(size != 0);
witness_assert_depth_to_rank(tsdn, WITNESS_RANK_CORE, 0);
@ -1192,20 +1192,20 @@ iralloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize, size_t size,
zero, tcache, arena);
}
return arena_ralloc(tsdn, arena, extent, ptr, oldsize, size, alignment,
zero, tcache);
return arena_ralloc(tsdn, arena, ptr, oldsize, size, alignment, zero,
tcache);
}
JEMALLOC_ALWAYS_INLINE void *
iralloc(tsd_t *tsd, extent_t *extent, void *ptr, size_t oldsize, size_t size,
size_t alignment, bool zero) {
return iralloct(tsd_tsdn(tsd), extent, ptr, oldsize, size, alignment,
zero, tcache_get(tsd, true), NULL);
iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment,
bool zero) {
return iralloct(tsd_tsdn(tsd), ptr, oldsize, size, alignment, zero,
tcache_get(tsd, true), NULL);
}
JEMALLOC_ALWAYS_INLINE bool
ixalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize, size_t size,
size_t extra, size_t alignment, bool zero) {
ixalloc(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t extra,
size_t alignment, bool zero) {
assert(ptr != NULL);
assert(size != 0);
witness_assert_depth_to_rank(tsdn, WITNESS_RANK_CORE, 0);
@ -1216,8 +1216,7 @@ ixalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize, size_t size,
return true;
}
return arena_ralloc_no_move(tsdn, extent, ptr, oldsize, size, extra,
zero);
return arena_ralloc_no_move(tsdn, ptr, oldsize, size, extra, zero);
}
#endif

@ -39,8 +39,8 @@ extern uint64_t prof_interval;
extern size_t lg_prof_sample;
void prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated);
void prof_malloc_sample_object(tsdn_t *tsdn, extent_t *extent,
const void *ptr, size_t usize, prof_tctx_t *tctx);
void prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
prof_tctx_t *tctx);
void prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx);
void bt_init(prof_bt_t *bt, void **vec);
void prof_backtrace(prof_bt_t *bt);

@ -5,24 +5,20 @@
bool prof_active_get_unlocked(void);
bool prof_gdump_get_unlocked(void);
prof_tdata_t *prof_tdata_get(tsd_t *tsd, bool create);
prof_tctx_t *prof_tctx_get(tsdn_t *tsdn, const extent_t *extent,
const void *ptr);
void prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr,
size_t usize, prof_tctx_t *tctx);
void prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr,
prof_tctx_t *prof_tctx_get(tsdn_t *tsdn, const void *ptr);
void prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
prof_tctx_t *tctx);
void prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx);
bool prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
prof_tdata_t **tdata_out);
prof_tctx_t *prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active,
bool update);
void prof_malloc(tsdn_t *tsdn, extent_t *extent, const void *ptr,
size_t usize, prof_tctx_t *tctx);
void prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr,
size_t usize, prof_tctx_t *tctx, bool prof_active, bool updated,
extent_t *old_extent, const void *old_ptr, size_t old_usize,
prof_tctx_t *old_tctx);
void prof_free(tsd_t *tsd, const extent_t *extent, const void *ptr,
size_t usize);
void prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize,
prof_tctx_t *tctx);
void prof_realloc(tsd_t *tsd, const void *ptr, size_t usize,
prof_tctx_t *tctx, bool prof_active, bool updated, const void *old_ptr,
size_t old_usize, prof_tctx_t *old_tctx);
void prof_free(tsd_t *tsd, const void *ptr, size_t usize);
#endif
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_PROF_C_))
@ -71,29 +67,27 @@ prof_tdata_get(tsd_t *tsd, bool create) {
}
JEMALLOC_ALWAYS_INLINE prof_tctx_t *
prof_tctx_get(tsdn_t *tsdn, const extent_t *extent, const void *ptr) {
prof_tctx_get(tsdn_t *tsdn, const void *ptr) {
cassert(config_prof);
assert(ptr != NULL);
return arena_prof_tctx_get(tsdn, extent, ptr);
return arena_prof_tctx_get(tsdn, ptr);
}
JEMALLOC_ALWAYS_INLINE void
prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize,
prof_tctx_t *tctx) {
prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize, prof_tctx_t *tctx) {
cassert(config_prof);
assert(ptr != NULL);
arena_prof_tctx_set(tsdn, extent, ptr, usize, tctx);
arena_prof_tctx_set(tsdn, ptr, usize, tctx);
}
JEMALLOC_ALWAYS_INLINE void
prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr,
prof_tctx_t *tctx) {
prof_tctx_reset(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx) {
cassert(config_prof);
assert(ptr != NULL);
arena_prof_tctx_reset(tsdn, extent, ptr, tctx);
arena_prof_tctx_reset(tsdn, ptr, tctx);
}
JEMALLOC_ALWAYS_INLINE bool
@ -151,24 +145,22 @@ prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update) {
}
JEMALLOC_ALWAYS_INLINE void
prof_malloc(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize,
prof_tctx_t *tctx) {
prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize, prof_tctx_t *tctx) {
cassert(config_prof);
assert(ptr != NULL);
assert(usize == isalloc(tsdn, ptr));
if (unlikely((uintptr_t)tctx > (uintptr_t)1U)) {
prof_malloc_sample_object(tsdn, extent, ptr, usize, tctx);
prof_malloc_sample_object(tsdn, ptr, usize, tctx);
} else {
prof_tctx_set(tsdn, extent, ptr, usize,
(prof_tctx_t *)(uintptr_t)1U);
prof_tctx_set(tsdn, ptr, usize, (prof_tctx_t *)(uintptr_t)1U);
}
}
JEMALLOC_ALWAYS_INLINE void
prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr, size_t usize,
prof_tctx_t *tctx, bool prof_active, bool updated, extent_t *old_extent,
const void *old_ptr, size_t old_usize, prof_tctx_t *old_tctx) {
prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
bool prof_active, bool updated, const void *old_ptr, size_t old_usize,
prof_tctx_t *old_tctx) {
bool sampled, old_sampled, moved;
cassert(config_prof);
@ -194,10 +186,9 @@ prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr, size_t usize,
moved = (ptr != old_ptr);
if (unlikely(sampled)) {
prof_malloc_sample_object(tsd_tsdn(tsd), extent, ptr, usize,
tctx);
prof_malloc_sample_object(tsd_tsdn(tsd), ptr, usize, tctx);
} else if (moved) {
prof_tctx_set(tsd_tsdn(tsd), extent, ptr, usize,
prof_tctx_set(tsd_tsdn(tsd), ptr, usize,
(prof_tctx_t *)(uintptr_t)1U);
} else if (unlikely(old_sampled)) {
/*
@ -206,9 +197,9 @@ prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr, size_t usize,
* to do here in the presence of explicit knowledge re: moved
* state.
*/
prof_tctx_reset(tsd_tsdn(tsd), extent, ptr, tctx);
prof_tctx_reset(tsd_tsdn(tsd), ptr, tctx);
} else {
assert((uintptr_t)prof_tctx_get(tsd_tsdn(tsd), extent, ptr) ==
assert((uintptr_t)prof_tctx_get(tsd_tsdn(tsd), ptr) ==
(uintptr_t)1U);
}
@ -225,8 +216,8 @@ prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr, size_t usize,
}
JEMALLOC_ALWAYS_INLINE void
prof_free(tsd_t *tsd, const extent_t *extent, const void *ptr, size_t usize) {
prof_tctx_t *tctx = prof_tctx_get(tsd_tsdn(tsd), extent, ptr);
prof_free(tsd_t *tsd, const void *ptr, size_t usize) {
prof_tctx_t *tctx = prof_tctx_get(tsd_tsdn(tsd), ptr);
cassert(config_prof);
assert(usize == isalloc(tsd_tsdn(tsd), ptr));

@ -1033,7 +1033,7 @@ arena_reset(tsd_t *tsd, arena_t *arena) {
}
/* Remove large allocation from prof sample set. */
if (config_prof && opt_prof) {
prof_free(tsd, extent, ptr, usize);
prof_free(tsd, ptr, usize);
}
large_dalloc(tsd_tsdn(tsd), extent);
malloc_mutex_lock(tsd_tsdn(tsd), &arena->large_mtx);
@ -1459,19 +1459,21 @@ arena_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
}
void
arena_prof_promote(tsdn_t *tsdn, extent_t *extent, const void *ptr,
size_t usize) {
arena_t *arena = extent_arena_get(extent);
arena_prof_promote(tsdn_t *tsdn, const void *ptr, size_t usize) {
cassert(config_prof);
assert(ptr != NULL);
assert(isalloc(tsdn, ptr) == LARGE_MINCLASS);
assert(usize <= SMALL_MAXCLASS);
szind_t szind = size2index(usize);
extent_szind_set(extent, szind);
rtree_ctx_t rtree_ctx_fallback;
rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
extent_t *extent = rtree_extent_read(tsdn, &extents_rtree, rtree_ctx,
(uintptr_t)ptr, true);
arena_t *arena = extent_arena_get(extent);
szind_t szind = size2index(usize);
extent_szind_set(extent, szind);
rtree_szind_slab_update(tsdn, &extents_rtree, rtree_ctx, (uintptr_t)ptr,
szind, false);
@ -1497,14 +1499,13 @@ arena_prof_demote(tsdn_t *tsdn, extent_t *extent, const void *ptr) {
}
void
arena_dalloc_promoted(tsdn_t *tsdn, extent_t *extent, void *ptr,
tcache_t *tcache, bool slow_path) {
size_t usize;
arena_dalloc_promoted(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
bool slow_path) {
cassert(config_prof);
assert(opt_prof);
usize = arena_prof_demote(tsdn, extent, ptr);
extent_t *extent = iealloc(tsdn, ptr);
size_t usize = arena_prof_demote(tsdn, extent, ptr);
if (usize <= tcache_maxclass) {
tcache_dalloc_large(tsdn_tsd(tsdn), tcache, ptr,
size2index(usize), slow_path);
@ -1621,16 +1622,17 @@ arena_dalloc_bin(tsdn_t *tsdn, arena_t *arena, extent_t *extent, void *ptr) {
}
void
arena_dalloc_small(tsdn_t *tsdn, arena_t *arena, extent_t *extent, void *ptr) {
arena_dalloc_small(tsdn_t *tsdn, void *ptr) {
extent_t *extent = iealloc(tsdn, ptr);
arena_t *arena = extent_arena_get(extent);
arena_dalloc_bin(tsdn, arena, extent, ptr);
arena_decay_tick(tsdn, arena);
}
bool
arena_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
size_t size, size_t extra, bool zero) {
size_t usize_min, usize_max;
arena_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
size_t extra, bool zero) {
/* Calls with non-zero extra had to clamp extra. */
assert(extra == 0 || size + extra <= LARGE_MAXCLASS);
@ -1638,8 +1640,9 @@ arena_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
return true;
}
usize_min = s2u(size);
usize_max = s2u(size + extra);
extent_t *extent = iealloc(tsdn, ptr);
size_t usize_min = s2u(size);
size_t usize_max = s2u(size + extra);
if (likely(oldsize <= SMALL_MAXCLASS && usize_min <= SMALL_MAXCLASS)) {
/*
* Avoid moving the allocation if the size class can be left the
@ -1678,36 +1681,31 @@ arena_ralloc_move_helper(tsdn_t *tsdn, arena_t *arena, size_t usize,
}
void *
arena_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, void *ptr,
size_t oldsize, size_t size, size_t alignment, bool zero,
tcache_t *tcache) {
void *ret;
size_t usize, copysize;
usize = s2u(size);
arena_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t oldsize,
size_t size, size_t alignment, bool zero, tcache_t *tcache) {
size_t usize = s2u(size);
if (unlikely(usize == 0 || size > LARGE_MAXCLASS)) {
return NULL;
}
if (likely(usize <= SMALL_MAXCLASS)) {
/* Try to avoid moving the allocation. */
if (!arena_ralloc_no_move(tsdn, extent, ptr, oldsize, usize, 0,
zero)) {
if (!arena_ralloc_no_move(tsdn, ptr, oldsize, usize, 0, zero)) {
return ptr;
}
}
if (oldsize >= LARGE_MINCLASS && usize >= LARGE_MINCLASS) {
return large_ralloc(tsdn, arena, extent, usize, alignment,
zero, tcache);
return large_ralloc(tsdn, arena, iealloc(tsdn, ptr), usize,
alignment, zero, tcache);
}
/*
* size and oldsize are different enough that we need to move the
* object. In that case, fall back to allocating new space and copying.
*/
ret = arena_ralloc_move_helper(tsdn, arena, usize, alignment, zero,
tcache);
void *ret = arena_ralloc_move_helper(tsdn, arena, usize, alignment,
zero, tcache);
if (ret == NULL) {
return NULL;
}
@ -1717,7 +1715,7 @@ arena_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, void *ptr,
* ipalloc()/arena_malloc().
*/
copysize = (usize < oldsize) ? usize : oldsize;
size_t copysize = (usize < oldsize) ? usize : oldsize;
memcpy(ret, ptr, copysize);
isdalloct(tsdn, ptr, oldsize, tcache, true);
return ret;

@ -1589,8 +1589,7 @@ imalloc_sample(static_opts_t *sopts, dynamic_opts_t *dopts, tsd_t *tsd,
if (unlikely(ret == NULL)) {
return NULL;
}
arena_prof_promote(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), ret),
ret, usize);
arena_prof_promote(tsd_tsdn(tsd), ret, usize);
} else {
ret = imalloc_no_sample(sopts, dopts, tsd, usize, usize, ind);
}
@ -1741,8 +1740,7 @@ imalloc_body(static_opts_t *sopts, dynamic_opts_t *dopts) {
goto label_oom;
}
prof_malloc(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), allocation),
allocation, usize, tctx);
prof_malloc(tsd_tsdn(tsd), allocation, usize, tctx);
} else {
/*
@ -1955,53 +1953,46 @@ je_calloc(size_t num, size_t size) {
}
static void *
irealloc_prof_sample(tsd_t *tsd, extent_t *extent, void *old_ptr,
size_t old_usize, size_t usize, prof_tctx_t *tctx) {
irealloc_prof_sample(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize,
prof_tctx_t *tctx) {
void *p;
if (tctx == NULL) {
return NULL;
}
if (usize <= SMALL_MAXCLASS) {
p = iralloc(tsd, extent, old_ptr, old_usize, LARGE_MINCLASS, 0,
false);
p = iralloc(tsd, old_ptr, old_usize, LARGE_MINCLASS, 0, false);
if (p == NULL) {
return NULL;
}
arena_prof_promote(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), p), p,
usize);
arena_prof_promote(tsd_tsdn(tsd), p, usize);
} else {
p = iralloc(tsd, extent, old_ptr, old_usize, usize, 0, false);
p = iralloc(tsd, old_ptr, old_usize, usize, 0, false);
}
return p;
}
JEMALLOC_ALWAYS_INLINE_C void *
irealloc_prof(tsd_t *tsd, extent_t *old_extent, void *old_ptr, size_t old_usize,
size_t usize) {
irealloc_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize) {
void *p;
extent_t *extent;
bool prof_active;
prof_tctx_t *old_tctx, *tctx;
prof_active = prof_active_get_unlocked();
old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_extent, old_ptr);
old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_ptr);
tctx = prof_alloc_prep(tsd, usize, prof_active, true);
if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
p = irealloc_prof_sample(tsd, old_extent, old_ptr, old_usize,
usize, tctx);
p = irealloc_prof_sample(tsd, old_ptr, old_usize, usize, tctx);
} else {
p = iralloc(tsd, old_extent, old_ptr, old_usize, usize, 0,
false);
p = iralloc(tsd, old_ptr, old_usize, usize, 0, false);
}
if (unlikely(p == NULL)) {
prof_alloc_rollback(tsd, tctx, true);
return NULL;
}
extent = (p == old_ptr) ? old_extent : iealloc(tsd_tsdn(tsd), p);
prof_realloc(tsd, extent, p, usize, tctx, prof_active, true, old_extent,
old_ptr, old_usize, old_tctx);
prof_realloc(tsd, p, usize, tctx, prof_active, true, old_ptr, old_usize,
old_tctx);
return p;
}
@ -2016,8 +2007,7 @@ ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) {
size_t usize;
if (config_prof && opt_prof) {
usize = isalloc(tsd_tsdn(tsd), ptr);
extent_t *extent = iealloc(tsd_tsdn(tsd), ptr);
prof_free(tsd, extent, ptr, usize);
prof_free(tsd, ptr, usize);
} else if (config_stats) {
usize = isalloc(tsd_tsdn(tsd), ptr);
}
@ -2040,8 +2030,7 @@ isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) {
assert(malloc_initialized() || IS_INITIALIZER);
if (config_prof && opt_prof) {
extent_t *extent = iealloc(tsd_tsdn(tsd), ptr);
prof_free(tsd, extent, ptr, usize);
prof_free(tsd, ptr, usize);
}
if (config_stats) {
*tsd_thread_deallocatedp_get(tsd) += usize;
@ -2077,27 +2066,21 @@ je_realloc(void *ptr, size_t size) {
}
if (likely(ptr != NULL)) {
tsd_t *tsd;
extent_t *extent;
assert(malloc_initialized() || IS_INITIALIZER);
tsd = tsd_fetch();
tsd_t *tsd = tsd_fetch();
witness_assert_lockless(tsd_tsdn(tsd));
extent = iealloc(tsd_tsdn(tsd), ptr);
old_usize = isalloc(tsd_tsdn(tsd), ptr);
if (config_prof && opt_prof) {
usize = s2u(size);
ret = unlikely(usize == 0 || usize > LARGE_MAXCLASS) ?
NULL : irealloc_prof(tsd, extent, ptr, old_usize,
usize);
NULL : irealloc_prof(tsd, ptr, old_usize, usize);
} else {
if (config_stats) {
usize = s2u(size);
}
ret = iralloc(tsd, extent, ptr, old_usize, size, 0,
false);
ret = iralloc(tsd, ptr, old_usize, size, 0, false);
}
tsdn = tsd_tsdn(tsd);
} else {
@ -2314,47 +2297,46 @@ je_mallocx(size_t size, int flags) {
}
static void *
irallocx_prof_sample(tsdn_t *tsdn, extent_t *extent, void *old_ptr,
size_t old_usize, size_t usize, size_t alignment, bool zero,
tcache_t *tcache, arena_t *arena, prof_tctx_t *tctx) {
irallocx_prof_sample(tsdn_t *tsdn, void *old_ptr, size_t old_usize,
size_t usize, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena,
prof_tctx_t *tctx) {
void *p;
if (tctx == NULL) {
return NULL;
}
if (usize <= SMALL_MAXCLASS) {
p = iralloct(tsdn, extent, old_ptr, old_usize, LARGE_MINCLASS,
p = iralloct(tsdn, old_ptr, old_usize, LARGE_MINCLASS,
alignment, zero, tcache, arena);
if (p == NULL) {
return NULL;
}
arena_prof_promote(tsdn, iealloc(tsdn, p), p, usize);
arena_prof_promote(tsdn, p, usize);
} else {
p = iralloct(tsdn, extent, old_ptr, old_usize, usize, alignment,
zero, tcache, arena);
p = iralloct(tsdn, old_ptr, old_usize, usize, alignment, zero,
tcache, arena);
}
return p;
}
JEMALLOC_ALWAYS_INLINE_C void *
irallocx_prof(tsd_t *tsd, extent_t *old_extent, void *old_ptr, size_t old_usize,
size_t size, size_t alignment, size_t *usize, bool zero, tcache_t *tcache,
irallocx_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t size,
size_t alignment, size_t *usize, bool zero, tcache_t *tcache,
arena_t *arena) {
void *p;
extent_t *extent;
bool prof_active;
prof_tctx_t *old_tctx, *tctx;
prof_active = prof_active_get_unlocked();
old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_extent, old_ptr);
old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_ptr);
tctx = prof_alloc_prep(tsd, *usize, prof_active, false);
if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
p = irallocx_prof_sample(tsd_tsdn(tsd), old_extent, old_ptr,
old_usize, *usize, alignment, zero, tcache, arena, tctx);
p = irallocx_prof_sample(tsd_tsdn(tsd), old_ptr, old_usize,
*usize, alignment, zero, tcache, arena, tctx);
} else {
p = iralloct(tsd_tsdn(tsd), old_extent, old_ptr, old_usize,
size, alignment, zero, tcache, arena);
p = iralloct(tsd_tsdn(tsd), old_ptr, old_usize, size, alignment,
zero, tcache, arena);
}
if (unlikely(p == NULL)) {
prof_alloc_rollback(tsd, tctx, false);
@ -2370,13 +2352,10 @@ irallocx_prof(tsd_t *tsd, extent_t *old_extent, void *old_ptr, size_t old_usize,
* be the same as the current usize because of in-place large
* reallocation. Therefore, query the actual value of usize.
*/
extent = old_extent;
*usize = isalloc(tsd_tsdn(tsd), p);
} else {
extent = iealloc(tsd_tsdn(tsd), p);
}
prof_realloc(tsd, extent, p, *usize, tctx, prof_active, false,
old_extent, old_ptr, old_usize, old_tctx);
prof_realloc(tsd, p, *usize, tctx, prof_active, false, old_ptr,
old_usize, old_tctx);
return p;
}
@ -2387,7 +2366,6 @@ JEMALLOC_ALLOC_SIZE(2)
je_rallocx(void *ptr, size_t size, int flags) {
void *p;
tsd_t *tsd;
extent_t *extent;
size_t usize;
size_t old_usize;
size_t alignment = MALLOCX_ALIGN_GET(flags);
@ -2400,7 +2378,6 @@ je_rallocx(void *ptr, size_t size, int flags) {
assert(malloc_initialized() || IS_INITIALIZER);
tsd = tsd_fetch();
witness_assert_lockless(tsd_tsdn(tsd));
extent = iealloc(tsd_tsdn(tsd), ptr);
if (unlikely((flags & MALLOCX_ARENA_MASK) != 0)) {
unsigned arena_ind = MALLOCX_ARENA_GET(flags);
@ -2429,14 +2406,14 @@ je_rallocx(void *ptr, size_t size, int flags) {
if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {
goto label_oom;
}
p = irallocx_prof(tsd, extent, ptr, old_usize, size, alignment,
&usize, zero, tcache, arena);
p = irallocx_prof(tsd, ptr, old_usize, size, alignment, &usize,
zero, tcache, arena);
if (unlikely(p == NULL)) {
goto label_oom;
}
} else {
p = iralloct(tsd_tsdn(tsd), extent, ptr, old_usize, size,
alignment, zero, tcache, arena);
p = iralloct(tsd_tsdn(tsd), ptr, old_usize, size, alignment,
zero, tcache, arena);
if (unlikely(p == NULL)) {
goto label_oom;
}
@ -2464,12 +2441,11 @@ label_oom:
}
JEMALLOC_ALWAYS_INLINE_C size_t
ixallocx_helper(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t old_usize,
size_t size, size_t extra, size_t alignment, bool zero) {
ixallocx_helper(tsdn_t *tsdn, void *ptr, size_t old_usize, size_t size,
size_t extra, size_t alignment, bool zero) {
size_t usize;
if (ixalloc(tsdn, extent, ptr, old_usize, size, extra, alignment,
zero)) {
if (ixalloc(tsdn, ptr, old_usize, size, extra, alignment, zero)) {
return old_usize;
}
usize = isalloc(tsdn, ptr);
@ -2478,29 +2454,28 @@ ixallocx_helper(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t old_usize,
}
static size_t
ixallocx_prof_sample(tsdn_t *tsdn, extent_t *extent, void *ptr,
size_t old_usize, size_t size, size_t extra, size_t alignment, bool zero,
prof_tctx_t *tctx) {
ixallocx_prof_sample(tsdn_t *tsdn, void *ptr, size_t old_usize, size_t size,
size_t extra, size_t alignment, bool zero, prof_tctx_t *tctx) {
size_t usize;
if (tctx == NULL) {
return old_usize;
}
usize = ixallocx_helper(tsdn, extent, ptr, old_usize, size, extra,
alignment, zero);
usize = ixallocx_helper(tsdn, ptr, old_usize, size, extra, alignment,
zero);
return usize;
}
JEMALLOC_ALWAYS_INLINE_C size_t
ixallocx_prof(tsd_t *tsd, extent_t *extent, void *ptr, size_t old_usize,
size_t size, size_t extra, size_t alignment, bool zero) {
ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size,
size_t extra, size_t alignment, bool zero) {
size_t usize_max, usize;
bool prof_active;
prof_tctx_t *old_tctx, *tctx;
prof_active = prof_active_get_unlocked();
old_tctx = prof_tctx_get(tsd_tsdn(tsd), extent, ptr);
old_tctx = prof_tctx_get(tsd_tsdn(tsd), ptr);
/*
* usize isn't knowable before ixalloc() returns when extra is non-zero.
* Therefore, compute its maximum possible value and use that in
@ -2525,18 +2500,18 @@ ixallocx_prof(tsd_t *tsd, extent_t *extent, void *ptr, size_t old_usize,
tctx = prof_alloc_prep(tsd, usize_max, prof_active, false);
if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
usize = ixallocx_prof_sample(tsd_tsdn(tsd), extent, ptr,
old_usize, size, extra, alignment, zero, tctx);
usize = ixallocx_prof_sample(tsd_tsdn(tsd), ptr, old_usize,
size, extra, alignment, zero, tctx);
} else {
usize = ixallocx_helper(tsd_tsdn(tsd), extent, ptr, old_usize,
size, extra, alignment, zero);
usize = ixallocx_helper(tsd_tsdn(tsd), ptr, old_usize, size,
extra, alignment, zero);
}
if (usize == old_usize) {
prof_alloc_rollback(tsd, tctx, false);
return usize;
}
prof_realloc(tsd, extent, ptr, usize, tctx, prof_active, false, extent,
ptr, old_usize, old_tctx);
prof_realloc(tsd, ptr, usize, tctx, prof_active, false, ptr, old_usize,
old_tctx);
return usize;
}
@ -2544,7 +2519,6 @@ ixallocx_prof(tsd_t *tsd, extent_t *extent, void *ptr, size_t old_usize,
JEMALLOC_EXPORT size_t JEMALLOC_NOTHROW
je_xallocx(void *ptr, size_t size, size_t extra, int flags) {
tsd_t *tsd;
extent_t *extent;
size_t usize, old_usize;
size_t alignment = MALLOCX_ALIGN_GET(flags);
bool zero = flags & MALLOCX_ZERO;
@ -2555,7 +2529,6 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags) {
assert(malloc_initialized() || IS_INITIALIZER);
tsd = tsd_fetch();
witness_assert_lockless(tsd_tsdn(tsd));
extent = iealloc(tsd_tsdn(tsd), ptr);
old_usize = isalloc(tsd_tsdn(tsd), ptr);
@ -2577,11 +2550,11 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags) {
}
if (config_prof && opt_prof) {
usize = ixallocx_prof(tsd, extent, ptr, old_usize, size, extra,
usize = ixallocx_prof(tsd, ptr, old_usize, size, extra,
alignment, zero);
} else {
usize = ixallocx_helper(tsd_tsdn(tsd), extent, ptr, old_usize,
size, extra, alignment, zero);
usize = ixallocx_helper(tsd_tsdn(tsd), ptr, old_usize, size,
extra, alignment, zero);
}
if (unlikely(usize == old_usize)) {
goto label_not_resized;

@ -222,9 +222,9 @@ prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated) {
}
void
prof_malloc_sample_object(tsdn_t *tsdn, extent_t *extent, const void *ptr,
size_t usize, prof_tctx_t *tctx) {
prof_tctx_set(tsdn, extent, ptr, usize, tctx);
prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
prof_tctx_t *tctx) {
prof_tctx_set(tsdn, ptr, usize, tctx);
malloc_mutex_lock(tsdn, tctx->tdata->lock);
tctx->cnts.curobjs++;

@ -4,7 +4,6 @@ TEST_BEGIN(test_prof_realloc) {
tsdn_t *tsdn;
int flags;
void *p, *q;
extent_t *extent_p, *extent_q;
prof_tctx_t *tctx_p, *tctx_q;
uint64_t curobjs_0, curobjs_1, curobjs_2, curobjs_3;
@ -16,9 +15,7 @@ TEST_BEGIN(test_prof_realloc) {
prof_cnt_all(&curobjs_0, NULL, NULL, NULL);
p = mallocx(1024, flags);
assert_ptr_not_null(p, "Unexpected mallocx() failure");
extent_p = iealloc(tsdn, p);
assert_ptr_not_null(extent_p, "Unexpected iealloc() failure");
tctx_p = prof_tctx_get(tsdn, extent_p, p);
tctx_p = prof_tctx_get(tsdn, p);
assert_ptr_ne(tctx_p, (prof_tctx_t *)(uintptr_t)1U,
"Expected valid tctx");
prof_cnt_all(&curobjs_1, NULL, NULL, NULL);
@ -28,9 +25,7 @@ TEST_BEGIN(test_prof_realloc) {
q = rallocx(p, 2048, flags);
assert_ptr_ne(p, q, "Expected move");
assert_ptr_not_null(p, "Unexpected rmallocx() failure");
extent_q = iealloc(tsdn, q);
assert_ptr_not_null(extent_q, "Unexpected iealloc() failure");
tctx_q = prof_tctx_get(tsdn, extent_q, q);
tctx_q = prof_tctx_get(tsdn, q);
assert_ptr_ne(tctx_q, (prof_tctx_t *)(uintptr_t)1U,
"Expected valid tctx");
prof_cnt_all(&curobjs_2, NULL, NULL, NULL);