Remove extent dereferences from the deallocation fast paths.
parent 4f341412e5
commit 51a2ec92a1
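The change below drops the extent_t * argument from the deallocation entry points (arena_dalloc, arena_sdalloc, idalloctm, idalloc, isdalloct, isfree, and callers such as ifree and je_sdallocx). On the fast path the size class and slab bit are now read from the extents rtree keyed by the pointer (rtree_szind_slab_read), and the extent itself is only looked up with iealloc() on the branches that still need it. The toy program below is only a sketch of that idea, not jemalloc code; every name in it is hypothetical. It models per-page (szind, slab) metadata in a small two-level radix tree so that the free path needs nothing but the raw pointer.

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical stand-ins; jemalloc's real constants and types differ. */
#define TOY_LG_PAGE 12
#define TOY_LG_LEAF 10                     /* one leaf covers 2^10 pages */
#define TOY_LEAF_SLOTS (1u << TOY_LG_LEAF)
#define TOY_ROOT_SLOTS 256

/* Packed per-page metadata: size-class index plus a "slab" (small) bit. */
typedef struct {
    uint32_t szind;
    bool slab;
} toy_meta_t;

/* Two-level radix tree: root of leaf pointers, leaves of per-page slots. */
typedef struct {
    toy_meta_t *leaves[TOY_ROOT_SLOTS];
} toy_rtree_t;

static toy_meta_t *
toy_rtree_slot(toy_rtree_t *rt, const void *ptr, bool create) {
    uintptr_t pfn = (uintptr_t)ptr >> TOY_LG_PAGE;
    size_t l1 = (pfn >> TOY_LG_LEAF) % TOY_ROOT_SLOTS;
    size_t l2 = pfn & (TOY_LEAF_SLOTS - 1);

    if (rt->leaves[l1] == NULL) {
        if (!create) {
            return NULL;
        }
        rt->leaves[l1] = calloc(TOY_LEAF_SLOTS, sizeof(toy_meta_t));
        assert(rt->leaves[l1] != NULL);
    }
    return &rt->leaves[l1][l2];
}

/* Registration happens at allocation time, while the extent is in hand. */
static void
toy_register(toy_rtree_t *rt, const void *ptr, uint32_t szind, bool slab) {
    toy_meta_t *m = toy_rtree_slot(rt, ptr, true);
    m->szind = szind;
    m->slab = slab;
}

/*
 * Deallocation fast path: no extent pointer is passed in and nothing is
 * dereferenced except the tree; szind/slab come straight from the lookup.
 */
static void
toy_dalloc(toy_rtree_t *rt, void *ptr) {
    toy_meta_t *m = toy_rtree_slot(rt, ptr, false);
    assert(m != NULL);
    if (m->slab) {
        printf("small free: %p, szind %u (tcache-style path)\n",
            ptr, m->szind);
    } else {
        printf("large free: %p, szind %u (extent fetched lazily)\n",
            ptr, m->szind);
    }
    free(ptr);
}

int
main(void) {
    toy_rtree_t rt;
    memset(&rt, 0, sizeof(rt));

    void *small = malloc(64);
    void *large = malloc(1 << 20);
    toy_register(&rt, small, 3, true);    /* pretend small size class */
    toy_register(&rt, large, 40, false);  /* pretend large size class */

    toy_dalloc(&rt, small);
    toy_dalloc(&rt, large);

    for (size_t i = 0; i < TOY_ROOT_SLOTS; i++) {
        free(rt.leaves[i]);
    }
    return 0;
}

In jemalloc itself the tree is the global extents_rtree, populated when extents are mapped; the sketch only illustrates why no extent pointer has to travel with the pointer being freed.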
@@ -16,10 +16,9 @@ void *arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind,
arena_t *arena_aalloc(tsdn_t *tsdn, const void *ptr);
size_t arena_salloc(tsdn_t *tsdn, const void *ptr);
size_t arena_vsalloc(tsdn_t *tsdn, const void *ptr);
void arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr,
    tcache_t *tcache, bool slow_path);
void arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
    tcache_t *tcache, bool slow_path);
void arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path);
void arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
    bool slow_path);
#endif

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
@@ -163,24 +162,39 @@ arena_vsalloc(tsdn_t *tsdn, const void *ptr) {
}

JEMALLOC_ALWAYS_INLINE void
arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
    bool slow_path) {
arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path) {
    assert(!tsdn_null(tsdn) || tcache == NULL);
    assert(ptr != NULL);

    szind_t szind = extent_szind_get(extent);
    if (likely(extent_slab_get(extent))) {
    rtree_ctx_t rtree_ctx_fallback;
    rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);

    szind_t szind;
    bool slab;
    rtree_szind_slab_read(tsdn, &extents_rtree, rtree_ctx, (uintptr_t)ptr,
        true, &szind, &slab);

    if (config_debug) {
        extent_t *extent = rtree_extent_read(tsdn, &extents_rtree,
            rtree_ctx, (uintptr_t)ptr, true);
        assert(szind == extent_szind_get(extent));
        assert(slab == extent_slab_get(extent));
    }

    if (likely(slab)) {
        /* Small allocation. */
        if (likely(tcache != NULL)) {
            tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
                slow_path);
        } else {
            extent_t *extent = iealloc(tsdn, ptr);
            arena_dalloc_small(tsdn, extent_arena_get(extent),
                extent, ptr);
        }
    } else {
        if (likely(tcache != NULL) && szind < nhbins) {
            if (config_prof && unlikely(szind < NBINS)) {
                extent_t *extent = iealloc(tsdn, ptr);
                arena_dalloc_promoted(tsdn, extent, ptr,
                    tcache, slow_path);
            } else {
@@ -188,30 +202,62 @@ arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
                    ptr, szind, slow_path);
            }
        } else {
            extent_t *extent = iealloc(tsdn, ptr);
            large_dalloc(tsdn, extent);
        }
    }
}

JEMALLOC_ALWAYS_INLINE void
arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
    tcache_t *tcache, bool slow_path) {
arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
    bool slow_path) {
    assert(!tsdn_null(tsdn) || tcache == NULL);
    assert(ptr != NULL);

    szind_t szind = size2index(size);
    if (likely(extent_slab_get(extent))) {
    szind_t szind;
    bool slab;
    if (!config_prof || !opt_prof) {
        /*
         * There is no risk of being confused by a promoted sampled
         * object, so base szind and slab on the given size.
         */
        szind = size2index(size);
        slab = (szind < NBINS);
    }

    if ((config_prof && opt_prof) || config_debug) {
        rtree_ctx_t rtree_ctx_fallback;
        rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn,
            &rtree_ctx_fallback);

        rtree_szind_slab_read(tsdn, &extents_rtree, rtree_ctx,
            (uintptr_t)ptr, true, &szind, &slab);

        assert(szind == size2index(size));
        assert((config_prof && opt_prof) || slab == (szind < NBINS));

        if (config_debug) {
            extent_t *extent = rtree_extent_read(tsdn,
                &extents_rtree, rtree_ctx, (uintptr_t)ptr, true);
            assert(szind == extent_szind_get(extent));
            assert(slab == extent_slab_get(extent));
        }
    }

    if (likely(slab)) {
        /* Small allocation. */
        if (likely(tcache != NULL)) {
            tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
                slow_path);
        } else {
            extent_t *extent = iealloc(tsdn, ptr);
            arena_dalloc_small(tsdn, extent_arena_get(extent),
                extent, ptr);
        }
    } else {
        if (likely(tcache != NULL) && szind < nhbins) {
            if (config_prof && unlikely(szind < NBINS)) {
                extent_t *extent = iealloc(tsdn, ptr);
                arena_dalloc_promoted(tsdn, extent, ptr,
                    tcache, slow_path);
            } else {
@@ -219,6 +265,7 @@ arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
                    szind, slow_path);
            }
        } else {
            extent_t *extent = iealloc(tsdn, ptr);
            large_dalloc(tsdn, extent);
        }
    }

@@ -1019,14 +1019,14 @@ void *ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
    tcache_t *tcache, arena_t *arena);
void *ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero);
size_t ivsalloc(tsdn_t *tsdn, const void *ptr);
void idalloctm(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
    bool is_internal, bool slow_path);
void idalloc(tsd_t *tsd, extent_t *extent, void *ptr);
void isdalloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
    tcache_t *tcache, bool slow_path);
void *iralloct_realign(tsdn_t *tsdn, extent_t *extent, void *ptr,
    size_t oldsize, size_t size, size_t extra, size_t alignment, bool zero,
    tcache_t *tcache, arena_t *arena);
void idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool is_internal,
    bool slow_path);
void idalloc(tsd_t *tsd, void *ptr);
void isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
    bool slow_path);
void *iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
    size_t extra, size_t alignment, bool zero, tcache_t *tcache,
    arena_t *arena);
void *iralloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
    size_t size, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena);
void *iralloc(tsd_t *tsd, extent_t *extent, void *ptr, size_t oldsize,
@@ -1112,8 +1112,8 @@ ivsalloc(tsdn_t *tsdn, const void *ptr) {
}

JEMALLOC_ALWAYS_INLINE void
idalloctm(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
    bool is_internal, bool slow_path) {
idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool is_internal,
    bool slow_path) {
    assert(ptr != NULL);
    assert(!is_internal || tcache == NULL);
    assert(!is_internal || arena_ind_get(iaalloc(tsdn, ptr)) <
@@ -1123,25 +1123,24 @@ idalloctm(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
        arena_internal_sub(iaalloc(tsdn, ptr), isalloc(tsdn, ptr));
    }

    arena_dalloc(tsdn, extent, ptr, tcache, slow_path);
    arena_dalloc(tsdn, ptr, tcache, slow_path);
}

JEMALLOC_ALWAYS_INLINE void
idalloc(tsd_t *tsd, extent_t *extent, void *ptr) {
    idalloctm(tsd_tsdn(tsd), extent, ptr, tcache_get(tsd, false), false,
        true);
idalloc(tsd_t *tsd, void *ptr) {
    idalloctm(tsd_tsdn(tsd), ptr, tcache_get(tsd, false), false, true);
}

JEMALLOC_ALWAYS_INLINE void
isdalloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
isdalloct(tsdn_t *tsdn, void *ptr, size_t size,
    tcache_t *tcache, bool slow_path) {
    witness_assert_depth_to_rank(tsdn, WITNESS_RANK_CORE, 0);
    arena_sdalloc(tsdn, extent, ptr, size, tcache, slow_path);
    arena_sdalloc(tsdn, ptr, size, tcache, slow_path);
}

JEMALLOC_ALWAYS_INLINE void *
iralloct_realign(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
    size_t size, size_t extra, size_t alignment, bool zero, tcache_t *tcache,
iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
    size_t extra, size_t alignment, bool zero, tcache_t *tcache,
    arena_t *arena) {
    witness_assert_depth_to_rank(tsdn, WITNESS_RANK_CORE, 0);
    void *p;
@@ -1172,7 +1171,7 @@ iralloct_realign(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
     */
    copysize = (size < oldsize) ? size : oldsize;
    memcpy(p, ptr, copysize);
    isdalloct(tsdn, extent, ptr, oldsize, tcache, true);
    isdalloct(tsdn, ptr, oldsize, tcache, true);
    return p;
}

@@ -1189,8 +1188,8 @@ iralloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize, size_t size,
         * Existing object alignment is inadequate; allocate new space
         * and copy.
         */
        return iralloct_realign(tsdn, extent, ptr, oldsize, size, 0,
            alignment, zero, tcache, arena);
        return iralloct_realign(tsdn, ptr, oldsize, size, 0, alignment,
            zero, tcache, arena);
    }

    return arena_ralloc(tsdn, arena, extent, ptr, oldsize, size, alignment,
@@ -1719,7 +1719,7 @@ arena_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, void *ptr,

    copysize = (usize < oldsize) ? usize : oldsize;
    memcpy(ret, ptr, copysize);
    isdalloct(tsdn, extent, ptr, oldsize, tcache, true);
    isdalloct(tsdn, ptr, oldsize, tcache, true);
    return ret;
}

src/ckh.c (15 lines changed)
@@ -282,14 +282,12 @@ ckh_grow(tsd_t *tsd, ckh_t *ckh) {
        ckh->lg_curbuckets = lg_curcells - LG_CKH_BUCKET_CELLS;

        if (!ckh_rebuild(ckh, tab)) {
            idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tab),
                tab, NULL, true, true);
            idalloctm(tsd_tsdn(tsd), tab, NULL, true, true);
            break;
        }

        /* Rebuilding failed, so back out partially rebuilt table. */
        idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), ckh->tab),
            ckh->tab, NULL, true, true);
        idalloctm(tsd_tsdn(tsd), ckh->tab, NULL, true, true);
        ckh->tab = tab;
        ckh->lg_curbuckets = lg_prevbuckets;
    }
@@ -331,8 +329,7 @@ ckh_shrink(tsd_t *tsd, ckh_t *ckh) {
    ckh->lg_curbuckets = lg_curcells - LG_CKH_BUCKET_CELLS;

    if (!ckh_rebuild(ckh, tab)) {
        idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tab), tab, NULL,
            true, true);
        idalloctm(tsd_tsdn(tsd), tab, NULL, true, true);
#ifdef CKH_COUNT
        ckh->nshrinks++;
#endif
@@ -340,8 +337,7 @@ ckh_shrink(tsd_t *tsd, ckh_t *ckh) {
    }

    /* Rebuilding failed, so back out partially rebuilt table. */
    idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), ckh->tab), ckh->tab,
        NULL, true, true);
    idalloctm(tsd_tsdn(tsd), ckh->tab, NULL, true, true);
    ckh->tab = tab;
    ckh->lg_curbuckets = lg_prevbuckets;
#ifdef CKH_COUNT
@@ -422,8 +418,7 @@ ckh_delete(tsd_t *tsd, ckh_t *ckh) {
        (unsigned long long)ckh->nrelocs);
#endif

    idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), ckh->tab), ckh->tab,
        NULL, true, true);
    idalloctm(tsd_tsdn(tsd), ckh->tab, NULL, true, true);
    if (config_debug) {
        memset(ckh, JEMALLOC_FREE_JUNK, sizeof(ckh_t));
    }

@@ -310,8 +310,8 @@ a0ialloc(size_t size, bool zero, bool is_internal) {
}

static void
a0idalloc(extent_t *extent, void *ptr, bool is_internal) {
    idalloctm(TSDN_NULL, extent, ptr, false, is_internal, true);
a0idalloc(void *ptr, bool is_internal) {
    idalloctm(TSDN_NULL, ptr, false, is_internal, true);
}

void *
@@ -321,7 +321,7 @@ a0malloc(size_t size) {

void
a0dalloc(void *ptr) {
    a0idalloc(iealloc(NULL, ptr), ptr, true);
    a0idalloc(ptr, true);
}

/*
@@ -358,7 +358,7 @@ bootstrap_free(void *ptr) {
        return;
    }

    a0idalloc(iealloc(NULL, ptr), ptr, false);
    a0idalloc(ptr, false);
}

void
@@ -2008,17 +2008,15 @@ irealloc_prof(tsd_t *tsd, extent_t *old_extent, void *old_ptr, size_t old_usize,

JEMALLOC_INLINE_C void
ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) {
    extent_t *extent;
    size_t usize;

    witness_assert_lockless(tsd_tsdn(tsd));

    assert(ptr != NULL);
    assert(malloc_initialized() || IS_INITIALIZER);

    extent = iealloc(tsd_tsdn(tsd), ptr);
    size_t usize;
    if (config_prof && opt_prof) {
        usize = isalloc(tsd_tsdn(tsd), ptr);
        extent_t *extent = iealloc(tsd_tsdn(tsd), ptr);
        prof_free(tsd, extent, ptr, usize);
    } else if (config_stats) {
        usize = isalloc(tsd_tsdn(tsd), ptr);
@@ -2028,21 +2026,21 @@ ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) {
    }

    if (likely(!slow_path)) {
        idalloctm(tsd_tsdn(tsd), extent, ptr, tcache, false, false);
        idalloctm(tsd_tsdn(tsd), ptr, tcache, false, false);
    } else {
        idalloctm(tsd_tsdn(tsd), extent, ptr, tcache, false, true);
        idalloctm(tsd_tsdn(tsd), ptr, tcache, false, true);
    }
}

JEMALLOC_INLINE_C void
isfree(tsd_t *tsd, extent_t *extent, void *ptr, size_t usize, tcache_t *tcache,
    bool slow_path) {
isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) {
    witness_assert_lockless(tsd_tsdn(tsd));

    assert(ptr != NULL);
    assert(malloc_initialized() || IS_INITIALIZER);

    if (config_prof && opt_prof) {
        extent_t *extent = iealloc(tsd_tsdn(tsd), ptr);
        prof_free(tsd, extent, ptr, usize);
    }
    if (config_stats) {
@@ -2050,9 +2048,9 @@ isfree(tsd_t *tsd, extent_t *extent, void *ptr, size_t usize, tcache_t *tcache,
    }

    if (likely(!slow_path)) {
        isdalloct(tsd_tsdn(tsd), extent, ptr, usize, tcache, false);
        isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, false);
    } else {
        isdalloct(tsd_tsdn(tsd), extent, ptr, usize, tcache, true);
        isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, true);
    }
}

@@ -2667,14 +2665,12 @@ inallocx(tsdn_t *tsdn, size_t size, int flags) {
JEMALLOC_EXPORT void JEMALLOC_NOTHROW
je_sdallocx(void *ptr, size_t size, int flags) {
    tsd_t *tsd;
    extent_t *extent;
    size_t usize;
    tcache_t *tcache;

    assert(ptr != NULL);
    assert(malloc_initialized() || IS_INITIALIZER);
    tsd = tsd_fetch();
    extent = iealloc(tsd_tsdn(tsd), ptr);
    usize = inallocx(tsd_tsdn(tsd), size, flags);
    assert(usize == isalloc(tsd_tsdn(tsd), ptr));

@@ -2691,9 +2687,9 @@ je_sdallocx(void *ptr, size_t size, int flags) {

    UTRACE(ptr, 0, 0);
    if (likely(!malloc_slow)) {
        isfree(tsd, extent, ptr, usize, tcache, false);
        isfree(tsd, ptr, usize, tcache, false);
    } else {
        isfree(tsd, extent, ptr, usize, tcache, true);
        isfree(tsd, ptr, usize, tcache, true);
    }
    witness_assert_lockless(tsd_tsdn(tsd));
}

@@ -303,8 +303,7 @@ large_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, size_t usize,

    size_t copysize = (usize < oldusize) ? usize : oldusize;
    memcpy(ret, extent_addr_get(extent), copysize);
    isdalloct(tsdn, extent, extent_addr_get(extent), oldusize, tcache,
        true);
    isdalloct(tsdn, extent_addr_get(extent), oldusize, tcache, true);
    return ret;
}

src/prof.c (33 lines changed)
@@ -582,8 +582,7 @@ prof_gctx_try_destroy(tsd_t *tsd, prof_tdata_t *tdata_self, prof_gctx_t *gctx,
        prof_leave(tsd, tdata_self);
        /* Destroy gctx. */
        malloc_mutex_unlock(tsd_tsdn(tsd), gctx->lock);
        idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), gctx), gctx,
            NULL, true, true);
        idalloctm(tsd_tsdn(tsd), gctx, NULL, true, true);
    } else {
        /*
         * Compensate for increment in prof_tctx_destroy() or
@@ -697,8 +696,7 @@ prof_tctx_destroy(tsd_t *tsd, prof_tctx_t *tctx) {
    }

    if (destroy_tctx) {
        idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tctx), tctx,
            NULL, true, true);
        idalloctm(tsd_tsdn(tsd), tctx, NULL, true, true);
    }
}

@@ -730,8 +728,8 @@ prof_lookup_global(tsd_t *tsd, prof_bt_t *bt, prof_tdata_t *tdata,
        if (ckh_insert(tsd, &bt2gctx, btkey.v, gctx.v)) {
            /* OOM. */
            prof_leave(tsd, tdata);
            idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd),
                gctx.v), gctx.v, NULL, true, true);
            idalloctm(tsd_tsdn(tsd), gctx.v, NULL, true,
                true);
            return true;
        }
        new_gctx = true;
@@ -755,8 +753,7 @@ prof_lookup_global(tsd_t *tsd, prof_bt_t *bt, prof_tdata_t *tdata,

        if (tgctx.v != NULL) {
            /* Lost race to insert. */
            idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd),
                tgctx.v), tgctx.v, NULL, true, true);
            idalloctm(tsd_tsdn(tsd), tgctx.v, NULL, true, true);
        }
    }
    prof_leave(tsd, tdata);
@@ -828,8 +825,7 @@ prof_lookup(tsd_t *tsd, prof_bt_t *bt) {
            if (new_gctx) {
                prof_gctx_try_destroy(tsd, tdata, gctx, tdata);
            }
            idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), ret.v),
                ret.v, NULL, true, true);
            idalloctm(tsd_tsdn(tsd), ret.v, NULL, true, true);
            return NULL;
        }
        malloc_mutex_lock(tsd_tsdn(tsd), gctx->lock);
@@ -1240,9 +1236,8 @@ prof_gctx_finish(tsd_t *tsd, prof_gctx_tree_t *gctxs) {
                    to_destroy);
                tctx_tree_remove(&gctx->tctxs,
                    to_destroy);
                idalloctm(tsd_tsdn(tsd),
                    iealloc(tsd_tsdn(tsd), to_destroy),
                    to_destroy, NULL, true, true);
                idalloctm(tsd_tsdn(tsd), to_destroy,
                    NULL, true, true);
            } else {
                next = NULL;
            }
@@ -1910,8 +1905,7 @@ prof_tdata_init_impl(tsd_t *tsd, uint64_t thr_uid, uint64_t thr_discrim,

    if (ckh_new(tsd, &tdata->bt2tctx, PROF_CKH_MINITEMS, prof_bt_hash,
        prof_bt_keycomp)) {
        idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tdata), tdata,
            NULL, true, true);
        idalloctm(tsd_tsdn(tsd), tdata, NULL, true, true);
        return NULL;
    }

@@ -1967,12 +1961,10 @@ prof_tdata_destroy_locked(tsd_t *tsd, prof_tdata_t *tdata,
    assert(prof_tdata_should_destroy_unlocked(tdata, even_if_attached));

    if (tdata->thread_name != NULL) {
        idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd),
            tdata->thread_name), tdata->thread_name, NULL, true, true);
        idalloctm(tsd_tsdn(tsd), tdata->thread_name, NULL, true, true);
    }
    ckh_delete(tsd, &tdata->bt2tctx);
    idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tdata), tdata, NULL,
        true, true);
    idalloctm(tsd_tsdn(tsd), tdata, NULL, true, true);
}

static void
@@ -2169,8 +2161,7 @@ prof_thread_name_set(tsd_t *tsd, const char *thread_name) {
    }

    if (tdata->thread_name != NULL) {
        idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd),
            tdata->thread_name), tdata->thread_name, NULL, true, true);
        idalloctm(tsd_tsdn(tsd), tdata->thread_name, NULL, true, true);
        tdata->thread_name = NULL;
    }
    if (strlen(s) > 0) {
@@ -389,8 +389,7 @@ tcache_destroy(tsd_t *tsd, tcache_t *tcache) {
        prof_idump(tsd_tsdn(tsd));
    }

    idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tcache), tcache, NULL,
        true, true);
    idalloctm(tsd_tsdn(tsd), tcache, NULL, true, true);
}

void