Remove extent dereferences from the deallocation fast paths.
commit 51a2ec92a1
parent 4f341412e5
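In short: arena_dalloc(), arena_sdalloc(), idalloctm(), and their callers no longer take an extent_t *; the common small-object free derives the size class and slab flag from the extents rtree keyed by the pointer, so the fast path never dereferences an extent. A rough sketch of the new lookup follows (hypothetical wrapper name, simplified; it assumes the jemalloc-internal helpers that appear in the diff below, such as rtree_szind_slab_read() and tcache_dalloc_small(), and is not the verbatim jemalloc code):

/*
 * Simplified sketch only.  Returns true if the deallocation was handled on
 * the extent-free fast path.
 */
static inline bool
dalloc_fast_path_sketch(tsdn_t *tsdn, void *ptr, tcache_t *tcache) {
	rtree_ctx_t rtree_ctx_fallback;
	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);

	szind_t szind;
	bool slab;
	/* One rtree read yields both the size class and the slab bit. */
	rtree_szind_slab_read(tsdn, &extents_rtree, rtree_ctx, (uintptr_t)ptr,
	    true, &szind, &slab);

	if (likely(slab) && likely(tcache != NULL)) {
		/* Small allocation: free via the tcache, no extent_t touched. */
		tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind, false);
		return true;
	}
	/* Large, sampled, or tcache-less frees still look up the extent. */
	return false;
}

The callers below (ifree(), isfree(), je_sdallocx(), and the internal idalloctm() call sites in ckh.c, prof.c, and tcache.c) are updated accordingly to stop computing the extent up front.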
@@ -16,10 +16,9 @@ void *arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind,
 arena_t *arena_aalloc(tsdn_t *tsdn, const void *ptr);
 size_t arena_salloc(tsdn_t *tsdn, const void *ptr);
 size_t arena_vsalloc(tsdn_t *tsdn, const void *ptr);
-void arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr,
-    tcache_t *tcache, bool slow_path);
-void arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
-    tcache_t *tcache, bool slow_path);
+void arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path);
+void arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
+    bool slow_path);
 #endif
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
@@ -163,24 +162,39 @@ arena_vsalloc(tsdn_t *tsdn, const void *ptr) {
 }
 
 JEMALLOC_ALWAYS_INLINE void
-arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
-    bool slow_path) {
+arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path) {
 	assert(!tsdn_null(tsdn) || tcache == NULL);
 	assert(ptr != NULL);
 
-	szind_t szind = extent_szind_get(extent);
-	if (likely(extent_slab_get(extent))) {
+	rtree_ctx_t rtree_ctx_fallback;
+	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
+
+	szind_t szind;
+	bool slab;
+	rtree_szind_slab_read(tsdn, &extents_rtree, rtree_ctx, (uintptr_t)ptr,
+	    true, &szind, &slab);
+
+	if (config_debug) {
+		extent_t *extent = rtree_extent_read(tsdn, &extents_rtree,
+		    rtree_ctx, (uintptr_t)ptr, true);
+		assert(szind == extent_szind_get(extent));
+		assert(slab == extent_slab_get(extent));
+	}
+
+	if (likely(slab)) {
 		/* Small allocation. */
 		if (likely(tcache != NULL)) {
 			tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
 			    slow_path);
 		} else {
+			extent_t *extent = iealloc(tsdn, ptr);
 			arena_dalloc_small(tsdn, extent_arena_get(extent),
 			    extent, ptr);
 		}
 	} else {
 		if (likely(tcache != NULL) && szind < nhbins) {
 			if (config_prof && unlikely(szind < NBINS)) {
+				extent_t *extent = iealloc(tsdn, ptr);
 				arena_dalloc_promoted(tsdn, extent, ptr,
 				    tcache, slow_path);
 			} else {
@@ -188,30 +202,62 @@ arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
 				    ptr, szind, slow_path);
 			}
 		} else {
+			extent_t *extent = iealloc(tsdn, ptr);
 			large_dalloc(tsdn, extent);
 		}
 	}
 }
 
 JEMALLOC_ALWAYS_INLINE void
-arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
-    tcache_t *tcache, bool slow_path) {
+arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
+    bool slow_path) {
 	assert(!tsdn_null(tsdn) || tcache == NULL);
 	assert(ptr != NULL);
 
-	szind_t szind = size2index(size);
-	if (likely(extent_slab_get(extent))) {
+	szind_t szind;
+	bool slab;
+	if (!config_prof || !opt_prof) {
+		/*
+		 * There is no risk of being confused by a promoted sampled
+		 * object, so base szind and slab on the given size.
+		 */
+		szind = size2index(size);
+		slab = (szind < NBINS);
+	}
+
+	if ((config_prof && opt_prof) || config_debug) {
+		rtree_ctx_t rtree_ctx_fallback;
+		rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn,
+		    &rtree_ctx_fallback);
+
+		rtree_szind_slab_read(tsdn, &extents_rtree, rtree_ctx,
+		    (uintptr_t)ptr, true, &szind, &slab);
+
+		assert(szind == size2index(size));
+		assert((config_prof && opt_prof) || slab == (szind < NBINS));
+
+		if (config_debug) {
+			extent_t *extent = rtree_extent_read(tsdn,
+			    &extents_rtree, rtree_ctx, (uintptr_t)ptr, true);
+			assert(szind == extent_szind_get(extent));
+			assert(slab == extent_slab_get(extent));
+		}
+	}
+
+	if (likely(slab)) {
 		/* Small allocation. */
 		if (likely(tcache != NULL)) {
 			tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,
 			    slow_path);
 		} else {
+			extent_t *extent = iealloc(tsdn, ptr);
 			arena_dalloc_small(tsdn, extent_arena_get(extent),
 			    extent, ptr);
 		}
 	} else {
 		if (likely(tcache != NULL) && szind < nhbins) {
 			if (config_prof && unlikely(szind < NBINS)) {
+				extent_t *extent = iealloc(tsdn, ptr);
 				arena_dalloc_promoted(tsdn, extent, ptr,
 				    tcache, slow_path);
 			} else {
@@ -219,6 +265,7 @@ arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
 				    szind, slow_path);
 			}
 		} else {
+			extent_t *extent = iealloc(tsdn, ptr);
 			large_dalloc(tsdn, extent);
 		}
 	}
@@ -1019,14 +1019,14 @@ void *ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
     tcache_t *tcache, arena_t *arena);
 void *ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero);
 size_t ivsalloc(tsdn_t *tsdn, const void *ptr);
-void idalloctm(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
-    bool is_internal, bool slow_path);
-void idalloc(tsd_t *tsd, extent_t *extent, void *ptr);
-void isdalloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
-    tcache_t *tcache, bool slow_path);
-void *iralloct_realign(tsdn_t *tsdn, extent_t *extent, void *ptr,
-    size_t oldsize, size_t size, size_t extra, size_t alignment, bool zero,
-    tcache_t *tcache, arena_t *arena);
+void idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool is_internal,
+    bool slow_path);
+void idalloc(tsd_t *tsd, void *ptr);
+void isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
+    bool slow_path);
+void *iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
+    size_t extra, size_t alignment, bool zero, tcache_t *tcache,
+    arena_t *arena);
 void *iralloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
     size_t size, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena);
 void *iralloc(tsd_t *tsd, extent_t *extent, void *ptr, size_t oldsize,
@@ -1112,8 +1112,8 @@ ivsalloc(tsdn_t *tsdn, const void *ptr) {
 }
 
 JEMALLOC_ALWAYS_INLINE void
-idalloctm(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
-    bool is_internal, bool slow_path) {
+idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool is_internal,
+    bool slow_path) {
 	assert(ptr != NULL);
 	assert(!is_internal || tcache == NULL);
 	assert(!is_internal || arena_ind_get(iaalloc(tsdn, ptr)) <
@@ -1123,25 +1123,24 @@ idalloctm(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
 		arena_internal_sub(iaalloc(tsdn, ptr), isalloc(tsdn, ptr));
 	}
 
-	arena_dalloc(tsdn, extent, ptr, tcache, slow_path);
+	arena_dalloc(tsdn, ptr, tcache, slow_path);
 }
 
 JEMALLOC_ALWAYS_INLINE void
-idalloc(tsd_t *tsd, extent_t *extent, void *ptr) {
-	idalloctm(tsd_tsdn(tsd), extent, ptr, tcache_get(tsd, false), false,
-	    true);
+idalloc(tsd_t *tsd, void *ptr) {
+	idalloctm(tsd_tsdn(tsd), ptr, tcache_get(tsd, false), false, true);
 }
 
 JEMALLOC_ALWAYS_INLINE void
-isdalloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
+isdalloct(tsdn_t *tsdn, void *ptr, size_t size,
     tcache_t *tcache, bool slow_path) {
 	witness_assert_depth_to_rank(tsdn, WITNESS_RANK_CORE, 0);
-	arena_sdalloc(tsdn, extent, ptr, size, tcache, slow_path);
+	arena_sdalloc(tsdn, ptr, size, tcache, slow_path);
 }
 
 JEMALLOC_ALWAYS_INLINE void *
-iralloct_realign(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
-    size_t size, size_t extra, size_t alignment, bool zero, tcache_t *tcache,
+iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
+    size_t extra, size_t alignment, bool zero, tcache_t *tcache,
     arena_t *arena) {
 	witness_assert_depth_to_rank(tsdn, WITNESS_RANK_CORE, 0);
 	void *p;
@@ -1172,7 +1171,7 @@ iralloct_realign(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
 	 */
 	copysize = (size < oldsize) ? size : oldsize;
 	memcpy(p, ptr, copysize);
-	isdalloct(tsdn, extent, ptr, oldsize, tcache, true);
+	isdalloct(tsdn, ptr, oldsize, tcache, true);
 	return p;
 }
 
@@ -1189,8 +1188,8 @@ iralloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize, size_t size,
 		 * Existing object alignment is inadequate; allocate new space
 		 * and copy.
 		 */
-		return iralloct_realign(tsdn, extent, ptr, oldsize, size, 0,
-		    alignment, zero, tcache, arena);
+		return iralloct_realign(tsdn, ptr, oldsize, size, 0, alignment,
+		    zero, tcache, arena);
 	}
 
 	return arena_ralloc(tsdn, arena, extent, ptr, oldsize, size, alignment,
@@ -1719,7 +1719,7 @@ arena_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, void *ptr,
 
 	copysize = (usize < oldsize) ? usize : oldsize;
 	memcpy(ret, ptr, copysize);
-	isdalloct(tsdn, extent, ptr, oldsize, tcache, true);
+	isdalloct(tsdn, ptr, oldsize, tcache, true);
 	return ret;
 }
 
src/ckh.c (15 lines changed)
@@ -282,14 +282,12 @@ ckh_grow(tsd_t *tsd, ckh_t *ckh) {
 		ckh->lg_curbuckets = lg_curcells - LG_CKH_BUCKET_CELLS;
 
 		if (!ckh_rebuild(ckh, tab)) {
-			idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tab),
-			    tab, NULL, true, true);
+			idalloctm(tsd_tsdn(tsd), tab, NULL, true, true);
 			break;
 		}
 
 		/* Rebuilding failed, so back out partially rebuilt table. */
-		idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), ckh->tab),
-		    ckh->tab, NULL, true, true);
+		idalloctm(tsd_tsdn(tsd), ckh->tab, NULL, true, true);
 		ckh->tab = tab;
 		ckh->lg_curbuckets = lg_prevbuckets;
 	}
@@ -331,8 +329,7 @@ ckh_shrink(tsd_t *tsd, ckh_t *ckh) {
 		ckh->lg_curbuckets = lg_curcells - LG_CKH_BUCKET_CELLS;
 
 		if (!ckh_rebuild(ckh, tab)) {
-			idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tab), tab, NULL,
-			    true, true);
+			idalloctm(tsd_tsdn(tsd), tab, NULL, true, true);
 #ifdef CKH_COUNT
 			ckh->nshrinks++;
 #endif
@@ -340,8 +337,7 @@ ckh_shrink(tsd_t *tsd, ckh_t *ckh) {
 		}
 
 		/* Rebuilding failed, so back out partially rebuilt table. */
-		idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), ckh->tab), ckh->tab,
-		    NULL, true, true);
+		idalloctm(tsd_tsdn(tsd), ckh->tab, NULL, true, true);
 		ckh->tab = tab;
 		ckh->lg_curbuckets = lg_prevbuckets;
 #ifdef CKH_COUNT
@@ -422,8 +418,7 @@ ckh_delete(tsd_t *tsd, ckh_t *ckh) {
 	    (unsigned long long)ckh->nrelocs);
 #endif
 
-	idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), ckh->tab), ckh->tab,
-	    NULL, true, true);
+	idalloctm(tsd_tsdn(tsd), ckh->tab, NULL, true, true);
 	if (config_debug) {
 		memset(ckh, JEMALLOC_FREE_JUNK, sizeof(ckh_t));
 	}
@@ -310,8 +310,8 @@ a0ialloc(size_t size, bool zero, bool is_internal) {
 }
 
 static void
-a0idalloc(extent_t *extent, void *ptr, bool is_internal) {
-	idalloctm(TSDN_NULL, extent, ptr, false, is_internal, true);
+a0idalloc(void *ptr, bool is_internal) {
+	idalloctm(TSDN_NULL, ptr, false, is_internal, true);
 }
 
 void *
@@ -321,7 +321,7 @@ a0malloc(size_t size) {
 
 void
 a0dalloc(void *ptr) {
-	a0idalloc(iealloc(NULL, ptr), ptr, true);
+	a0idalloc(ptr, true);
 }
 
 /*
@@ -358,7 +358,7 @@ bootstrap_free(void *ptr) {
 		return;
 	}
 
-	a0idalloc(iealloc(NULL, ptr), ptr, false);
+	a0idalloc(ptr, false);
 }
 
 void
@@ -2008,17 +2008,15 @@ irealloc_prof(tsd_t *tsd, extent_t *old_extent, void *old_ptr, size_t old_usize,
 
 JEMALLOC_INLINE_C void
 ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) {
-	extent_t *extent;
-	size_t usize;
-
 	witness_assert_lockless(tsd_tsdn(tsd));
 
 	assert(ptr != NULL);
 	assert(malloc_initialized() || IS_INITIALIZER);
 
-	extent = iealloc(tsd_tsdn(tsd), ptr);
+	size_t usize;
 	if (config_prof && opt_prof) {
 		usize = isalloc(tsd_tsdn(tsd), ptr);
+		extent_t *extent = iealloc(tsd_tsdn(tsd), ptr);
 		prof_free(tsd, extent, ptr, usize);
 	} else if (config_stats) {
 		usize = isalloc(tsd_tsdn(tsd), ptr);
@@ -2028,21 +2026,21 @@ ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) {
 	}
 
 	if (likely(!slow_path)) {
-		idalloctm(tsd_tsdn(tsd), extent, ptr, tcache, false, false);
+		idalloctm(tsd_tsdn(tsd), ptr, tcache, false, false);
 	} else {
-		idalloctm(tsd_tsdn(tsd), extent, ptr, tcache, false, true);
+		idalloctm(tsd_tsdn(tsd), ptr, tcache, false, true);
 	}
 }
 
 JEMALLOC_INLINE_C void
-isfree(tsd_t *tsd, extent_t *extent, void *ptr, size_t usize, tcache_t *tcache,
-    bool slow_path) {
+isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) {
 	witness_assert_lockless(tsd_tsdn(tsd));
 
 	assert(ptr != NULL);
 	assert(malloc_initialized() || IS_INITIALIZER);
 
 	if (config_prof && opt_prof) {
+		extent_t *extent = iealloc(tsd_tsdn(tsd), ptr);
 		prof_free(tsd, extent, ptr, usize);
 	}
 	if (config_stats) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (likely(!slow_path)) {
|
if (likely(!slow_path)) {
|
||||||
isdalloct(tsd_tsdn(tsd), extent, ptr, usize, tcache, false);
|
isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, false);
|
||||||
} else {
|
} else {
|
||||||
isdalloct(tsd_tsdn(tsd), extent, ptr, usize, tcache, true);
|
isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, true);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -2667,14 +2665,12 @@ inallocx(tsdn_t *tsdn, size_t size, int flags) {
 JEMALLOC_EXPORT void JEMALLOC_NOTHROW
 je_sdallocx(void *ptr, size_t size, int flags) {
 	tsd_t *tsd;
-	extent_t *extent;
 	size_t usize;
 	tcache_t *tcache;
 
 	assert(ptr != NULL);
 	assert(malloc_initialized() || IS_INITIALIZER);
 	tsd = tsd_fetch();
-	extent = iealloc(tsd_tsdn(tsd), ptr);
 	usize = inallocx(tsd_tsdn(tsd), size, flags);
 	assert(usize == isalloc(tsd_tsdn(tsd), ptr));
 
@@ -2691,9 +2687,9 @@ je_sdallocx(void *ptr, size_t size, int flags) {
 
 	UTRACE(ptr, 0, 0);
 	if (likely(!malloc_slow)) {
-		isfree(tsd, extent, ptr, usize, tcache, false);
+		isfree(tsd, ptr, usize, tcache, false);
 	} else {
-		isfree(tsd, extent, ptr, usize, tcache, true);
+		isfree(tsd, ptr, usize, tcache, true);
 	}
 	witness_assert_lockless(tsd_tsdn(tsd));
 }
@@ -303,8 +303,7 @@ large_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, size_t usize,
 
 	size_t copysize = (usize < oldusize) ? usize : oldusize;
 	memcpy(ret, extent_addr_get(extent), copysize);
-	isdalloct(tsdn, extent, extent_addr_get(extent), oldusize, tcache,
-	    true);
+	isdalloct(tsdn, extent_addr_get(extent), oldusize, tcache, true);
 	return ret;
 }
 
src/prof.c (33 lines changed)
@@ -582,8 +582,7 @@ prof_gctx_try_destroy(tsd_t *tsd, prof_tdata_t *tdata_self, prof_gctx_t *gctx,
 		prof_leave(tsd, tdata_self);
 		/* Destroy gctx. */
 		malloc_mutex_unlock(tsd_tsdn(tsd), gctx->lock);
-		idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), gctx), gctx,
-		    NULL, true, true);
+		idalloctm(tsd_tsdn(tsd), gctx, NULL, true, true);
 	} else {
 		/*
 		 * Compensate for increment in prof_tctx_destroy() or
@@ -697,8 +696,7 @@ prof_tctx_destroy(tsd_t *tsd, prof_tctx_t *tctx) {
 	}
 
 	if (destroy_tctx) {
-		idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tctx), tctx,
-		    NULL, true, true);
+		idalloctm(tsd_tsdn(tsd), tctx, NULL, true, true);
 	}
 }
 
@@ -730,8 +728,8 @@ prof_lookup_global(tsd_t *tsd, prof_bt_t *bt, prof_tdata_t *tdata,
 		if (ckh_insert(tsd, &bt2gctx, btkey.v, gctx.v)) {
 			/* OOM. */
 			prof_leave(tsd, tdata);
-			idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd),
-			    gctx.v), gctx.v, NULL, true, true);
+			idalloctm(tsd_tsdn(tsd), gctx.v, NULL, true,
+			    true);
 			return true;
 		}
 		new_gctx = true;
@@ -755,8 +753,7 @@ prof_lookup_global(tsd_t *tsd, prof_bt_t *bt, prof_tdata_t *tdata,
 
 		if (tgctx.v != NULL) {
 			/* Lost race to insert. */
-			idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd),
-			    tgctx.v), tgctx.v, NULL, true, true);
+			idalloctm(tsd_tsdn(tsd), tgctx.v, NULL, true, true);
 		}
 	}
 	prof_leave(tsd, tdata);
@@ -828,8 +825,7 @@ prof_lookup(tsd_t *tsd, prof_bt_t *bt) {
 			if (new_gctx) {
 				prof_gctx_try_destroy(tsd, tdata, gctx, tdata);
 			}
-			idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), ret.v),
-			    ret.v, NULL, true, true);
+			idalloctm(tsd_tsdn(tsd), ret.v, NULL, true, true);
 			return NULL;
 		}
 		malloc_mutex_lock(tsd_tsdn(tsd), gctx->lock);
@@ -1240,9 +1236,8 @@ prof_gctx_finish(tsd_t *tsd, prof_gctx_tree_t *gctxs) {
 					    to_destroy);
 					tctx_tree_remove(&gctx->tctxs,
 					    to_destroy);
-					idalloctm(tsd_tsdn(tsd),
-					    iealloc(tsd_tsdn(tsd), to_destroy),
-					    to_destroy, NULL, true, true);
+					idalloctm(tsd_tsdn(tsd), to_destroy,
+					    NULL, true, true);
 				} else {
 					next = NULL;
 				}
@@ -1910,8 +1905,7 @@ prof_tdata_init_impl(tsd_t *tsd, uint64_t thr_uid, uint64_t thr_discrim,
 
 	if (ckh_new(tsd, &tdata->bt2tctx, PROF_CKH_MINITEMS, prof_bt_hash,
 	    prof_bt_keycomp)) {
-		idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tdata), tdata,
-		    NULL, true, true);
+		idalloctm(tsd_tsdn(tsd), tdata, NULL, true, true);
 		return NULL;
 	}
 
@@ -1967,12 +1961,10 @@ prof_tdata_destroy_locked(tsd_t *tsd, prof_tdata_t *tdata,
 	assert(prof_tdata_should_destroy_unlocked(tdata, even_if_attached));
 
 	if (tdata->thread_name != NULL) {
-		idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd),
-		    tdata->thread_name), tdata->thread_name, NULL, true, true);
+		idalloctm(tsd_tsdn(tsd), tdata->thread_name, NULL, true, true);
 	}
 	ckh_delete(tsd, &tdata->bt2tctx);
-	idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tdata), tdata, NULL,
-	    true, true);
+	idalloctm(tsd_tsdn(tsd), tdata, NULL, true, true);
 }
 
 static void
@@ -2169,8 +2161,7 @@ prof_thread_name_set(tsd_t *tsd, const char *thread_name) {
 	}
 
 	if (tdata->thread_name != NULL) {
-		idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd),
-		    tdata->thread_name), tdata->thread_name, NULL, true, true);
+		idalloctm(tsd_tsdn(tsd), tdata->thread_name, NULL, true, true);
 		tdata->thread_name = NULL;
 	}
 	if (strlen(s) > 0) {
@@ -389,8 +389,7 @@ tcache_destroy(tsd_t *tsd, tcache_t *tcache) {
 		prof_idump(tsd_tsdn(tsd));
 	}
 
-	idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tcache), tcache, NULL,
-	    true, true);
+	idalloctm(tsd_tsdn(tsd), tcache, NULL, true, true);
 }
 
 void