Apply likely()/unlikely() to allocation/deallocation fast paths.
commit 9c640bfdd4 (parent 91566fc079)
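
A note on the annotations this diff introduces: likely() and unlikely() are branch-prediction hints. The sketch below shows the conventional GCC-style definitions built on __builtin_expect; it illustrates the usual pattern rather than copying jemalloc's own headers, whose exact spelling and fallbacks may differ.

/*
 * Conventional branch-hint macros (sketch, not jemalloc's verbatim
 * definitions).  __builtin_expect tells the compiler which way a branch
 * usually goes, so the expected path is laid out as straight-line code
 * and the cold path is moved out of the way.
 */
#ifdef __GNUC__
#  define likely(x)   __builtin_expect(!!(x), 1)
#  define unlikely(x) __builtin_expect(!!(x), 0)
#else
#  define likely(x)   (x)
#  define unlikely(x) (x)
#endif

On the allocation/deallocation fast paths below, the payoff is layout: the common case (a small request served from the thread cache) falls through with no taken branches, while rarely exercised work (profiling, junk filling, out-of-memory handling) is pushed into cold blocks.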
@@ -488,7 +488,8 @@ void arena_prof_tctx_set(const void *ptr, prof_tctx_t *tctx);
 void *arena_malloc(arena_t *arena, size_t size, bool zero, bool try_tcache);
 size_t arena_salloc(const void *ptr, bool demote);
 void arena_dalloc(arena_chunk_t *chunk, void *ptr, bool try_tcache);
-void arena_sdalloc(arena_chunk_t *chunk, void *ptr, size_t size, bool try_tcache);
+void arena_sdalloc(arena_chunk_t *chunk, void *ptr, size_t size,
+    bool try_tcache);
 #endif

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
@@ -539,7 +540,7 @@ small_size2bin(size_t size)
 {

     assert(size > 0);
-    if (size <= LOOKUP_MAXCLASS)
+    if (likely(size <= LOOKUP_MAXCLASS))
         return (small_size2bin_lookup(size));
     else
         return (small_size2bin_compute(size));
@@ -627,7 +628,7 @@ small_s2u(size_t size)
 {

     assert(size > 0);
-    if (size <= LOOKUP_MAXCLASS)
+    if (likely(size <= LOOKUP_MAXCLASS))
         return (small_s2u_lookup(size));
     else
         return (small_s2u_compute(size));
@@ -864,7 +865,7 @@ arena_prof_accum_locked(arena_t *arena, uint64_t accumbytes)

     cassert(config_prof);

-    if (prof_interval == 0)
+    if (likely(prof_interval == 0))
         return (false);
     return (arena_prof_accum_impl(arena, accumbytes));
 }
@@ -875,7 +876,7 @@ arena_prof_accum(arena_t *arena, uint64_t accumbytes)

     cassert(config_prof);

-    if (prof_interval == 0)
+    if (likely(prof_interval == 0))
         return (false);

     {
@@ -995,8 +996,8 @@ arena_run_regind(arena_run_t *run, arena_bin_info_t *bin_info, const void *ptr)
             SIZE_INV(28), SIZE_INV(29), SIZE_INV(30), SIZE_INV(31)
         };

-        if (interval <= ((sizeof(interval_invs) / sizeof(unsigned)) +
-            2)) {
+        if (likely(interval <= ((sizeof(interval_invs) /
+            sizeof(unsigned)) + 2))) {
             regind = (diff * interval_invs[interval - 3]) >>
                 SIZE_INV_SHIFT;
         } else
@@ -1025,7 +1026,7 @@ arena_prof_tctx_get(const void *ptr)
         pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
         mapbits = arena_mapbits_get(chunk, pageind);
         assert((mapbits & CHUNK_MAP_ALLOCATED) != 0);
-        if ((mapbits & CHUNK_MAP_LARGE) == 0)
+        if (likely((mapbits & CHUNK_MAP_LARGE) == 0))
             ret = (prof_tctx_t *)(uintptr_t)1U;
         else
             ret = arena_miscelm_get(chunk, pageind)->prof_tctx;
@@ -1047,7 +1048,7 @@ arena_prof_tctx_set(const void *ptr, prof_tctx_t *tctx)
         pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
         assert(arena_mapbits_allocated_get(chunk, pageind) != 0);

-        if (arena_mapbits_large_get(chunk, pageind) != 0)
+        if (unlikely(arena_mapbits_large_get(chunk, pageind) != 0))
             arena_miscelm_get(chunk, pageind)->prof_tctx = tctx;
     }

@@ -1059,8 +1060,9 @@ arena_malloc(arena_t *arena, size_t size, bool zero, bool try_tcache)
     assert(size != 0);
     assert(size <= arena_maxclass);

-    if (size <= SMALL_MAXCLASS) {
-        if (try_tcache && (tcache = tcache_get(true)) != NULL)
+    if (likely(size <= SMALL_MAXCLASS)) {
+        if (likely(try_tcache) && likely((tcache = tcache_get(true)) !=
+            NULL))
             return (tcache_alloc_small(tcache, size, zero));
         else {
             return (arena_malloc_small(choose_arena(arena), size,
@@ -1071,8 +1073,8 @@ arena_malloc(arena_t *arena, size_t size, bool zero, bool try_tcache)
          * Initialize tcache after checking size in order to avoid
          * infinite recursion during tcache initialization.
          */
-        if (try_tcache && size <= tcache_maxclass && (tcache =
-            tcache_get(true)) != NULL)
+        if (try_tcache && size <= tcache_maxclass && likely((tcache =
+            tcache_get(true)) != NULL))
             return (tcache_alloc_large(tcache, size, zero));
         else {
             return (arena_malloc_large(choose_arena(arena), size,
@@ -1096,8 +1098,8 @@ arena_salloc(const void *ptr, bool demote)
     pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
     assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
     binind = arena_mapbits_binind_get(chunk, pageind);
-    if (binind == BININD_INVALID || (config_prof && demote == false &&
-        arena_mapbits_large_get(chunk, pageind) != 0)) {
+    if (unlikely(binind == BININD_INVALID || (config_prof && demote == false
+        && arena_mapbits_large_get(chunk, pageind) != 0))) {
         /*
          * Large allocation.  In the common case (demote == true), and
          * as this is an inline function, most callers will only end up
@@ -1137,10 +1139,12 @@ arena_dalloc(arena_chunk_t *chunk, void *ptr, bool try_tcache)
     pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
     mapbits = arena_mapbits_get(chunk, pageind);
     assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
-    if ((mapbits & CHUNK_MAP_LARGE) == 0) {
+    if (likely((mapbits & CHUNK_MAP_LARGE) == 0)) {
         /* Small allocation. */
-        if (try_tcache && (tcache = tcache_get(false)) != NULL) {
-            size_t binind = arena_ptr_small_binind_get(ptr, mapbits);
+        if (likely(try_tcache) && likely((tcache = tcache_get(false)) !=
+            NULL)) {
+            size_t binind = arena_ptr_small_binind_get(ptr,
+                mapbits);
             tcache_dalloc_small(tcache, ptr, binind);
         } else
             arena_dalloc_small(chunk->arena, chunk, ptr, pageind);
@@ -1149,8 +1153,8 @@ arena_dalloc(arena_chunk_t *chunk, void *ptr, bool try_tcache)

         assert(((uintptr_t)ptr & PAGE_MASK) == 0);

-        if (try_tcache && size <= tcache_maxclass && (tcache =
-            tcache_get(false)) != NULL) {
+        if (try_tcache && size <= tcache_maxclass && likely((tcache =
+            tcache_get(false)) != NULL)) {
             tcache_dalloc_large(tcache, ptr, size);
         } else
             arena_dalloc_large(chunk->arena, chunk, ptr);
@@ -1165,13 +1169,15 @@ arena_sdalloc(arena_chunk_t *chunk, void *ptr, size_t size, bool try_tcache)
     assert(ptr != NULL);
     assert(CHUNK_ADDR2BASE(ptr) != ptr);

-    if (size < PAGE) {
+    if (likely(size <= SMALL_MAXCLASS)) {
         /* Small allocation. */
-        if (try_tcache && (tcache = tcache_get(false)) != NULL) {
+        if (likely(try_tcache) && likely((tcache = tcache_get(false)) !=
+            NULL)) {
             size_t binind = small_size2bin(size);
             tcache_dalloc_small(tcache, ptr, binind);
         } else {
-            size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
+            size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >>
+                LG_PAGE;
             arena_dalloc_small(chunk->arena, chunk, ptr, pageind);
         }
     } else {
@@ -814,7 +814,7 @@ JEMALLOC_ALWAYS_INLINE void
 iqalloc(void *ptr, bool try_tcache)
 {

-    if (config_fill && opt_quarantine)
+    if (config_fill && unlikely(opt_quarantine))
         quarantine(ptr);
     else
         idalloct(ptr, try_tcache);
@@ -824,7 +824,7 @@ JEMALLOC_ALWAYS_INLINE void
 isqalloc(void *ptr, size_t size, bool try_tcache)
 {

-    if (config_fill && opt_quarantine)
+    if (config_fill && unlikely(opt_quarantine))
         quarantine(ptr);
     else
         isdalloct(ptr, size, try_tcache);
@@ -400,7 +400,8 @@ prof_alloc_prep(size_t usize, bool update)

     assert(usize == s2u(usize));

-    if (!opt_prof_active || prof_sample_accum_update(usize, update, &tdata))
+    if (!opt_prof_active || likely(prof_sample_accum_update(usize, update,
+        &tdata)))
         ret = (prof_tctx_t *)(uintptr_t)1U;
     else {
         bt_init(&bt, tdata->vec);
@@ -419,7 +420,7 @@ prof_malloc(const void *ptr, size_t usize, prof_tctx_t *tctx)
     assert(ptr != NULL);
     assert(usize == isalloc(ptr, true));

-    if ((uintptr_t)tctx > (uintptr_t)1U)
+    if (unlikely((uintptr_t)tctx > (uintptr_t)1U))
         prof_malloc_sample_object(ptr, usize, tctx);
     else
         prof_tctx_set(ptr, (prof_tctx_t *)(uintptr_t)1U);
@@ -447,9 +448,9 @@ prof_realloc(const void *ptr, size_t usize, prof_tctx_t *tctx, bool updated,
         }
     }

-    if ((uintptr_t)old_tctx > (uintptr_t)1U)
+    if (unlikely((uintptr_t)old_tctx > (uintptr_t)1U))
         prof_free_sampled_object(old_usize, old_tctx);
-    if ((uintptr_t)tctx > (uintptr_t)1U)
+    if (unlikely((uintptr_t)tctx > (uintptr_t)1U))
         prof_malloc_sample_object(ptr, usize, tctx);
     else
         prof_tctx_set(ptr, (prof_tctx_t *)(uintptr_t)1U);
@@ -463,7 +464,7 @@ prof_free(const void *ptr, size_t usize)
     cassert(config_prof);
     assert(usize == isalloc(ptr, true));

-    if ((uintptr_t)tctx > (uintptr_t)1U)
+    if (unlikely((uintptr_t)tctx > (uintptr_t)1U))
         prof_free_sampled_object(usize, tctx);
 }
 #endif
@@ -236,7 +236,7 @@ tcache_event(tcache_t *tcache)

     tcache->ev_cnt++;
     assert(tcache->ev_cnt <= TCACHE_GC_INCR);
-    if (tcache->ev_cnt == TCACHE_GC_INCR)
+    if (unlikely(tcache->ev_cnt == TCACHE_GC_INCR))
         tcache_event_hard(tcache);
 }

@@ -245,12 +245,12 @@ tcache_alloc_easy(tcache_bin_t *tbin)
 {
     void *ret;

-    if (tbin->ncached == 0) {
+    if (unlikely(tbin->ncached == 0)) {
         tbin->low_water = -1;
         return (NULL);
     }
     tbin->ncached--;
-    if ((int)tbin->ncached < tbin->low_water)
+    if (unlikely((int)tbin->ncached < tbin->low_water))
         tbin->low_water = tbin->ncached;
     ret = tbin->avail[tbin->ncached];
     return (ret);
@@ -268,23 +268,23 @@ tcache_alloc_small(tcache_t *tcache, size_t size, bool zero)
     tbin = &tcache->tbins[binind];
     size = small_bin2size(binind);
     ret = tcache_alloc_easy(tbin);
-    if (ret == NULL) {
+    if (unlikely(ret == NULL)) {
         ret = tcache_alloc_small_hard(tcache, tbin, binind);
         if (ret == NULL)
             return (NULL);
     }
     assert(tcache_salloc(ret) == size);

-    if (zero == false) {
+    if (likely(zero == false)) {
         if (config_fill) {
-            if (opt_junk) {
+            if (unlikely(opt_junk)) {
                 arena_alloc_junk_small(ret,
                     &arena_bin_info[binind], false);
-            } else if (opt_zero)
+            } else if (unlikely(opt_zero))
                 memset(ret, 0, size);
         }
     } else {
-        if (config_fill && opt_junk) {
+        if (config_fill && unlikely(opt_junk)) {
             arena_alloc_junk_small(ret, &arena_bin_info[binind],
                 true);
         }
@@ -312,7 +312,7 @@ tcache_alloc_large(tcache_t *tcache, size_t size, bool zero)
     assert(binind < nhbins);
     tbin = &tcache->tbins[binind];
     ret = tcache_alloc_easy(tbin);
-    if (ret == NULL) {
+    if (unlikely(ret == NULL)) {
         /*
          * Only allocate one large object at a time, because it's quite
          * expensive to create one and not use it.
@@ -329,11 +329,11 @@ tcache_alloc_large(tcache_t *tcache, size_t size, bool zero)
             arena_mapbits_large_binind_set(chunk, pageind,
                 BININD_INVALID);
         }
-        if (zero == false) {
+        if (likely(zero == false)) {
             if (config_fill) {
-                if (opt_junk)
+                if (unlikely(opt_junk))
                     memset(ret, 0xa5, size);
-                else if (opt_zero)
+                else if (unlikely(opt_zero))
                     memset(ret, 0, size);
             }
         } else
@@ -357,12 +357,12 @@ tcache_dalloc_small(tcache_t *tcache, void *ptr, size_t binind)

     assert(tcache_salloc(ptr) <= SMALL_MAXCLASS);

-    if (config_fill && opt_junk)
+    if (config_fill && unlikely(opt_junk))
         arena_dalloc_junk_small(ptr, &arena_bin_info[binind]);

     tbin = &tcache->tbins[binind];
     tbin_info = &tcache_bin_info[binind];
-    if (tbin->ncached == tbin_info->ncached_max) {
+    if (unlikely(tbin->ncached == tbin_info->ncached_max)) {
         tcache_bin_flush_small(tbin, binind, (tbin_info->ncached_max >>
             1), tcache);
     }
@@ -386,12 +386,12 @@ tcache_dalloc_large(tcache_t *tcache, void *ptr, size_t size)

     binind = NBINS + (size >> LG_PAGE) - 1;

-    if (config_fill && opt_junk)
+    if (config_fill && unlikely(opt_junk))
         memset(ptr, 0x5a, size);

     tbin = &tcache->tbins[binind];
     tbin_info = &tcache_bin_info[binind];
-    if (tbin->ncached == tbin_info->ncached_max) {
+    if (unlikely(tbin->ncached == tbin_info->ncached_max)) {
         tcache_bin_flush_large(tbin, binind, (tbin_info->ncached_max >>
             1), tcache);
     }
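
The source-file hunks that follow nearly all share one shape, sketched here with the diff's own names: the compile-time feature flag stays outside the hint and the runtime option goes inside it. config_fill is a compile-time constant, so when fill support is compiled out the whole branch folds away; when it is compiled in, unlikely() marks the off-by-default option as the cold case.

    /* Recurring pattern in the hunks below (sketch): constant guard
     * outside the hint, runtime option inside it. */
    if (config_fill && unlikely(opt_junk))
        memset(ptr, 0x5a, size);    /* cold: junk fill is off by default */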
src/arena.c (28 lines changed)
@@ -1365,7 +1365,7 @@ arena_tcache_fill_small(arena_t *arena, tcache_bin_t *tbin, size_t binind,
             ptr = arena_bin_malloc_hard(arena, bin);
         if (ptr == NULL)
             break;
-        if (config_fill && opt_junk) {
+        if (config_fill && unlikely(opt_junk)) {
             arena_alloc_junk_small(ptr, &arena_bin_info[binind],
                 true);
         }
@@ -1519,15 +1519,15 @@ arena_malloc_small(arena_t *arena, size_t size, bool zero)

     if (zero == false) {
         if (config_fill) {
-            if (opt_junk) {
+            if (unlikely(opt_junk)) {
                 arena_alloc_junk_small(ret,
                     &arena_bin_info[binind], false);
-            } else if (opt_zero)
+            } else if (unlikely(opt_zero))
                 memset(ret, 0, size);
         }
         JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ret, size);
     } else {
-        if (config_fill && opt_junk) {
+        if (config_fill && unlikely(opt_junk)) {
             arena_alloc_junk_small(ret, &arena_bin_info[binind],
                 true);
         }
@@ -1568,9 +1568,9 @@ arena_malloc_large(arena_t *arena, size_t size, bool zero)

     if (zero == false) {
         if (config_fill) {
-            if (opt_junk)
+            if (unlikely(opt_junk))
                 memset(ret, 0xa5, size);
-            else if (opt_zero)
+            else if (unlikely(opt_zero))
                 memset(ret, 0, size);
         }
     }
@@ -1626,9 +1626,9 @@ arena_palloc(arena_t *arena, size_t size, size_t alignment, bool zero)
     malloc_mutex_unlock(&arena->lock);

     if (config_fill && zero == false) {
-        if (opt_junk)
+        if (unlikely(opt_junk))
             memset(ret, 0xa5, size);
-        else if (opt_zero)
+        else if (unlikely(opt_zero))
             memset(ret, 0, size);
     }
     return (ret);
@@ -1771,7 +1771,7 @@ arena_dalloc_bin_locked(arena_t *arena, arena_chunk_t *chunk, void *ptr,
     if (config_fill || config_stats)
         size = bin_info->reg_size;

-    if (config_fill && opt_junk)
+    if (config_fill && unlikely(opt_junk))
         arena_dalloc_junk_small(ptr, bin_info);

     arena_run_reg_dalloc(run, ptr);
@@ -1825,7 +1825,7 @@ static void
 arena_dalloc_junk_large(void *ptr, size_t usize)
 {

-    if (config_fill && opt_junk)
+    if (config_fill && unlikely(opt_junk))
         memset(ptr, 0x5a, usize);
 }
 #ifdef JEMALLOC_JET
@@ -1967,7 +1967,7 @@ static void
 arena_ralloc_junk_large(void *ptr, size_t old_usize, size_t usize)
 {

-    if (config_fill && opt_junk) {
+    if (config_fill && unlikely(opt_junk)) {
         memset((void *)((uintptr_t)ptr + usize), 0x5a,
             old_usize - usize);
     }
@@ -2011,11 +2011,11 @@ arena_ralloc_large(void *ptr, size_t oldsize, size_t size, size_t extra,
                 oldsize, PAGE_CEILING(size),
                 psize - PAGE_CEILING(size), zero);
             if (config_fill && ret == false && zero == false) {
-                if (opt_junk) {
+                if (unlikely(opt_junk)) {
                     memset((void *)((uintptr_t)ptr +
                         oldsize), 0xa5, isalloc(ptr,
                         config_prof) - oldsize);
-                } else if (opt_zero) {
+                } else if (unlikely(opt_zero)) {
                     memset((void *)((uintptr_t)ptr +
                         oldsize), 0, isalloc(ptr,
                         config_prof) - oldsize);
@@ -2272,7 +2272,7 @@ bin_info_run_size_calc(arena_bin_info_t *bin_info, size_t min_run_size)
      * minimum alignment; without the padding, each redzone would have to
      * be twice as large in order to maintain alignment.
      */
-    if (config_fill && opt_redzone) {
+    if (config_fill && unlikely(opt_redzone)) {
         size_t align_min = ZU(1) << (jemalloc_ffs(bin_info->reg_size) -
             1);
         if (align_min <= REDZONE_MINSIZE) {
@@ -62,9 +62,9 @@ huge_palloc(arena_t *arena, size_t size, size_t alignment, bool zero)
     malloc_mutex_unlock(&huge_mtx);

     if (config_fill && zero == false) {
-        if (opt_junk)
+        if (unlikely(opt_junk))
             memset(ret, 0xa5, csize);
-        else if (opt_zero && is_zeroed == false)
+        else if (unlikely(opt_zero) && is_zeroed == false)
             memset(ret, 0, csize);
     }

@@ -141,7 +141,7 @@ static void
 huge_dalloc_junk(void *ptr, size_t usize)
 {

-    if (config_fill && have_dss && opt_junk) {
+    if (config_fill && have_dss && unlikely(opt_junk)) {
         /*
          * Only bother junk filling if the chunk isn't about to be
          * unmapped.
src/jemalloc.c (130 lines changed)
@@ -87,7 +87,7 @@ typedef struct {

 #ifdef JEMALLOC_UTRACE
 # define UTRACE(a, b, c) do { \
-    if (opt_utrace) { \
+    if (unlikely(opt_utrace)) { \
         int utrace_serrno = errno; \
         malloc_utrace_t ut; \
         ut.p = (a); \
@@ -283,7 +283,7 @@ malloc_thread_init(void)
      * a best effort attempt at initializing its TSD by hooking all
      * allocation events.
      */
-    if (config_fill && opt_quarantine)
+    if (config_fill && unlikely(opt_quarantine))
         quarantine_alloc_hook();
 }

@@ -397,13 +397,13 @@ malloc_conf_init(void)
      */
     if (config_valgrind) {
         in_valgrind = (RUNNING_ON_VALGRIND != 0) ? true : false;
-        if (config_fill && in_valgrind) {
+        if (config_fill && unlikely(in_valgrind)) {
             opt_junk = false;
             assert(opt_zero == false);
             opt_quarantine = JEMALLOC_VALGRIND_QUARANTINE_DEFAULT;
             opt_redzone = true;
         }
-        if (config_tcache && in_valgrind)
+        if (config_tcache && unlikely(in_valgrind))
             opt_tcache = false;
     }

@@ -887,7 +887,7 @@ imalloc_prof(size_t usize)
     prof_tctx_t *tctx;

     tctx = prof_alloc_prep(usize, true);
-    if ((uintptr_t)tctx != (uintptr_t)1U)
+    if (unlikely((uintptr_t)tctx != (uintptr_t)1U))
         p = imalloc_prof_sample(usize, tctx);
     else
         p = imalloc(usize);
@@ -912,7 +912,7 @@ imalloc_body(size_t size, size_t *usize)
         return (imalloc_prof(*usize));
     }

-    if (config_stats || (unlikely(config_valgrind && in_valgrind)))
+    if (config_stats || (config_valgrind && unlikely(in_valgrind)))
         *usize = s2u(size);
     return (imalloc(size));
 }
@@ -927,15 +927,15 @@ je_malloc(size_t size)
         size = 1;

     ret = imalloc_body(size, &usize);
-    if (ret == NULL) {
-        if (config_xmalloc && opt_xmalloc) {
+    if (unlikely(ret == NULL)) {
+        if (config_xmalloc && unlikely(opt_xmalloc)) {
             malloc_write("<jemalloc>: Error in malloc(): "
                 "out of memory\n");
             abort();
         }
         set_errno(ENOMEM);
     }
-    if (config_stats && ret != NULL) {
+    if (config_stats && likely(ret != NULL)) {
         assert(usize == isalloc(ret, config_prof));
         thread_allocated_tsd_get()->allocated += usize;
     }
@@ -970,7 +970,7 @@ imemalign_prof(size_t alignment, size_t usize)
     prof_tctx_t *tctx;

     tctx = prof_alloc_prep(usize, true);
-    if ((uintptr_t)tctx != (uintptr_t)1U)
+    if (unlikely((uintptr_t)tctx != (uintptr_t)1U))
         p = imemalign_prof_sample(alignment, usize, tctx);
     else
         p = ipalloc(usize, alignment, false);
@@ -1001,9 +1001,9 @@ imemalign(void **memptr, size_t alignment, size_t size, size_t min_alignment)
             size = 1;

         /* Make sure that alignment is a large enough power of 2. */
-        if (((alignment - 1) & alignment) != 0
-            || (alignment < min_alignment)) {
+        if (unlikely(((alignment - 1) & alignment) != 0
+            || (alignment < min_alignment))) {
-            if (config_xmalloc && opt_xmalloc) {
+            if (config_xmalloc && unlikely(opt_xmalloc)) {
                 malloc_write("<jemalloc>: Error allocating "
                     "aligned memory: invalid alignment\n");
                 abort();
@@ -1014,7 +1014,7 @@ imemalign(void **memptr, size_t alignment, size_t size, size_t min_alignment)
         }

         usize = sa2u(size, alignment);
-        if (usize == 0) {
+        if (unlikely(usize == 0)) {
             result = NULL;
             goto label_oom;
         }
@@ -1023,14 +1023,14 @@ imemalign(void **memptr, size_t alignment, size_t size, size_t min_alignment)
             result = imemalign_prof(alignment, usize);
         else
             result = ipalloc(usize, alignment, false);
-        if (result == NULL)
+        if (unlikely(result == NULL))
             goto label_oom;
     }

     *memptr = result;
     ret = 0;
 label_return:
-    if (config_stats && result != NULL) {
+    if (config_stats && likely(result != NULL)) {
         assert(usize == isalloc(result, config_prof));
         thread_allocated_tsd_get()->allocated += usize;
     }
@@ -1038,7 +1038,7 @@ label_return:
     return (ret);
 label_oom:
     assert(result == NULL);
-    if (config_xmalloc && opt_xmalloc) {
+    if (config_xmalloc && unlikely(opt_xmalloc)) {
         malloc_write("<jemalloc>: Error allocating aligned memory: "
             "out of memory\n");
         abort();
@@ -1062,7 +1062,7 @@ je_aligned_alloc(size_t alignment, size_t size)
     void *ret;
     int err;

-    if ((err = imemalign(&ret, alignment, size, 1)) != 0) {
+    if (unlikely((err = imemalign(&ret, alignment, size, 1)) != 0)) {
         ret = NULL;
         set_errno(err);
     }
@@ -1096,7 +1096,7 @@ icalloc_prof(size_t usize)
     prof_tctx_t *tctx;

     tctx = prof_alloc_prep(usize, true);
-    if ((uintptr_t)tctx != (uintptr_t)1U)
+    if (unlikely((uintptr_t)tctx != (uintptr_t)1U))
         p = icalloc_prof_sample(usize, tctx);
     else
         p = icalloc(usize);
@@ -1123,7 +1123,7 @@ je_calloc(size_t num, size_t size)
     }

     num_size = num * size;
-    if (num_size == 0) {
+    if (unlikely(num_size == 0)) {
         if (num == 0 || size == 0)
             num_size = 1;
         else {
@@ -1135,8 +1135,8 @@ je_calloc(size_t num, size_t size)
      * overflow during multiplication if neither operand uses any of the
      * most significant half of the bits in a size_t.
      */
-    } else if (((num | size) & (SIZE_T_MAX << (sizeof(size_t) << 2)))
-        && (num_size / size != num)) {
+    } else if (unlikely(((num | size) & (SIZE_T_MAX << (sizeof(size_t) <<
+        2))) && (num_size / size != num))) {
         /* size_t overflow. */
         ret = NULL;
         goto label_return;
@@ -1146,21 +1146,21 @@ je_calloc(size_t num, size_t size)
         usize = s2u(num_size);
         ret = icalloc_prof(usize);
     } else {
-        if (config_stats || unlikely(config_valgrind && in_valgrind))
+        if (config_stats || (config_valgrind && unlikely(in_valgrind)))
             usize = s2u(num_size);
         ret = icalloc(num_size);
     }

 label_return:
-    if (ret == NULL) {
-        if (config_xmalloc && opt_xmalloc) {
+    if (unlikely(ret == NULL)) {
+        if (config_xmalloc && unlikely(opt_xmalloc)) {
             malloc_write("<jemalloc>: Error in calloc(): out of "
                 "memory\n");
             abort();
         }
         set_errno(ENOMEM);
     }
-    if (config_stats && ret != NULL) {
+    if (config_stats && likely(ret != NULL)) {
         assert(usize == isalloc(ret, config_prof));
         thread_allocated_tsd_get()->allocated += usize;
     }
@@ -1195,7 +1195,7 @@ irealloc_prof(void *oldptr, size_t old_usize, size_t usize)

     old_tctx = prof_tctx_get(oldptr);
     tctx = prof_alloc_prep(usize, true);
-    if ((uintptr_t)tctx != (uintptr_t)1U)
+    if (unlikely((uintptr_t)tctx != (uintptr_t)1U))
         p = irealloc_prof_sample(oldptr, usize, tctx);
     else
         p = iralloc(oldptr, usize, 0, false);
@@ -1222,7 +1222,7 @@ ifree(void *ptr, bool try_tcache)
     usize = isalloc(ptr, config_prof);
     if (config_stats)
         thread_allocated_tsd_get()->deallocated += usize;
-    if (unlikely(config_valgrind && in_valgrind))
+    if (config_valgrind && unlikely(in_valgrind))
         rzsize = p2rz(ptr);
     iqalloc(ptr, try_tcache);
     JEMALLOC_VALGRIND_FREE(ptr, rzsize);
@@ -1240,7 +1240,7 @@ isfree(void *ptr, size_t usize, bool try_tcache)
     prof_free(ptr, usize);
     if (config_stats)
         thread_allocated_tsd_get()->deallocated += usize;
-    if (unlikely(config_valgrind && in_valgrind))
+    if (config_valgrind && unlikely(in_valgrind))
         rzsize = p2rz(ptr);
     isqalloc(ptr, usize, try_tcache);
     JEMALLOC_VALGRIND_FREE(ptr, rzsize);
@@ -1254,7 +1254,7 @@ je_realloc(void *ptr, size_t size)
     size_t old_usize = 0;
     UNUSED size_t old_rzsize JEMALLOC_CC_SILENCE_INIT(0);

-    if (size == 0) {
+    if (unlikely(size == 0)) {
         if (ptr != NULL) {
             /* realloc(ptr, 0) is equivalent to free(ptr). */
             UTRACE(ptr, 0, 0);
@@ -1264,21 +1264,22 @@ je_realloc(void *ptr, size_t size)
         size = 1;
     }

-    if (ptr != NULL) {
+    if (likely(ptr != NULL)) {
         assert(malloc_initialized || IS_INITIALIZER);
         malloc_thread_init();

         if ((config_prof && opt_prof) || config_stats ||
-            unlikely(config_valgrind && in_valgrind))
+            (config_valgrind && unlikely(in_valgrind)))
             old_usize = isalloc(ptr, config_prof);
-        if (unlikely(config_valgrind && in_valgrind))
+        if (config_valgrind && unlikely(in_valgrind))
             old_rzsize = config_prof ? p2rz(ptr) : u2rz(old_usize);

         if (config_prof && opt_prof) {
             usize = s2u(size);
             ret = irealloc_prof(ptr, old_usize, usize);
         } else {
-            if (config_stats || unlikely(config_valgrind && in_valgrind))
+            if (config_stats || (config_valgrind &&
+                unlikely(in_valgrind)))
                 usize = s2u(size);
             ret = iralloc(ptr, size, 0, false);
         }
@@ -1287,15 +1288,15 @@ je_realloc(void *ptr, size_t size)
         ret = imalloc_body(size, &usize);
     }

-    if (ret == NULL) {
-        if (config_xmalloc && opt_xmalloc) {
+    if (unlikely(ret == NULL)) {
+        if (config_xmalloc && unlikely(opt_xmalloc)) {
             malloc_write("<jemalloc>: Error in realloc(): "
                 "out of memory\n");
             abort();
         }
         set_errno(ENOMEM);
     }
-    if (config_stats && ret != NULL) {
+    if (config_stats && likely(ret != NULL)) {
         thread_allocated_t *ta;
         assert(usize == isalloc(ret, config_prof));
         ta = thread_allocated_tsd_get();
@@ -1313,7 +1314,7 @@ je_free(void *ptr)
 {

     UTRACE(ptr, 0, 0);
-    if (ptr != NULL)
+    if (likely(ptr != NULL))
         ifree(ptr, true);
 }

@@ -1410,7 +1411,7 @@ imallocx_flags_decode(size_t size, int flags, size_t *usize, size_t *alignment,
     bool *zero, bool *try_tcache, arena_t **arena)
 {

-    if (flags == 0) {
+    if (likely(flags == 0)) {
         *usize = s2u(size);
         assert(usize != 0);
         *alignment = 0;
@@ -1440,7 +1441,7 @@ imallocx_maybe_flags(size_t size, int flags, size_t usize, size_t alignment,
     bool zero, bool try_tcache, arena_t *arena)
 {

-    if (flags == 0)
+    if (likely(flags == 0))
         return (imalloc(size));
     return (imallocx_flags(usize, alignment, zero, try_tcache, arena));
 }
@@ -1479,7 +1480,7 @@ imallocx_prof(size_t size, int flags, size_t *usize)
     imallocx_flags_decode(size, flags, usize, &alignment, &zero,
         &try_tcache, &arena);
     tctx = prof_alloc_prep(*usize, true);
-    if ((uintptr_t)tctx == (uintptr_t)1U) {
+    if (likely((uintptr_t)tctx == (uintptr_t)1U)) {
         p = imallocx_maybe_flags(size, flags, *usize, alignment, zero,
             try_tcache, arena);
     } else if ((uintptr_t)tctx > (uintptr_t)1U) {
@@ -1487,7 +1488,7 @@ imallocx_prof(size_t size, int flags, size_t *usize)
         try_tcache, arena);
     } else
         p = NULL;
-    if (p == NULL) {
+    if (unlikely(p == NULL)) {
         prof_alloc_rollback(tctx, true);
         return (NULL);
     }
@@ -1504,8 +1505,8 @@ imallocx_no_prof(size_t size, int flags, size_t *usize)
     bool try_tcache;
     arena_t *arena;

-    if (flags == 0) {
-        if (config_stats || unlikely(config_valgrind && in_valgrind))
+    if (likely(flags == 0)) {
+        if (config_stats || (config_valgrind && unlikely(in_valgrind)))
             *usize = s2u(size);
         return (imalloc(size));
     }
@@ -1530,7 +1531,7 @@ je_mallocx(size_t size, int flags)
         p = imallocx_prof(size, flags, &usize);
     else
         p = imallocx_no_prof(size, flags, &usize);
-    if (p == NULL)
+    if (unlikely(p == NULL))
         goto label_oom;

     if (config_stats) {
@@ -1541,7 +1542,7 @@ je_mallocx(size_t size, int flags)
     JEMALLOC_VALGRIND_MALLOC(true, p, usize, MALLOCX_ZERO_GET(flags));
     return (p);
 label_oom:
-    if (config_xmalloc && opt_xmalloc) {
+    if (config_xmalloc && unlikely(opt_xmalloc)) {
         malloc_write("<jemalloc>: Error in mallocx(): out of memory\n");
         abort();
     }
@@ -1582,14 +1583,14 @@ irallocx_prof(void *oldptr, size_t old_usize, size_t size, size_t alignment,

     old_tctx = prof_tctx_get(oldptr);
     tctx = prof_alloc_prep(*usize, true);
-    if ((uintptr_t)tctx != (uintptr_t)1U) {
+    if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
         p = irallocx_prof_sample(oldptr, size, alignment, *usize, zero,
             try_tcache_alloc, try_tcache_dalloc, arena, tctx);
     } else {
         p = iralloct(oldptr, size, alignment, zero, try_tcache_alloc,
             try_tcache_dalloc, arena);
     }
-    if (p == NULL) {
+    if (unlikely(p == NULL)) {
         prof_alloc_rollback(tctx, true);
         return (NULL);
     }
@@ -1614,7 +1615,8 @@ void *
 je_rallocx(void *ptr, size_t size, int flags)
 {
     void *p;
-    size_t usize, old_usize;
+    size_t usize;
+    UNUSED size_t old_usize JEMALLOC_CC_SILENCE_INIT(0);
     UNUSED size_t old_rzsize JEMALLOC_CC_SILENCE_INIT(0);
     size_t alignment = MALLOCX_ALIGN_GET(flags);
     bool zero = flags & MALLOCX_ZERO;
@@ -1626,7 +1628,7 @@ je_rallocx(void *ptr, size_t size, int flags)
     assert(malloc_initialized || IS_INITIALIZER);
     malloc_thread_init();

-    if ((flags & MALLOCX_ARENA_MASK) != 0) {
+    if (unlikely((flags & MALLOCX_ARENA_MASK) != 0)) {
         unsigned arena_ind = MALLOCX_ARENA_GET(flags);
         arena_chunk_t *chunk;
         try_tcache_alloc = false;
@@ -1641,9 +1643,9 @@ je_rallocx(void *ptr, size_t size, int flags)
     }

     if ((config_prof && opt_prof) || config_stats ||
-        (unlikely(config_valgrind && in_valgrind)))
+        ((config_valgrind && unlikely(in_valgrind))))
         old_usize = isalloc(ptr, config_prof);
-    if (unlikely(config_valgrind && in_valgrind))
+    if (config_valgrind && unlikely(in_valgrind))
         old_rzsize = u2rz(old_usize);

     if (config_prof && opt_prof) {
@@ -1651,14 +1653,14 @@ je_rallocx(void *ptr, size_t size, int flags)
         assert(usize != 0);
         p = irallocx_prof(ptr, old_usize, size, alignment, &usize, zero,
             try_tcache_alloc, try_tcache_dalloc, arena);
-        if (p == NULL)
+        if (unlikely(p == NULL))
             goto label_oom;
     } else {
         p = iralloct(ptr, size, alignment, zero, try_tcache_alloc,
             try_tcache_dalloc, arena);
-        if (p == NULL)
+        if (unlikely(p == NULL))
             goto label_oom;
-        if (config_stats || (config_valgrind && in_valgrind))
+        if (config_stats || (config_valgrind && unlikely(in_valgrind)))
             usize = isalloc(p, config_prof);
     }

@@ -1673,7 +1675,7 @@ je_rallocx(void *ptr, size_t size, int flags)
         old_rzsize, false, zero);
     return (p);
 label_oom:
-    if (config_xmalloc && opt_xmalloc) {
+    if (config_xmalloc && unlikely(opt_xmalloc)) {
         malloc_write("<jemalloc>: Error in rallocx(): out of memory\n");
         abort();
     }
@@ -1738,14 +1740,14 @@ ixallocx_prof(void *ptr, size_t old_usize, size_t size, size_t extra,
     max_usize = (alignment == 0) ? s2u(size+extra) : sa2u(size+extra,
         alignment);
     tctx = prof_alloc_prep(max_usize, false);
-    if ((uintptr_t)tctx != (uintptr_t)1U) {
+    if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
         usize = ixallocx_prof_sample(ptr, old_usize, size, extra,
             alignment, zero, max_usize, arena, tctx);
     } else {
         usize = ixallocx_helper(ptr, old_usize, size, extra, alignment,
             zero, arena);
     }
-    if (usize == old_usize) {
+    if (unlikely(usize == old_usize)) {
         prof_alloc_rollback(tctx, false);
         return (usize);
     }
@@ -1769,14 +1771,14 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags)
     assert(malloc_initialized || IS_INITIALIZER);
     malloc_thread_init();

-    if ((flags & MALLOCX_ARENA_MASK) != 0) {
+    if (unlikely((flags & MALLOCX_ARENA_MASK) != 0)) {
         unsigned arena_ind = MALLOCX_ARENA_GET(flags);
         arena = arenas[arena_ind];
     } else
         arena = NULL;

     old_usize = isalloc(ptr, config_prof);
-    if (unlikely(config_valgrind && in_valgrind))
+    if (config_valgrind && unlikely(in_valgrind))
         old_rzsize = u2rz(old_usize);

     if (config_prof && opt_prof) {
@@ -1786,7 +1788,7 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags)
         usize = ixallocx_helper(ptr, old_usize, size, extra, alignment,
             zero, arena);
     }
-    if (usize == old_usize)
+    if (unlikely(usize == old_usize))
         goto label_not_resized;

     if (config_stats) {
@@ -1828,7 +1830,7 @@ je_dallocx(void *ptr, int flags)
     assert(ptr != NULL);
     assert(malloc_initialized || IS_INITIALIZER);

-    if ((flags & MALLOCX_ARENA_MASK) != 0) {
+    if (unlikely((flags & MALLOCX_ARENA_MASK) != 0)) {
         unsigned arena_ind = MALLOCX_ARENA_GET(flags);
         arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
         try_tcache = (chunk == ptr || chunk->arena !=
@@ -1845,7 +1847,7 @@ inallocx(size_t size, int flags)
 {
     size_t usize;

-    if ((flags & MALLOCX_LG_ALIGN_MASK) == 0)
+    if (likely((flags & MALLOCX_LG_ALIGN_MASK) == 0))
         usize = s2u(size);
     else
         usize = sa2u(size, MALLOCX_ALIGN_GET_SPECIFIED(flags));
@@ -1864,7 +1866,7 @@ je_sdallocx(void *ptr, size_t size, int flags)
     usize = inallocx(size, flags);
     assert(usize == isalloc(ptr, config_prof));

-    if ((flags & MALLOCX_ARENA_MASK) != 0) {
+    if (unlikely((flags & MALLOCX_ARENA_MASK) != 0)) {
         unsigned arena_ind = MALLOCX_ARENA_GET(flags);
         arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
         try_tcache = (chunk == ptr || chunk->arena !=
@@ -141,12 +141,12 @@ quarantine(void *ptr)
     obj->usize = usize;
     quarantine->curbytes += usize;
     quarantine->curobjs++;
-    if (config_fill && opt_junk) {
+    if (config_fill && unlikely(opt_junk)) {
         /*
          * Only do redzone validation if Valgrind isn't in
          * operation.
          */
-        if ((config_valgrind == false || in_valgrind == false)
+        if ((!config_valgrind || likely(!in_valgrind))
             && usize <= SMALL_MAXCLASS)
             arena_quarantine_junk_small(ptr, usize);
         else
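
One systematic cleanup rides along with the new hints: tests of the form unlikely(config_valgrind && in_valgrind) become config_valgrind && unlikely(in_valgrind). Both behave the same at runtime, but the second form keeps the compile-time constant config_valgrind outside the hint, so a build without Valgrind support deletes the test entirely and the hint covers only the genuinely runtime-variable flag. A sketch of the normalized form, using names from the diff:

    if (config_valgrind && unlikely(in_valgrind))
        rzsize = p2rz(ptr);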