Apply likely()/unlikely() to allocation/deallocation fast paths.
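For context: likely() and unlikely() are branch-prediction hints built on GCC's __builtin_expect(), which tells the compiler which outcome of a condition is expected so it can lay out the hot path as straight-line, fall-through code. A minimal sketch of how such macros are conventionally defined (jemalloc's actual definitions live in its internal headers and may differ in detail):

    /* Branch-prediction hints; no-ops on compilers without __builtin_expect(). */
    #ifdef __GNUC__
    #  define likely(x)   __builtin_expect(!!(x), 1)
    #  define unlikely(x) __builtin_expect(!!(x), 0)
    #else
    #  define likely(x)   (x)
    #  define unlikely(x) (x)
    #endif

The !!(x) collapses any nonzero value to 1 so the expression matches the expected constant. In the hunks below, conditions that dominate in practice (small allocations, a usable tcache) are wrapped in likely(), while rare ones (empty tcache bins, junk/zero fill options, profiler-sampled objects) are wrapped in unlikely().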
@@ -488,7 +488,8 @@ void arena_prof_tctx_set(const void *ptr, prof_tctx_t *tctx);
 void *arena_malloc(arena_t *arena, size_t size, bool zero, bool try_tcache);
 size_t arena_salloc(const void *ptr, bool demote);
 void arena_dalloc(arena_chunk_t *chunk, void *ptr, bool try_tcache);
-void arena_sdalloc(arena_chunk_t *chunk, void *ptr, size_t size, bool try_tcache);
+void arena_sdalloc(arena_chunk_t *chunk, void *ptr, size_t size,
+    bool try_tcache);
 #endif
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
@@ -539,7 +540,7 @@ small_size2bin(size_t size)
 {
 
 	assert(size > 0);
-	if (size <= LOOKUP_MAXCLASS)
+	if (likely(size <= LOOKUP_MAXCLASS))
 		return (small_size2bin_lookup(size));
 	else
 		return (small_size2bin_compute(size));
@@ -627,7 +628,7 @@ small_s2u(size_t size)
 {
 
 	assert(size > 0);
-	if (size <= LOOKUP_MAXCLASS)
+	if (likely(size <= LOOKUP_MAXCLASS))
 		return (small_s2u_lookup(size));
 	else
 		return (small_s2u_compute(size));
@@ -864,7 +865,7 @@ arena_prof_accum_locked(arena_t *arena, uint64_t accumbytes)
 
 	cassert(config_prof);
 
-	if (prof_interval == 0)
+	if (likely(prof_interval == 0))
 		return (false);
 	return (arena_prof_accum_impl(arena, accumbytes));
 }
@@ -875,7 +876,7 @@ arena_prof_accum(arena_t *arena, uint64_t accumbytes)
 
 	cassert(config_prof);
 
-	if (prof_interval == 0)
+	if (likely(prof_interval == 0))
 		return (false);
 
 	{
@@ -995,8 +996,8 @@ arena_run_regind(arena_run_t *run, arena_bin_info_t *bin_info, const void *ptr)
 		    SIZE_INV(28), SIZE_INV(29), SIZE_INV(30), SIZE_INV(31)
 		};
 
-		if (interval <= ((sizeof(interval_invs) / sizeof(unsigned)) +
-		    2)) {
+		if (likely(interval <= ((sizeof(interval_invs) /
+		    sizeof(unsigned)) + 2))) {
 			regind = (diff * interval_invs[interval - 3]) >>
 			    SIZE_INV_SHIFT;
 		} else
@@ -1025,7 +1026,7 @@ arena_prof_tctx_get(const void *ptr)
 	pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
 	mapbits = arena_mapbits_get(chunk, pageind);
 	assert((mapbits & CHUNK_MAP_ALLOCATED) != 0);
-	if ((mapbits & CHUNK_MAP_LARGE) == 0)
+	if (likely((mapbits & CHUNK_MAP_LARGE) == 0))
 		ret = (prof_tctx_t *)(uintptr_t)1U;
 	else
 		ret = arena_miscelm_get(chunk, pageind)->prof_tctx;
@@ -1047,7 +1048,7 @@ arena_prof_tctx_set(const void *ptr, prof_tctx_t *tctx)
 	pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
 	assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
 
-	if (arena_mapbits_large_get(chunk, pageind) != 0)
+	if (unlikely(arena_mapbits_large_get(chunk, pageind) != 0))
 		arena_miscelm_get(chunk, pageind)->prof_tctx = tctx;
 }
 
@@ -1059,8 +1060,9 @@ arena_malloc(arena_t *arena, size_t size, bool zero, bool try_tcache)
 	assert(size != 0);
 	assert(size <= arena_maxclass);
 
-	if (size <= SMALL_MAXCLASS) {
-		if (try_tcache && (tcache = tcache_get(true)) != NULL)
+	if (likely(size <= SMALL_MAXCLASS)) {
+		if (likely(try_tcache) && likely((tcache = tcache_get(true)) !=
+		    NULL))
 			return (tcache_alloc_small(tcache, size, zero));
 		else {
 			return (arena_malloc_small(choose_arena(arena), size,
@@ -1071,8 +1073,8 @@ arena_malloc(arena_t *arena, size_t size, bool zero, bool try_tcache)
 		 * Initialize tcache after checking size in order to avoid
 		 * infinite recursion during tcache initialization.
 		 */
-		if (try_tcache && size <= tcache_maxclass && (tcache =
-		    tcache_get(true)) != NULL)
+		if (try_tcache && size <= tcache_maxclass && likely((tcache =
+		    tcache_get(true)) != NULL))
 			return (tcache_alloc_large(tcache, size, zero));
 		else {
 			return (arena_malloc_large(choose_arena(arena), size,
@@ -1096,8 +1098,8 @@ arena_salloc(const void *ptr, bool demote)
 	pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
 	assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
 	binind = arena_mapbits_binind_get(chunk, pageind);
-	if (binind == BININD_INVALID || (config_prof && demote == false &&
-	    arena_mapbits_large_get(chunk, pageind) != 0)) {
+	if (unlikely(binind == BININD_INVALID || (config_prof && demote == false
+	    && arena_mapbits_large_get(chunk, pageind) != 0))) {
 		/*
 		 * Large allocation. In the common case (demote == true), and
 		 * as this is an inline function, most callers will only end up
@@ -1137,10 +1139,12 @@ arena_dalloc(arena_chunk_t *chunk, void *ptr, bool try_tcache)
 	pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
 	mapbits = arena_mapbits_get(chunk, pageind);
 	assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
-	if ((mapbits & CHUNK_MAP_LARGE) == 0) {
+	if (likely((mapbits & CHUNK_MAP_LARGE) == 0)) {
 		/* Small allocation. */
-		if (try_tcache && (tcache = tcache_get(false)) != NULL) {
-			size_t binind = arena_ptr_small_binind_get(ptr, mapbits);
+		if (likely(try_tcache) && likely((tcache = tcache_get(false)) !=
+		    NULL)) {
+			size_t binind = arena_ptr_small_binind_get(ptr,
+			    mapbits);
 			tcache_dalloc_small(tcache, ptr, binind);
 		} else
 			arena_dalloc_small(chunk->arena, chunk, ptr, pageind);
@@ -1149,8 +1153,8 @@ arena_dalloc(arena_chunk_t *chunk, void *ptr, bool try_tcache)
 
 		assert(((uintptr_t)ptr & PAGE_MASK) == 0);
 
-		if (try_tcache && size <= tcache_maxclass && (tcache =
-		    tcache_get(false)) != NULL) {
+		if (try_tcache && size <= tcache_maxclass && likely((tcache =
+		    tcache_get(false)) != NULL)) {
 			tcache_dalloc_large(tcache, ptr, size);
 		} else
 			arena_dalloc_large(chunk->arena, chunk, ptr);
@@ -1165,13 +1169,15 @@ arena_sdalloc(arena_chunk_t *chunk, void *ptr, size_t size, bool try_tcache)
 	assert(ptr != NULL);
 	assert(CHUNK_ADDR2BASE(ptr) != ptr);
 
-	if (size < PAGE) {
+	if (likely(size <= SMALL_MAXCLASS)) {
 		/* Small allocation. */
-		if (try_tcache && (tcache = tcache_get(false)) != NULL) {
+		if (likely(try_tcache) && likely((tcache = tcache_get(false)) !=
+		    NULL)) {
 			size_t binind = small_size2bin(size);
 			tcache_dalloc_small(tcache, ptr, binind);
 		} else {
-			size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
+			size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >>
+			    LG_PAGE;
 			arena_dalloc_small(chunk->arena, chunk, ptr, pageind);
 		}
 	} else {
@@ -814,7 +814,7 @@ JEMALLOC_ALWAYS_INLINE void
 iqalloc(void *ptr, bool try_tcache)
 {
 
-	if (config_fill && opt_quarantine)
+	if (config_fill && unlikely(opt_quarantine))
 		quarantine(ptr);
 	else
 		idalloct(ptr, try_tcache);
@@ -824,7 +824,7 @@ JEMALLOC_ALWAYS_INLINE void
 isqalloc(void *ptr, size_t size, bool try_tcache)
 {
 
-	if (config_fill && opt_quarantine)
+	if (config_fill && unlikely(opt_quarantine))
 		quarantine(ptr);
 	else
 		isdalloct(ptr, size, try_tcache);
@@ -400,7 +400,8 @@ prof_alloc_prep(size_t usize, bool update)
 
 	assert(usize == s2u(usize));
 
-	if (!opt_prof_active || prof_sample_accum_update(usize, update, &tdata))
+	if (!opt_prof_active || likely(prof_sample_accum_update(usize, update,
+	    &tdata)))
 		ret = (prof_tctx_t *)(uintptr_t)1U;
 	else {
 		bt_init(&bt, tdata->vec);
@@ -419,7 +420,7 @@ prof_malloc(const void *ptr, size_t usize, prof_tctx_t *tctx)
 	assert(ptr != NULL);
 	assert(usize == isalloc(ptr, true));
 
-	if ((uintptr_t)tctx > (uintptr_t)1U)
+	if (unlikely((uintptr_t)tctx > (uintptr_t)1U))
 		prof_malloc_sample_object(ptr, usize, tctx);
 	else
 		prof_tctx_set(ptr, (prof_tctx_t *)(uintptr_t)1U);
@@ -447,9 +448,9 @@ prof_realloc(const void *ptr, size_t usize, prof_tctx_t *tctx, bool updated,
 		}
 	}
 
-	if ((uintptr_t)old_tctx > (uintptr_t)1U)
+	if (unlikely((uintptr_t)old_tctx > (uintptr_t)1U))
 		prof_free_sampled_object(old_usize, old_tctx);
-	if ((uintptr_t)tctx > (uintptr_t)1U)
+	if (unlikely((uintptr_t)tctx > (uintptr_t)1U))
 		prof_malloc_sample_object(ptr, usize, tctx);
 	else
 		prof_tctx_set(ptr, (prof_tctx_t *)(uintptr_t)1U);
@@ -463,7 +464,7 @@ prof_free(const void *ptr, size_t usize)
 	cassert(config_prof);
 	assert(usize == isalloc(ptr, true));
 
-	if ((uintptr_t)tctx > (uintptr_t)1U)
+	if (unlikely((uintptr_t)tctx > (uintptr_t)1U))
 		prof_free_sampled_object(usize, tctx);
 }
 #endif
@@ -236,7 +236,7 @@ tcache_event(tcache_t *tcache)
 
 	tcache->ev_cnt++;
 	assert(tcache->ev_cnt <= TCACHE_GC_INCR);
-	if (tcache->ev_cnt == TCACHE_GC_INCR)
+	if (unlikely(tcache->ev_cnt == TCACHE_GC_INCR))
 		tcache_event_hard(tcache);
 }
 
@@ -245,12 +245,12 @@ tcache_alloc_easy(tcache_bin_t *tbin)
 {
 	void *ret;
 
-	if (tbin->ncached == 0) {
+	if (unlikely(tbin->ncached == 0)) {
 		tbin->low_water = -1;
 		return (NULL);
 	}
 	tbin->ncached--;
-	if ((int)tbin->ncached < tbin->low_water)
+	if (unlikely((int)tbin->ncached < tbin->low_water))
 		tbin->low_water = tbin->ncached;
 	ret = tbin->avail[tbin->ncached];
 	return (ret);
@@ -268,23 +268,23 @@ tcache_alloc_small(tcache_t *tcache, size_t size, bool zero)
 	tbin = &tcache->tbins[binind];
 	size = small_bin2size(binind);
 	ret = tcache_alloc_easy(tbin);
-	if (ret == NULL) {
+	if (unlikely(ret == NULL)) {
 		ret = tcache_alloc_small_hard(tcache, tbin, binind);
 		if (ret == NULL)
 			return (NULL);
 	}
 	assert(tcache_salloc(ret) == size);
 
-	if (zero == false) {
+	if (likely(zero == false)) {
 		if (config_fill) {
-			if (opt_junk) {
+			if (unlikely(opt_junk)) {
 				arena_alloc_junk_small(ret,
 				    &arena_bin_info[binind], false);
-			} else if (opt_zero)
+			} else if (unlikely(opt_zero))
 				memset(ret, 0, size);
 		}
 	} else {
-		if (config_fill && opt_junk) {
+		if (config_fill && unlikely(opt_junk)) {
 			arena_alloc_junk_small(ret, &arena_bin_info[binind],
 			    true);
 		}
@@ -312,7 +312,7 @@ tcache_alloc_large(tcache_t *tcache, size_t size, bool zero)
 	assert(binind < nhbins);
 	tbin = &tcache->tbins[binind];
 	ret = tcache_alloc_easy(tbin);
-	if (ret == NULL) {
+	if (unlikely(ret == NULL)) {
 		/*
 		 * Only allocate one large object at a time, because it's quite
 		 * expensive to create one and not use it.
@@ -329,11 +329,11 @@ tcache_alloc_large(tcache_t *tcache, size_t size, bool zero)
 			arena_mapbits_large_binind_set(chunk, pageind,
 			    BININD_INVALID);
 		}
-		if (zero == false) {
+		if (likely(zero == false)) {
 			if (config_fill) {
-				if (opt_junk)
+				if (unlikely(opt_junk))
 					memset(ret, 0xa5, size);
-				else if (opt_zero)
+				else if (unlikely(opt_zero))
 					memset(ret, 0, size);
 			}
 		} else
@@ -357,12 +357,12 @@ tcache_dalloc_small(tcache_t *tcache, void *ptr, size_t binind)
 
 	assert(tcache_salloc(ptr) <= SMALL_MAXCLASS);
 
-	if (config_fill && opt_junk)
+	if (config_fill && unlikely(opt_junk))
 		arena_dalloc_junk_small(ptr, &arena_bin_info[binind]);
 
 	tbin = &tcache->tbins[binind];
 	tbin_info = &tcache_bin_info[binind];
-	if (tbin->ncached == tbin_info->ncached_max) {
+	if (unlikely(tbin->ncached == tbin_info->ncached_max)) {
 		tcache_bin_flush_small(tbin, binind, (tbin_info->ncached_max >>
 		    1), tcache);
 	}
@@ -386,12 +386,12 @@ tcache_dalloc_large(tcache_t *tcache, void *ptr, size_t size)
 
 	binind = NBINS + (size >> LG_PAGE) - 1;
 
-	if (config_fill && opt_junk)
+	if (config_fill && unlikely(opt_junk))
 		memset(ptr, 0x5a, size);
 
 	tbin = &tcache->tbins[binind];
 	tbin_info = &tcache_bin_info[binind];
-	if (tbin->ncached == tbin_info->ncached_max) {
+	if (unlikely(tbin->ncached == tbin_info->ncached_max)) {
 		tcache_bin_flush_large(tbin, binind, (tbin_info->ncached_max >>
 		    1), tcache);
 	}