Mark some conditions as unlikely.

Mark the following conditions as unlikely:

* assertion failure
* malloc_init failure
* malloc not already initialized (in malloc_init)
* running in valgrind
* thread cache disabled at runtime

Clang and GCC already consider a comparison with NULL or -1 to be cold, so
many branches (e.g. out-of-memory checks) are already correctly treated as
cold and marking them is not important.
Commit 23fdf8b359 (parent 6b5609d23b)
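For context, unlikely() wraps the compiler's branch-prediction hint so the cold arm of a branch is laid out away from the hot path. Below is a minimal sketch of how such a macro pair is typically defined; the exact header and the non-GNU fallback are assumptions for illustration, not taken from this diff.

/*
 * Sketch only: branch-prediction hint macros of the kind unlikely()
 * expands to.  __builtin_expect() is a GCC/Clang builtin that tells the
 * optimizer which value the expression is expected to have.
 */
#ifdef __GNUC__
#  define likely(x)    __builtin_expect(!!(x), 1)
#  define unlikely(x)  __builtin_expect(!!(x), 0)
#else
/* Assumed fallback for other compilers: no hint, plain evaluation. */
#  define likely(x)    (!!(x))
#  define unlikely(x)  (!!(x))
#endif

With that definition, a cold runtime check such as the in_valgrind guard becomes if (unlikely(in_valgrind)), which is the shape of every change in the diff below.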
@@ -218,7 +218,7 @@ tcache_get(bool create)
 		return (NULL);
 
 	tcache = *tcache_tsd_get();
-	if ((uintptr_t)tcache <= (uintptr_t)TCACHE_STATE_MAX) {
+	if (unlikely((uintptr_t)tcache <= (uintptr_t)TCACHE_STATE_MAX)) {
 		if (tcache == TCACHE_STATE_DISABLED)
 			return (NULL);
 		tcache = tcache_get_hard(tcache, create);
@@ -41,7 +41,7 @@
  */
 #ifndef assert
 #define assert(e) do {							\
-	if (config_debug && !(e)) {					\
+	if (unlikely(config_debug && !(e))) {				\
 		malloc_printf(						\
 		    "<jemalloc>: %s:%d: Failed assertion: \"%s\"\n",	\
 		    __FILE__, __LINE__, #e);				\
@@ -73,14 +73,14 @@
 
 #ifndef assert_not_implemented
 #define assert_not_implemented(e) do {					\
-	if (config_debug && !(e))					\
+	if (unlikely(config_debug && !(e)))				\
 		not_implemented();					\
 } while (0)
 #endif
 
 /* Use to assert a particular configuration, e.g., cassert(config_debug). */
 #define cassert(c) do {							\
-	if ((c) == false)						\
+	if (unlikely(!(c)))						\
 		not_reached();						\
 } while (0)
 
@@ -14,15 +14,15 @@
  * usable space.
  */
 #define JEMALLOC_VALGRIND_MAKE_MEM_NOACCESS(ptr, usize) do {		\
-	if (in_valgrind)						\
+	if (unlikely(in_valgrind))					\
 		valgrind_make_mem_noaccess(ptr, usize);			\
 } while (0)
 #define JEMALLOC_VALGRIND_MAKE_MEM_UNDEFINED(ptr, usize) do {		\
-	if (in_valgrind)						\
+	if (unlikely(in_valgrind))					\
 		valgrind_make_mem_undefined(ptr, usize);		\
 } while (0)
 #define JEMALLOC_VALGRIND_MAKE_MEM_DEFINED(ptr, usize) do {		\
-	if (in_valgrind)						\
+	if (unlikely(in_valgrind))					\
 		valgrind_make_mem_defined(ptr, usize);			\
 } while (0)
 /*
@@ -31,13 +31,13 @@
 * Valgrind reports errors, there are no extra stack frames in the backtraces.
  */
 #define JEMALLOC_VALGRIND_MALLOC(cond, ptr, usize, zero) do {		\
-	if (in_valgrind && cond)					\
+	if (unlikely(in_valgrind && cond))				\
 		VALGRIND_MALLOCLIKE_BLOCK(ptr, usize, p2rz(ptr), zero);	\
 } while (0)
 #define JEMALLOC_VALGRIND_REALLOC(maybe_moved, ptr, usize,		\
     ptr_maybe_null, old_ptr, old_usize, old_rzsize, old_ptr_maybe_null,\
     zero) do {								\
-	if (in_valgrind) {						\
+	if (unlikely(in_valgrind)) {					\
 		size_t rzsize = p2rz(ptr);				\
 									\
 		if (!maybe_moved || ptr == old_ptr) {			\
@@ -73,7 +73,7 @@
 	}								\
 } while (0)
 #define JEMALLOC_VALGRIND_FREE(ptr, rzsize) do {			\
-	if (in_valgrind)						\
+	if (unlikely(in_valgrind))					\
 		valgrind_freelike_block(ptr, rzsize);			\
 } while (0)
 #else
@@ -291,7 +291,7 @@ JEMALLOC_ALWAYS_INLINE_C bool
 malloc_init(void)
 {
 
-	if (malloc_initialized == false && malloc_init_hard())
+	if (unlikely(!malloc_initialized) && malloc_init_hard())
 		return (true);
 	malloc_thread_init();
 
@@ -904,7 +904,7 @@ JEMALLOC_ALWAYS_INLINE_C void *
 imalloc_body(size_t size, size_t *usize)
 {
 
-	if (malloc_init())
+	if (unlikely(malloc_init()))
 		return (NULL);
 
 	if (config_prof && opt_prof) {
@@ -912,7 +912,7 @@ imalloc_body(size_t size, size_t *usize)
 		return (imalloc_prof(*usize));
 	}
 
-	if (config_stats || (config_valgrind && in_valgrind))
+	if (config_stats || (unlikely(config_valgrind && in_valgrind)))
 		*usize = s2u(size);
 	return (imalloc(size));
 }
@@ -993,7 +993,7 @@ imemalign(void **memptr, size_t alignment, size_t size, size_t min_alignment)
 
 	assert(min_alignment != 0);
 
-	if (malloc_init()) {
+	if (unlikely(malloc_init())) {
 		result = NULL;
 		goto label_oom;
 	} else {
@@ -1116,7 +1116,7 @@ je_calloc(size_t num, size_t size)
 	size_t num_size;
 	size_t usize JEMALLOC_CC_SILENCE_INIT(0);
 
-	if (malloc_init()) {
+	if (unlikely(malloc_init())) {
 		num_size = 0;
 		ret = NULL;
 		goto label_return;
@@ -1146,7 +1146,7 @@ je_calloc(size_t num, size_t size)
 		usize = s2u(num_size);
 		ret = icalloc_prof(usize);
 	} else {
-		if (config_stats || (config_valgrind && in_valgrind))
+		if (config_stats || unlikely(config_valgrind && in_valgrind))
 			usize = s2u(num_size);
 		ret = icalloc(num_size);
 	}
@@ -1222,7 +1222,7 @@ ifree(void *ptr, bool try_tcache)
 		usize = isalloc(ptr, config_prof);
 	if (config_stats)
 		thread_allocated_tsd_get()->deallocated += usize;
-	if (config_valgrind && in_valgrind)
+	if (unlikely(config_valgrind && in_valgrind))
 		rzsize = p2rz(ptr);
 	iqalloc(ptr, try_tcache);
 	JEMALLOC_VALGRIND_FREE(ptr, rzsize);
@@ -1240,7 +1240,7 @@ isfree(void *ptr, size_t usize, bool try_tcache)
 		prof_free(ptr, usize);
 	if (config_stats)
 		thread_allocated_tsd_get()->deallocated += usize;
-	if (config_valgrind && in_valgrind)
+	if (unlikely(config_valgrind && in_valgrind))
 		rzsize = p2rz(ptr);
 	isqalloc(ptr, usize, try_tcache);
 	JEMALLOC_VALGRIND_FREE(ptr, rzsize);
@@ -1269,16 +1269,16 @@ je_realloc(void *ptr, size_t size)
 		malloc_thread_init();
 
 		if ((config_prof && opt_prof) || config_stats ||
-		    (config_valgrind && in_valgrind))
+		    unlikely(config_valgrind && in_valgrind))
 			old_usize = isalloc(ptr, config_prof);
-		if (config_valgrind && in_valgrind)
+		if (unlikely(config_valgrind && in_valgrind))
 			old_rzsize = config_prof ? p2rz(ptr) : u2rz(old_usize);
 
 		if (config_prof && opt_prof) {
 			usize = s2u(size);
 			ret = irealloc_prof(ptr, old_usize, usize);
 		} else {
-			if (config_stats || (config_valgrind && in_valgrind))
+			if (config_stats || unlikely(config_valgrind && in_valgrind))
 				usize = s2u(size);
 			ret = iralloc(ptr, size, 0, false);
 		}
@@ -1506,7 +1506,7 @@ imallocx_no_prof(size_t size, int flags, size_t *usize)
 	arena_t *arena;
 
 	if (flags == 0) {
-		if (config_stats || (config_valgrind && in_valgrind))
+		if (config_stats || unlikely(config_valgrind && in_valgrind))
 			*usize = s2u(size);
 		return (imalloc(size));
 	}
@@ -1524,7 +1524,7 @@ je_mallocx(size_t size, int flags)
 
 	assert(size != 0);
 
-	if (malloc_init())
+	if (unlikely(malloc_init()))
 		goto label_oom;
 
 	if (config_prof && opt_prof)
@@ -1642,9 +1642,9 @@ je_rallocx(void *ptr, size_t size, int flags)
 	}
 
 	if ((config_prof && opt_prof) || config_stats ||
-	    (config_valgrind && in_valgrind))
+	    (unlikely(config_valgrind && in_valgrind)))
 		old_usize = isalloc(ptr, config_prof);
-	if (config_valgrind && in_valgrind)
+	if (unlikely(config_valgrind && in_valgrind))
 		old_rzsize = u2rz(old_usize);
 
 	if (config_prof && opt_prof) {
@@ -1777,7 +1777,7 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags)
 		arena = NULL;
 
 	old_usize = isalloc(ptr, config_prof);
-	if (config_valgrind && in_valgrind)
+	if (unlikely(config_valgrind && in_valgrind))
 		old_rzsize = u2rz(old_usize);
 
 	if (config_prof && opt_prof) {
@@ -1883,7 +1883,7 @@ je_nallocx(size_t size, int flags)
 
 	assert(size != 0);
 
-	if (malloc_init())
+	if (unlikely(malloc_init()))
 		return (0);
 
 	return (inallocx(size, flags));
@@ -1894,7 +1894,7 @@ je_mallctl(const char *name, void *oldp, size_t *oldlenp, void *newp,
     size_t newlen)
 {
 
-	if (malloc_init())
+	if (unlikely(malloc_init()))
 		return (EAGAIN);
 
 	return (ctl_byname(name, oldp, oldlenp, newp, newlen));
@@ -1904,7 +1904,7 @@ int
 je_mallctlnametomib(const char *name, size_t *mibp, size_t *miblenp)
 {
 
-	if (malloc_init())
+	if (unlikely(malloc_init()))
 		return (EAGAIN);
 
 	return (ctl_nametomib(name, mibp, miblenp));
@@ -1915,7 +1915,7 @@ je_mallctlbymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
     void *newp, size_t newlen)
 {
 
-	if (malloc_init())
+	if (unlikely(malloc_init()))
 		return (EAGAIN);
 
 	return (ctl_bymib(mib, miblen, oldp, oldlenp, newp, newlen));
@@ -2064,7 +2064,7 @@ static void *
 a0alloc(size_t size, bool zero)
 {
 
-	if (malloc_init())
+	if (unlikely(malloc_init()))
 		return (NULL);
 
 	if (size == 0)