Add and use JEMALLOC_ALWAYS_INLINE.
Add JEMALLOC_ALWAYS_INLINE and use it to guarantee that the entire fast paths of the primary allocation/deallocation functions are inlined.
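
As background for the change below (editorial note, not part of the commit): plain "static inline" is only a hint, so a compiler may still emit an out-of-line call, especially at low optimization levels; GCC/Clang's always_inline attribute removes that discretion. A minimal sketch, with hypothetical helper names:

    #include <stddef.h>

    /* Hint only: the compiler may still generate a call to this helper. */
    static inline size_t
    round_up_hint(size_t n)
    {
        return ((n + 7) & ~(size_t)7);
    }

    /* Forced: the body is substituted at every call site, even without
     * optimization, so code built from such helpers compiles to
     * straight-line code at the call site. */
    static inline __attribute__((always_inline)) size_t
    round_up_forced(size_t n)
    {
        return ((n + 7) & ~(size_t)7);
    }
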
@@ -233,10 +233,17 @@ static const bool config_ivsalloc =
 #ifdef JEMALLOC_DEBUG
    /* Disable inlining to make debugging easier. */
+#  define JEMALLOC_ALWAYS_INLINE
 #  define JEMALLOC_INLINE
 #  define inline
 #else
 #  define JEMALLOC_ENABLE_INLINE
+#  ifdef JEMALLOC_HAVE_ATTR
+#    define JEMALLOC_ALWAYS_INLINE \
+	static JEMALLOC_ATTR(unused) JEMALLOC_ATTR(always_inline)
+#  else
+#    define JEMALLOC_ALWAYS_INLINE static inline
+#  endif
 #  define JEMALLOC_INLINE static inline
 #  ifdef _MSC_VER
 #    define inline _inline
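
How the new macro resolves in practice (a sketch, relying on JEMALLOC_ATTR(s) being defined elsewhere in jemalloc's headers, not in this diff, to expand to __attribute__((s)) when attributes are available; s2u_sketch is a hypothetical stand-in, not the real s2u):

    #include <stddef.h>

    /* Debug builds: JEMALLOC_ALWAYS_INLINE expands to nothing, so the inline
     * functions compile as ordinary definitions that are easy to step through.
     * Non-debug builds without attribute support fall back to "static inline",
     * the same hint JEMALLOC_INLINE provides.
     * Non-debug builds with attribute support expand roughly as follows,
     * forcing inlining and suppressing unused-function warnings: */
    static __attribute__((unused)) __attribute__((always_inline)) size_t
    s2u_sketch(size_t size)
    {
        /* size-class rounding elided */
        return (size);
    }
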
@@ -595,13 +602,14 @@ arena_t *choose_arena(arena_t *arena);
  * for allocations.
  */
 malloc_tsd_externs(arenas, arena_t *)
-malloc_tsd_funcs(JEMALLOC_INLINE, arenas, arena_t *, NULL, arenas_cleanup)
+malloc_tsd_funcs(JEMALLOC_ALWAYS_INLINE, arenas, arena_t *, NULL,
+    arenas_cleanup)
 
 /*
  * Compute usable size that would result from allocating an object with the
  * specified size.
  */
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 s2u(size_t size)
 {
 
@@ -616,7 +624,7 @@ s2u(size_t size)
  * Compute usable size that would result from allocating an object with the
  * specified size and alignment.
  */
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 sa2u(size_t size, size_t alignment)
 {
 	size_t usize;
@@ -761,7 +769,7 @@ malloc_tsd_protos(JEMALLOC_ATTR(unused), thread_allocated, thread_allocated_t)
 #endif
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 imallocx(size_t size, bool try_tcache, arena_t *arena)
 {
 
@@ -773,14 +781,14 @@ imallocx(size_t size, bool try_tcache, arena_t *arena)
 		return (huge_malloc(size, false));
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 imalloc(size_t size)
 {
 
 	return (imallocx(size, true, NULL));
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 icallocx(size_t size, bool try_tcache, arena_t *arena)
 {
 
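
The hunk above shows the intended shape of the fast path: imalloc() is a thin wrapper that pins imallocx()'s extra arguments (try_tcache true, arena NULL). A generic illustration of why forcing both layers inline matters, using hypothetical stand-ins rather than jemalloc code:

    #include <stddef.h>
    #include <stdlib.h>

    /* General worker, analogous to imallocx(). */
    static inline __attribute__((always_inline)) void *
    alloc_impl(size_t size, int try_cache)
    {
        if (try_cache) {
            /* fast-path placeholder */
        }
        return (malloc(size));  /* slow-path stand-in */
    }

    /* Thin wrapper, analogous to imalloc(): pins try_cache to a constant.
     * Forced inlining lets the constant propagate into alloc_impl(), so each
     * call site keeps only the branch it actually needs. */
    static inline __attribute__((always_inline)) void *
    alloc_fast(size_t size)
    {
        return (alloc_impl(size, 1));
    }
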
@@ -790,14 +798,14 @@ icallocx(size_t size, bool try_tcache, arena_t *arena)
 		return (huge_malloc(size, true));
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 icalloc(size_t size)
 {
 
 	return (icallocx(size, true, NULL));
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 ipallocx(size_t usize, size_t alignment, bool zero, bool try_tcache,
     arena_t *arena)
 {
@@ -822,7 +830,7 @@ ipallocx(size_t usize, size_t alignment, bool zero, bool try_tcache,
 	return (ret);
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 ipalloc(size_t usize, size_t alignment, bool zero)
 {
 
@@ -834,7 +842,7 @@ ipalloc(size_t usize, size_t alignment, bool zero)
  * void *ptr = [...]
  * size_t sz = isalloc(ptr, config_prof);
  */
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 isalloc(const void *ptr, bool demote)
 {
 	size_t ret;
@@ -853,7 +861,7 @@ isalloc(const void *ptr, bool demote)
 	return (ret);
 }
 
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 ivsalloc(const void *ptr, bool demote)
 {
 
@@ -886,7 +894,7 @@ p2rz(const void *ptr)
 	return (u2rz(usize));
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 idallocx(void *ptr, bool try_tcache)
 {
 	arena_chunk_t *chunk;
@@ -900,14 +908,14 @@ idallocx(void *ptr, bool try_tcache)
 		huge_dalloc(ptr, true);
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 idalloc(void *ptr)
 {
 
 	idallocx(ptr, true);
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 iqallocx(void *ptr, bool try_tcache)
 {
 
@@ -917,14 +925,14 @@ iqallocx(void *ptr, bool try_tcache)
 	idallocx(ptr, try_tcache);
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 iqalloc(void *ptr)
 {
 
 	iqallocx(ptr, true);
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 irallocx(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
     bool no_move, bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena)
 {
@@ -993,7 +1001,7 @@ irallocx(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
 	}
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 iralloc(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
     bool no_move)
 {
@@ -1003,7 +1011,7 @@ iralloc(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
 }
 
 malloc_tsd_externs(thread_allocated, thread_allocated_t)
-malloc_tsd_funcs(JEMALLOC_INLINE, thread_allocated, thread_allocated_t,
+malloc_tsd_funcs(JEMALLOC_ALWAYS_INLINE, thread_allocated, thread_allocated_t,
     THREAD_ALLOCATED_INITIALIZER, malloc_tsd_no_cleanup)
 #endif
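
The final hunk gives the same treatment to the accessors that malloc_tsd_funcs() generates for the thread_allocated counter, which are also touched on every allocation. A simplified sketch of the effect, assuming a __thread-backed TSD implementation (the names here are hypothetical, not the macro-generated ones):

    #include <stdint.h>

    /* Hypothetical per-thread counter standing in for thread_allocated. */
    static __thread uint64_t thread_allocated_sketch;

    /* Forced inlining reduces the accessor to a TLS address computation at
     * each call site instead of an out-of-line function call. */
    static inline __attribute__((always_inline)) uint64_t *
    thread_allocated_sketch_get(void)
    {
        return (&thread_allocated_sketch);
    }
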