Add and use JEMALLOC_ALWAYS_INLINE.
Add JEMALLOC_ALWAYS_INLINE and use it to guarantee that the entire fast paths of the primary allocation/deallocation functions are inlined.
commit 88393cb0eb
parent 38067483c5
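The mechanism is simple: JEMALLOC_INLINE expands to plain `static inline`, which is only a hint — at `-O0`, or whenever the compiler's inlining heuristics decline, the call survives and the fast path pays function-call overhead. The new JEMALLOC_ALWAYS_INLINE attaches the `always_inline` attribute where the compiler supports it, so the hot helpers are inlined regardless of optimization level. Below is a condensed, compilable sketch of the pattern this commit introduces; the standalone `JEMALLOC_ATTR` fallback and the `fast_path_demo` function are illustrative stand-ins, not part of the commit:

```c
#include <stddef.h>

/* Stand-in for jemalloc's configure-generated attribute wrapper. */
#define JEMALLOC_ATTR(a) __attribute__((a))

#ifdef JEMALLOC_DEBUG
   /* Disable inlining to make debugging easier. */
#  define JEMALLOC_ALWAYS_INLINE
#else
   /*
    * always_inline forces inlining even when the optimizer would not
    * choose it; "unused" silences warnings in translation units that
    * include the header but never call the function.
    */
#  define JEMALLOC_ALWAYS_INLINE \
	static JEMALLOC_ATTR(unused) JEMALLOC_ATTR(always_inline)
#endif

/* Hypothetical stand-in for a hot helper such as s2u() or arena_mapbits_get(). */
JEMALLOC_ALWAYS_INLINE size_t
fast_path_demo(size_t size)
{

	return (size == 0 ? 1 : size);
}
```

Every declaration change below follows this one pattern. Only fast-path helpers are converted; functions off the fast path keep the plain JEMALLOC_INLINE (note the unchanged `JEMALLOC_INLINE void` context line in the tcache.h hunk).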
```diff
--- a/include/jemalloc/internal/arena.h
+++ b/include/jemalloc/internal/arena.h
@@ -480,7 +480,7 @@ void arena_dalloc(arena_t *arena, arena_chunk_t *chunk, void *ptr,
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
 #  ifdef JEMALLOC_ARENA_INLINE_A
-JEMALLOC_INLINE arena_chunk_map_t *
+JEMALLOC_ALWAYS_INLINE arena_chunk_map_t *
 arena_mapp_get(arena_chunk_t *chunk, size_t pageind)
 {
 
@@ -490,21 +490,21 @@ arena_mapp_get(arena_chunk_t *chunk, size_t pageind)
 	return (&chunk->map[pageind-map_bias]);
 }
 
-JEMALLOC_INLINE size_t *
+JEMALLOC_ALWAYS_INLINE size_t *
 arena_mapbitsp_get(arena_chunk_t *chunk, size_t pageind)
 {
 
 	return (&arena_mapp_get(chunk, pageind)->bits);
 }
 
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 arena_mapbits_get(arena_chunk_t *chunk, size_t pageind)
 {
 
 	return (*arena_mapbitsp_get(chunk, pageind));
 }
 
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 arena_mapbits_unallocated_size_get(arena_chunk_t *chunk, size_t pageind)
 {
 	size_t mapbits;
@@ -514,7 +514,7 @@ arena_mapbits_unallocated_size_get(arena_chunk_t *chunk, size_t pageind)
 	return (mapbits & ~PAGE_MASK);
 }
 
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 arena_mapbits_large_size_get(arena_chunk_t *chunk, size_t pageind)
 {
 	size_t mapbits;
@@ -525,7 +525,7 @@ arena_mapbits_large_size_get(arena_chunk_t *chunk, size_t pageind)
 	return (mapbits & ~PAGE_MASK);
 }
 
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 arena_mapbits_small_runind_get(arena_chunk_t *chunk, size_t pageind)
 {
 	size_t mapbits;
@@ -536,7 +536,7 @@ arena_mapbits_small_runind_get(arena_chunk_t *chunk, size_t pageind)
 	return (mapbits >> LG_PAGE);
 }
 
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 arena_mapbits_binind_get(arena_chunk_t *chunk, size_t pageind)
 {
 	size_t mapbits;
@@ -548,7 +548,7 @@ arena_mapbits_binind_get(arena_chunk_t *chunk, size_t pageind)
 	return (binind);
 }
 
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 arena_mapbits_dirty_get(arena_chunk_t *chunk, size_t pageind)
 {
 	size_t mapbits;
@@ -557,7 +557,7 @@ arena_mapbits_dirty_get(arena_chunk_t *chunk, size_t pageind)
 	return (mapbits & CHUNK_MAP_DIRTY);
 }
 
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 arena_mapbits_unzeroed_get(arena_chunk_t *chunk, size_t pageind)
 {
 	size_t mapbits;
@@ -566,7 +566,7 @@ arena_mapbits_unzeroed_get(arena_chunk_t *chunk, size_t pageind)
 	return (mapbits & CHUNK_MAP_UNZEROED);
 }
 
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 arena_mapbits_large_get(arena_chunk_t *chunk, size_t pageind)
 {
 	size_t mapbits;
@@ -575,7 +575,7 @@ arena_mapbits_large_get(arena_chunk_t *chunk, size_t pageind)
 	return (mapbits & CHUNK_MAP_LARGE);
 }
 
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 arena_mapbits_allocated_get(arena_chunk_t *chunk, size_t pageind)
 {
 	size_t mapbits;
@@ -584,7 +584,7 @@ arena_mapbits_allocated_get(arena_chunk_t *chunk, size_t pageind)
 	return (mapbits & CHUNK_MAP_ALLOCATED);
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_unallocated_set(arena_chunk_t *chunk, size_t pageind, size_t size,
     size_t flags)
 {
@@ -597,7 +597,7 @@ arena_mapbits_unallocated_set(arena_chunk_t *chunk, size_t pageind, size_t size,
 	*mapbitsp = size | CHUNK_MAP_BININD_INVALID | flags;
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_unallocated_size_set(arena_chunk_t *chunk, size_t pageind,
     size_t size)
 {
@@ -609,7 +609,7 @@ arena_mapbits_unallocated_size_set(arena_chunk_t *chunk, size_t pageind,
 	*mapbitsp = size | (*mapbitsp & PAGE_MASK);
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_large_set(arena_chunk_t *chunk, size_t pageind, size_t size,
     size_t flags)
 {
@@ -624,7 +624,7 @@ arena_mapbits_large_set(arena_chunk_t *chunk, size_t pageind, size_t size,
 	    CHUNK_MAP_LARGE | CHUNK_MAP_ALLOCATED;
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_large_binind_set(arena_chunk_t *chunk, size_t pageind,
     size_t binind)
 {
@@ -637,7 +637,7 @@ arena_mapbits_large_binind_set(arena_chunk_t *chunk, size_t pageind,
 	    CHUNK_MAP_BININD_SHIFT);
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_small_set(arena_chunk_t *chunk, size_t pageind, size_t runind,
     size_t binind, size_t flags)
 {
@@ -653,7 +653,7 @@ arena_mapbits_small_set(arena_chunk_t *chunk, size_t pageind, size_t runind,
 	    flags | unzeroed | CHUNK_MAP_ALLOCATED;
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 arena_mapbits_unzeroed_set(arena_chunk_t *chunk, size_t pageind,
     size_t unzeroed)
 {
@@ -701,7 +701,7 @@ arena_prof_accum(arena_t *arena, uint64_t accumbytes)
 	malloc_mutex_unlock(&arena->lock);
 }
 
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 arena_ptr_small_binind_get(const void *ptr, size_t mapbits)
 {
 	size_t binind;
@@ -896,7 +896,7 @@ arena_prof_ctx_set(const void *ptr, prof_ctx_t *ctx)
 	arena_mapp_get(chunk, pageind)->prof_ctx = ctx;
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 arena_malloc(arena_t *arena, size_t size, bool zero, bool try_tcache)
 {
 	tcache_t *tcache;
@@ -927,7 +927,7 @@ arena_malloc(arena_t *arena, size_t size, bool zero, bool try_tcache)
 }
 
 /* Return the size of the allocation pointed to by ptr. */
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 arena_salloc(const void *ptr, bool demote)
 {
 	size_t ret;
@@ -973,7 +973,7 @@ arena_salloc(const void *ptr, bool demote)
 	return (ret);
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 arena_dalloc(arena_t *arena, arena_chunk_t *chunk, void *ptr, bool try_tcache)
 {
 	size_t pageind, mapbits;
```
```diff
--- a/include/jemalloc/internal/jemalloc_internal.h.in
+++ b/include/jemalloc/internal/jemalloc_internal.h.in
@@ -233,10 +233,17 @@ static const bool config_ivsalloc =
 
 #ifdef JEMALLOC_DEBUG
    /* Disable inlining to make debugging easier. */
+#  define JEMALLOC_ALWAYS_INLINE
 #  define JEMALLOC_INLINE
 #  define inline
 #else
 #  define JEMALLOC_ENABLE_INLINE
+#  ifdef JEMALLOC_HAVE_ATTR
+#    define JEMALLOC_ALWAYS_INLINE \
+	 static JEMALLOC_ATTR(unused) JEMALLOC_ATTR(always_inline)
+#  else
+#    define JEMALLOC_ALWAYS_INLINE static inline
+#  endif
 #  define JEMALLOC_INLINE static inline
 #  ifdef _MSC_VER
 #    define inline _inline
@@ -595,13 +602,14 @@ arena_t *choose_arena(arena_t *arena);
  * for allocations.
  */
 malloc_tsd_externs(arenas, arena_t *)
-malloc_tsd_funcs(JEMALLOC_INLINE, arenas, arena_t *, NULL, arenas_cleanup)
+malloc_tsd_funcs(JEMALLOC_ALWAYS_INLINE, arenas, arena_t *, NULL,
+    arenas_cleanup)
 
 /*
  * Compute usable size that would result from allocating an object with the
  * specified size.
  */
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 s2u(size_t size)
 {
 
@@ -616,7 +624,7 @@ s2u(size_t size)
  * Compute usable size that would result from allocating an object with the
  * specified size and alignment.
  */
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 sa2u(size_t size, size_t alignment)
 {
 	size_t usize;
@@ -761,7 +769,7 @@ malloc_tsd_protos(JEMALLOC_ATTR(unused), thread_allocated, thread_allocated_t)
 #endif
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 imallocx(size_t size, bool try_tcache, arena_t *arena)
 {
 
@@ -773,14 +781,14 @@ imallocx(size_t size, bool try_tcache, arena_t *arena)
 		return (huge_malloc(size, false));
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 imalloc(size_t size)
 {
 
 	return (imallocx(size, true, NULL));
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 icallocx(size_t size, bool try_tcache, arena_t *arena)
 {
 
@@ -790,14 +798,14 @@ icallocx(size_t size, bool try_tcache, arena_t *arena)
 		return (huge_malloc(size, true));
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 icalloc(size_t size)
 {
 
 	return (icallocx(size, true, NULL));
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 ipallocx(size_t usize, size_t alignment, bool zero, bool try_tcache,
     arena_t *arena)
 {
@@ -822,7 +830,7 @@ ipallocx(size_t usize, size_t alignment, bool zero, bool try_tcache,
 	return (ret);
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 ipalloc(size_t usize, size_t alignment, bool zero)
 {
 
@@ -834,7 +842,7 @@ ipalloc(size_t usize, size_t alignment, bool zero)
  *   void *ptr = [...]
  *   size_t sz = isalloc(ptr, config_prof);
  */
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 isalloc(const void *ptr, bool demote)
 {
 	size_t ret;
@@ -853,7 +861,7 @@ isalloc(const void *ptr, bool demote)
 	return (ret);
 }
 
-JEMALLOC_INLINE size_t
+JEMALLOC_ALWAYS_INLINE size_t
 ivsalloc(const void *ptr, bool demote)
 {
 
@@ -886,7 +894,7 @@ p2rz(const void *ptr)
 	return (u2rz(usize));
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 idallocx(void *ptr, bool try_tcache)
 {
 	arena_chunk_t *chunk;
@@ -900,14 +908,14 @@ idallocx(void *ptr, bool try_tcache)
 		huge_dalloc(ptr, true);
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 idalloc(void *ptr)
 {
 
 	idallocx(ptr, true);
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 iqallocx(void *ptr, bool try_tcache)
 {
 
@@ -917,14 +925,14 @@ iqallocx(void *ptr, bool try_tcache)
 		idallocx(ptr, try_tcache);
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 iqalloc(void *ptr)
 {
 
 	iqallocx(ptr, true);
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 irallocx(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
     bool no_move, bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena)
 {
@@ -993,7 +1001,7 @@ irallocx(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
 	}
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 iralloc(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
     bool no_move)
 {
@@ -1003,7 +1011,7 @@ iralloc(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
 }
 
 malloc_tsd_externs(thread_allocated, thread_allocated_t)
-malloc_tsd_funcs(JEMALLOC_INLINE, thread_allocated, thread_allocated_t,
+malloc_tsd_funcs(JEMALLOC_ALWAYS_INLINE, thread_allocated, thread_allocated_t,
    THREAD_ALLOCATED_INITIALIZER, malloc_tsd_no_cleanup)
 #endif
 
```
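The `malloc_tsd_funcs(...)` changes above (and twice more in tcache.h below) work because that macro takes the inline/linkage specifier as its first argument and pastes it onto every thread-specific-data accessor it generates, so passing JEMALLOC_ALWAYS_INLINE force-inlines the TSD getters too. A generic sketch of this specifier-parameterized code generation — `DEMO_TSD_FUNCS` is a hypothetical illustration, not jemalloc's actual expansion:

```c
#include <stddef.h>

#define DEMO_ALWAYS_INLINE \
	static __attribute__((unused)) __attribute__((always_inline))

/*
 * Generate a getter/setter pair for a per-thread variable; a_attr
 * supplies the specifier, the way malloc_tsd_funcs() receives
 * JEMALLOC_INLINE or JEMALLOC_ALWAYS_INLINE.
 */
#define DEMO_TSD_FUNCS(a_attr, a_name, a_type, a_initializer)	\
static __thread a_type a_name##_tls = a_initializer;		\
a_attr a_type							\
a_name##_get(void)						\
{								\
								\
	return (a_name##_tls);					\
}								\
a_attr void							\
a_name##_set(a_type a_val)					\
{								\
								\
	a_name##_tls = a_val;					\
}

/* The generated demo_tsd_get()/demo_tsd_set() are force-inlined. */
DEMO_TSD_FUNCS(DEMO_ALWAYS_INLINE, demo_tsd, size_t, 0)
```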
```diff
--- a/include/jemalloc/internal/tcache.h
+++ b/include/jemalloc/internal/tcache.h
@@ -140,11 +140,11 @@ void tcache_dalloc_large(tcache_t *tcache, void *ptr, size_t size);
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_TCACHE_C_))
 /* Map of thread-specific caches. */
 malloc_tsd_externs(tcache, tcache_t *)
-malloc_tsd_funcs(JEMALLOC_INLINE, tcache, tcache_t *, NULL,
+malloc_tsd_funcs(JEMALLOC_ALWAYS_INLINE, tcache, tcache_t *, NULL,
     tcache_thread_cleanup)
 /* Per thread flag that allows thread caches to be disabled. */
 malloc_tsd_externs(tcache_enabled, tcache_enabled_t)
-malloc_tsd_funcs(JEMALLOC_INLINE, tcache_enabled, tcache_enabled_t,
+malloc_tsd_funcs(JEMALLOC_ALWAYS_INLINE, tcache_enabled, tcache_enabled_t,
     tcache_enabled_default, malloc_tsd_no_cleanup)
 
 JEMALLOC_INLINE void
@@ -206,7 +206,7 @@ tcache_enabled_set(bool enabled)
 	}
 }
 
-JEMALLOC_INLINE tcache_t *
+JEMALLOC_ALWAYS_INLINE tcache_t *
 tcache_get(bool create)
 {
 	tcache_t *tcache;
@@ -258,7 +258,7 @@ tcache_get(bool create)
 	return (tcache);
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 tcache_event(tcache_t *tcache)
 {
 
@@ -271,7 +271,7 @@ tcache_event(tcache_t *tcache)
 		tcache_event_hard(tcache);
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 tcache_alloc_easy(tcache_bin_t *tbin)
 {
 	void *ret;
@@ -287,7 +287,7 @@ tcache_alloc_easy(tcache_bin_t *tbin)
 	return (ret);
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 tcache_alloc_small(tcache_t *tcache, size_t size, bool zero)
 {
 	void *ret;
@@ -331,7 +331,7 @@ tcache_alloc_small(tcache_t *tcache, size_t size, bool zero)
 	return (ret);
 }
 
-JEMALLOC_INLINE void *
+JEMALLOC_ALWAYS_INLINE void *
 tcache_alloc_large(tcache_t *tcache, size_t size, bool zero)
 {
 	void *ret;
@@ -384,7 +384,7 @@ tcache_alloc_large(tcache_t *tcache, size_t size, bool zero)
 	return (ret);
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 tcache_dalloc_small(tcache_t *tcache, void *ptr, size_t binind)
 {
 	tcache_bin_t *tbin;
@@ -408,7 +408,7 @@ tcache_dalloc_small(tcache_t *tcache, void *ptr, size_t binind)
 	tcache_event(tcache);
 }
 
-JEMALLOC_INLINE void
+JEMALLOC_ALWAYS_INLINE void
 tcache_dalloc_large(tcache_t *tcache, void *ptr, size_t size)
 {
 	size_t binind;
```
```diff
--- a/src/jemalloc.c
+++ b/src/jemalloc.c
@@ -279,7 +279,7 @@ arenas_cleanup(void *arg)
 	malloc_mutex_unlock(&arenas_lock);
 }
 
-static inline bool
+static JEMALLOC_ATTR(always_inline) bool
 malloc_init(void)
 {
 
@@ -892,7 +892,7 @@ JEMALLOC_ATTR(nonnull(1))
  * Avoid any uncertainty as to how many backtrace frames to ignore in
  * PROF_ALLOC_PREP().
  */
-JEMALLOC_ATTR(noinline)
+JEMALLOC_NOINLINE
 #endif
 static int
 imemalign(void **memptr, size_t alignment, size_t size,
@@ -1378,7 +1378,7 @@ je_mallctlbymib(const size_t *mib, size_t miblen, void *oldp, size_t *oldlenp,
  */
 #ifdef JEMALLOC_EXPERIMENTAL
 
-JEMALLOC_INLINE void *
+static JEMALLOC_ATTR(always_inline) void *
 iallocm(size_t usize, size_t alignment, bool zero, bool try_tcache,
     arena_t *arena)
 {
```
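One hunk goes the other way: imemalign() must stay out of line so that PROF_ALLOC_PREP() can rely on a fixed number of backtrace frames to skip, and the commit swaps the bare attribute for a JEMALLOC_NOINLINE wrapper. The definition of JEMALLOC_NOINLINE is not captured in the hunks above; a plausible definition consistent with the other macros in this commit (an assumption, not verbatim from the diff) would be:

```c
/*
 * Assumed counterpart to JEMALLOC_ALWAYS_INLINE; the actual definition
 * lives in a portion of the diff not shown above.
 */
#ifdef JEMALLOC_HAVE_ATTR
#  define JEMALLOC_NOINLINE JEMALLOC_ATTR(noinline)
#else
#  define JEMALLOC_NOINLINE
#endif
```

Note also that the file-local malloc_init() and iallocm() spell the attribute directly as `static JEMALLOC_ATTR(always_inline)` rather than using the new header macro. Forcing the fast paths inline while pinning imemalign() out of line keeps profiling backtrace depths deterministic.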