Implement explicit tcache support.
Add the MALLOCX_TCACHE() and MALLOCX_TCACHE_NONE macros, which can be
used in conjunction with the *allocx() API.

Add the tcache.create, tcache.flush, and tcache.destroy mallctls.

This resolves #145.
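For reference, a minimal usage sketch of the new interfaces (assuming a build
where the mallctl() and *allocx() entry points are unprefixed; error handling
abbreviated):

    #include <jemalloc/jemalloc.h>

    int
    main(void)
    {
    	unsigned tci;
    	size_t sz = sizeof(tci);
    	void *p;

    	/* tcache.create returns the index of a new explicit tcache. */
    	if (mallctl("tcache.create", &tci, &sz, NULL, 0) != 0)
    		return (1);

    	/* Route allocation and deallocation through that tcache... */
    	p = mallocx(42, MALLOCX_TCACHE(tci));
    	dallocx(p, MALLOCX_TCACHE(tci));

    	/* ...or bypass caching entirely. */
    	p = mallocx(4096, MALLOCX_TCACHE_NONE);
    	dallocx(p, MALLOCX_TCACHE_NONE);

    	/* Empty the cache, then release it. */
    	mallctl("tcache.flush", NULL, NULL, &tci, sizeof(tci));
    	mallctl("tcache.destroy", NULL, NULL, &tci, sizeof(tci));
    	return (0);
    }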
@@ -272,7 +272,8 @@ struct arena_s {
 	arena_stats_t		stats;
 	/*
 	 * List of tcaches for extant threads associated with this arena.
-	 * Stats from these are merged incrementally, and at exit.
+	 * Stats from these are merged incrementally, and at exit if
+	 * opt_stats_print is enabled.
 	 */
 	ql_head(tcache_t)	tcache_ql;
@@ -387,8 +388,7 @@ extern arena_ralloc_junk_large_t *arena_ralloc_junk_large;
 bool	arena_ralloc_no_move(void *ptr, size_t oldsize, size_t size,
     size_t extra, bool zero);
 void	*arena_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize,
-    size_t size, size_t extra, size_t alignment, bool zero,
-    bool try_tcache_alloc, bool try_tcache_dalloc);
+    size_t size, size_t extra, size_t alignment, bool zero, tcache_t *tcache);
 dss_prec_t	arena_dss_prec_get(arena_t *arena);
 bool	arena_dss_prec_set(arena_t *arena, dss_prec_t dss_prec);
 void	arena_stats_merge(arena_t *arena, const char **dss, size_t *nactive,
@@ -450,13 +450,13 @@ unsigned arena_run_regind(arena_run_t *run, arena_bin_info_t *bin_info,
 prof_tctx_t	*arena_prof_tctx_get(const void *ptr);
 void	arena_prof_tctx_set(const void *ptr, prof_tctx_t *tctx);
 void	*arena_malloc(tsd_t *tsd, arena_t *arena, size_t size, bool zero,
-    bool try_tcache);
+    tcache_t *tcache);
 arena_t	*arena_aalloc(const void *ptr);
 size_t	arena_salloc(const void *ptr, bool demote);
 void	arena_dalloc(tsd_t *tsd, arena_chunk_t *chunk, void *ptr,
-    bool try_tcache);
+    tcache_t *tcache);
 void	arena_sdalloc(tsd_t *tsd, arena_chunk_t *chunk, void *ptr, size_t size,
-    bool try_tcache);
+    tcache_t *tcache);
 #endif

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
@@ -943,17 +943,15 @@ arena_prof_tctx_set(const void *ptr, prof_tctx_t *tctx)

 JEMALLOC_ALWAYS_INLINE void *
 arena_malloc(tsd_t *tsd, arena_t *arena, size_t size, bool zero,
-    bool try_tcache)
+    tcache_t *tcache)
 {
-	tcache_t *tcache;

 	assert(size != 0);
 	assert(size <= arena_maxclass);

 	if (likely(size <= SMALL_MAXCLASS)) {
-		if (likely(try_tcache) && likely((tcache = tcache_get(tsd,
-		    true)) != NULL))
-			return (tcache_alloc_small(tcache, size, zero));
+		if (likely(tcache != NULL))
+			return (tcache_alloc_small(tsd, tcache, size, zero));
 		else {
 			arena = arena_choose(tsd, arena);
 			if (unlikely(arena == NULL))
@@ -965,9 +963,8 @@ arena_malloc(tsd_t *tsd, arena_t *arena, size_t size, bool zero,
 		 * Initialize tcache after checking size in order to avoid
 		 * infinite recursion during tcache initialization.
 		 */
-		if (try_tcache && size <= tcache_maxclass && likely((tcache =
-		    tcache_get(tsd, true)) != NULL))
-			return (tcache_alloc_large(tcache, size, zero));
+		if (likely(tcache != NULL) && size <= tcache_maxclass)
+			return (tcache_alloc_large(tsd, tcache, size, zero));
 		else {
 			arena = arena_choose(tsd, arena);
 			if (unlikely(arena == NULL))
@@ -1027,10 +1024,9 @@ arena_salloc(const void *ptr, bool demote)
 }

 JEMALLOC_ALWAYS_INLINE void
-arena_dalloc(tsd_t *tsd, arena_chunk_t *chunk, void *ptr, bool try_tcache)
+arena_dalloc(tsd_t *tsd, arena_chunk_t *chunk, void *ptr, tcache_t *tcache)
 {
 	size_t pageind, mapbits;
-	tcache_t *tcache;

 	assert(ptr != NULL);
 	assert(CHUNK_ADDR2BASE(ptr) != ptr);
@@ -1040,11 +1036,10 @@ arena_dalloc(tsd_t *tsd, arena_chunk_t *chunk, void *ptr, bool try_tcache)
 	assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
 	if (likely((mapbits & CHUNK_MAP_LARGE) == 0)) {
 		/* Small allocation. */
-		if (likely(try_tcache) && likely((tcache = tcache_get(tsd,
-		    false)) != NULL)) {
+		if (likely(tcache != NULL)) {
 			index_t binind = arena_ptr_small_binind_get(ptr,
 			    mapbits);
-			tcache_dalloc_small(tcache, ptr, binind);
+			tcache_dalloc_small(tsd, tcache, ptr, binind);
 		} else
 			arena_dalloc_small(chunk->arena, chunk, ptr, pageind);
 	} else {
@@ -1052,9 +1047,8 @@ arena_dalloc(tsd_t *tsd, arena_chunk_t *chunk, void *ptr, bool try_tcache)

 		assert(((uintptr_t)ptr & PAGE_MASK) == 0);

-		if (try_tcache && size <= tcache_maxclass && likely((tcache =
-		    tcache_get(tsd, false)) != NULL))
-			tcache_dalloc_large(tcache, ptr, size);
+		if (likely(tcache != NULL) && size <= tcache_maxclass)
+			tcache_dalloc_large(tsd, tcache, ptr, size);
 		else
 			arena_dalloc_large(chunk->arena, chunk, ptr);
 	}
@@ -1062,9 +1056,8 @@ arena_dalloc(tsd_t *tsd, arena_chunk_t *chunk, void *ptr, bool try_tcache)

 JEMALLOC_ALWAYS_INLINE void
 arena_sdalloc(tsd_t *tsd, arena_chunk_t *chunk, void *ptr, size_t size,
-    bool try_tcache)
+    tcache_t *tcache)
 {
-	tcache_t *tcache;

 	assert(ptr != NULL);
 	assert(CHUNK_ADDR2BASE(ptr) != ptr);
@@ -1082,10 +1075,9 @@ arena_sdalloc(tsd_t *tsd, arena_chunk_t *chunk, void *ptr, size_t size,

 	if (likely(size <= SMALL_MAXCLASS)) {
 		/* Small allocation. */
-		if (likely(try_tcache) && likely((tcache = tcache_get(tsd,
-		    false)) != NULL)) {
+		if (likely(tcache != NULL)) {
 			index_t binind = size2index(size);
-			tcache_dalloc_small(tcache, ptr, binind);
+			tcache_dalloc_small(tsd, tcache, ptr, binind);
 		} else {
 			size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >>
 			    LG_PAGE;
@@ -1094,9 +1086,8 @@ arena_sdalloc(tsd_t *tsd, arena_chunk_t *chunk, void *ptr, size_t size,
 	} else {
 		assert(((uintptr_t)ptr & PAGE_MASK) == 0);

-		if (try_tcache && size <= tcache_maxclass && (tcache =
-		    tcache_get(tsd, false)) != NULL)
-			tcache_dalloc_large(tcache, ptr, size);
+		if (likely(tcache != NULL) && size <= tcache_maxclass)
+			tcache_dalloc_large(tsd, tcache, ptr, size);
 		else
 			arena_dalloc_large(chunk->arena, chunk, ptr);
 	}
@@ -10,19 +10,19 @@
 #ifdef JEMALLOC_H_EXTERNS

 void	*huge_malloc(tsd_t *tsd, arena_t *arena, size_t size, bool zero,
-    bool try_tcache);
+    tcache_t *tcache);
 void	*huge_palloc(tsd_t *tsd, arena_t *arena, size_t usize, size_t alignment,
-    bool zero, bool try_tcache);
+    bool zero, tcache_t *tcache);
 bool	huge_ralloc_no_move(void *ptr, size_t oldsize, size_t size,
     size_t extra, bool zero);
 void	*huge_ralloc(tsd_t *tsd, arena_t *arena, void *ptr, size_t oldsize,
     size_t size, size_t extra, size_t alignment, bool zero,
-    bool try_tcache_alloc, bool try_tcache_dalloc);
+    tcache_t *tcache);
 #ifdef JEMALLOC_JET
 typedef void (huge_dalloc_junk_t)(void *, size_t);
 extern huge_dalloc_junk_t *huge_dalloc_junk;
 #endif
-void	huge_dalloc(tsd_t *tsd, void *ptr, bool try_tcache);
+void	huge_dalloc(tsd_t *tsd, void *ptr, tcache_t *tcache);
 arena_t	*huge_aalloc(const void *ptr);
 size_t	huge_salloc(const void *ptr);
 prof_tctx_t	*huge_prof_tctx_get(const void *ptr);
@@ -172,7 +172,21 @@ static const bool config_ivsalloc =
 /* Size class index type. */
 typedef unsigned index_t;

-#define	MALLOCX_ARENA_MASK	((int)~0xff)
+/*
+ * Flags bits:
+ *
+ * a: arena
+ * t: tcache
+ * 0: unused
+ * z: zero
+ * n: alignment
+ *
+ * aaaaaaaa aaaatttt tttttttt 0znnnnnn
+ */
+#define	MALLOCX_ARENA_MASK	((int)~0xfffff)
+#define	MALLOCX_ARENA_MAX	0xffe
+#define	MALLOCX_TCACHE_MASK	((int)~0xfff000ffU)
+#define	MALLOCX_TCACHE_MAX	0xffd
 #define	MALLOCX_LG_ALIGN_MASK	((int)0x3f)
 /* Use MALLOCX_ALIGN_GET() if alignment may not be specified in flags. */
 #define	MALLOCX_ALIGN_GET_SPECIFIED(flags)				\
@@ -181,8 +195,11 @@ typedef unsigned index_t;
     (MALLOCX_ALIGN_GET_SPECIFIED(flags) & (SIZE_T_MAX-1))
 #define	MALLOCX_ZERO_GET(flags)						\
     ((bool)(flags & MALLOCX_ZERO))

+#define	MALLOCX_TCACHE_GET(flags)					\
+    (((unsigned)((flags & MALLOCX_TCACHE_MASK) >> 8)) - 2)
 #define	MALLOCX_ARENA_GET(flags)					\
-    (((unsigned)(flags >> 8)) - 1)
+    (((unsigned)(((unsigned)flags) >> 20)) - 1)

 /* Smallest size class to support. */
 #define	TINY_MIN		(1U << LG_TINY_MIN)
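As a sanity check on the bit layout above, the following standalone sketch (not
part of the commit; the masks and shifts are copied from the diff) round-trips
one flags word through the encode and decode macros:

    #include <assert.h>

    #define MALLOCX_LG_ALIGN(la)	((int)(la))
    #define MALLOCX_ZERO		((int)0x40)
    #define MALLOCX_TCACHE(tc)	((int)(((tc)+2) << 8))
    #define MALLOCX_ARENA(a)	((int)(((a)+1) << 20))
    #define MALLOCX_TCACHE_GET(flags)	\
    	(((unsigned)(((flags) & (int)~0xfff000ffU) >> 8)) - 2)
    #define MALLOCX_ARENA_GET(flags)	\
    	(((unsigned)(((unsigned)(flags)) >> 20)) - 1)

    int
    main(void)
    {
    	int flags = MALLOCX_ARENA(3) | MALLOCX_TCACHE(7) | MALLOCX_ZERO |
    	    MALLOCX_LG_ALIGN(4);

    	assert(MALLOCX_ARENA_GET(flags) == 3);	/* bits 20..31, bias 1 */
    	assert(MALLOCX_TCACHE_GET(flags) == 7);	/* bits 8..19, bias 2 */
    	assert((flags & MALLOCX_ZERO) != 0);	/* bit 6 */
    	assert((flags & 0x3f) == 4);		/* low 6 bits: lg(alignment) */
    	return (0);
    }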
@@ -749,7 +766,7 @@ arena_get(tsd_t *tsd, unsigned ind, bool init_if_missing,
 		 * ind is invalid, cache is old (too small), or arena to be
 		 * initialized.
 		 */
-		return (refresh_if_missing ?  arena_get_hard(tsd, ind,
+		return (refresh_if_missing ? arena_get_hard(tsd, ind,
 		    init_if_missing) : NULL);
 	}
 	arena = arenas_cache[ind];
@@ -778,32 +795,31 @@ arena_get(tsd_t *tsd, unsigned ind, bool init_if_missing,
 #ifndef JEMALLOC_ENABLE_INLINE
 arena_t	*iaalloc(const void *ptr);
 size_t	isalloc(const void *ptr, bool demote);
-void	*iallocztm(tsd_t *tsd, size_t size, bool zero, bool try_tcache,
+void	*iallocztm(tsd_t *tsd, size_t size, bool zero, tcache_t *tcache,
     bool is_metadata, arena_t *arena);
-void	*imalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena);
+void	*imalloct(tsd_t *tsd, size_t size, tcache_t *tcache, arena_t *arena);
 void	*imalloc(tsd_t *tsd, size_t size);
-void	*icalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena);
+void	*icalloct(tsd_t *tsd, size_t size, tcache_t *tcache, arena_t *arena);
 void	*icalloc(tsd_t *tsd, size_t size);
 void	*ipallocztm(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
-    bool try_tcache, bool is_metadata, arena_t *arena);
+    tcache_t *tcache, bool is_metadata, arena_t *arena);
 void	*ipalloct(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
-    bool try_tcache, arena_t *arena);
+    tcache_t *tcache, arena_t *arena);
 void	*ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero);
 size_t	ivsalloc(const void *ptr, bool demote);
 size_t	u2rz(size_t usize);
 size_t	p2rz(const void *ptr);
-void	idalloctm(tsd_t *tsd, void *ptr, bool try_tcache, bool is_metadata);
-void	idalloct(tsd_t *tsd, void *ptr, bool try_tcache);
+void	idalloctm(tsd_t *tsd, void *ptr, tcache_t *tcache, bool is_metadata);
+void	idalloct(tsd_t *tsd, void *ptr, tcache_t *tcache);
 void	idalloc(tsd_t *tsd, void *ptr);
-void	iqalloc(tsd_t *tsd, void *ptr, bool try_tcache);
-void	isdalloct(tsd_t *tsd, void *ptr, size_t size, bool try_tcache);
-void	isqalloc(tsd_t *tsd, void *ptr, size_t size, bool try_tcache);
+void	iqalloc(tsd_t *tsd, void *ptr, tcache_t *tcache);
+void	isdalloct(tsd_t *tsd, void *ptr, size_t size, tcache_t *tcache);
+void	isqalloc(tsd_t *tsd, void *ptr, size_t size, tcache_t *tcache);
 void	*iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
-    size_t extra, size_t alignment, bool zero, bool try_tcache_alloc,
-    bool try_tcache_dalloc, arena_t *arena);
-void	*iralloct(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
-    size_t alignment, bool zero, bool try_tcache_alloc, bool try_tcache_dalloc,
-    arena_t *arena);
+    size_t extra, size_t alignment, bool zero, tcache_t *tcache,
+    arena_t *arena);
+void	*iralloct(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
+    size_t alignment, bool zero, tcache_t *tcache, arena_t *arena);
 void	*iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
     size_t alignment, bool zero);
 bool	ixalloc(void *ptr, size_t oldsize, size_t size, size_t extra,
@@ -853,7 +869,7 @@ isalloc(const void *ptr, bool demote)
 }

 JEMALLOC_ALWAYS_INLINE void *
-iallocztm(tsd_t *tsd, size_t size, bool zero, bool try_tcache, bool is_metadata,
+iallocztm(tsd_t *tsd, size_t size, bool zero, tcache_t *tcache, bool is_metadata,
     arena_t *arena)
 {
 	void *ret;
@@ -861,9 +877,9 @@ iallocztm(tsd_t *tsd, size_t size, bool zero, bool try_tcache, bool is_metadata,
 	assert(size != 0);

 	if (likely(size <= arena_maxclass))
-		ret = arena_malloc(tsd, arena, size, zero, try_tcache);
+		ret = arena_malloc(tsd, arena, size, zero, tcache);
 	else
-		ret = huge_malloc(tsd, arena, size, zero, try_tcache);
+		ret = huge_malloc(tsd, arena, size, zero, tcache);
 	if (config_stats && is_metadata && likely(ret != NULL)) {
 		arena_metadata_allocated_add(iaalloc(ret), isalloc(ret,
 		    config_prof));
@@ -872,36 +888,36 @@ iallocztm(tsd_t *tsd, size_t size, bool zero, bool try_tcache, bool is_metadata,
 }

 JEMALLOC_ALWAYS_INLINE void *
-imalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena)
+imalloct(tsd_t *tsd, size_t size, tcache_t *tcache, arena_t *arena)
 {

-	return (iallocztm(tsd, size, false, try_tcache, false, arena));
+	return (iallocztm(tsd, size, false, tcache, false, arena));
 }

 JEMALLOC_ALWAYS_INLINE void *
 imalloc(tsd_t *tsd, size_t size)
 {

-	return (iallocztm(tsd, size, false, true, false, NULL));
+	return (iallocztm(tsd, size, false, tcache_get(tsd, true), false, NULL));
 }

 JEMALLOC_ALWAYS_INLINE void *
-icalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena)
+icalloct(tsd_t *tsd, size_t size, tcache_t *tcache, arena_t *arena)
 {

-	return (iallocztm(tsd, size, true, try_tcache, false, arena));
+	return (iallocztm(tsd, size, true, tcache, false, arena));
 }

 JEMALLOC_ALWAYS_INLINE void *
 icalloc(tsd_t *tsd, size_t size)
 {

-	return (iallocztm(tsd, size, true, true, false, NULL));
+	return (iallocztm(tsd, size, true, tcache_get(tsd, true), false, NULL));
 }

 JEMALLOC_ALWAYS_INLINE void *
 ipallocztm(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
-    bool try_tcache, bool is_metadata, arena_t *arena)
+    tcache_t *tcache, bool is_metadata, arena_t *arena)
 {
 	void *ret;
@@ -909,7 +925,7 @@ ipallocztm(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
 	assert(usize == sa2u(usize, alignment));

 	if (usize <= SMALL_MAXCLASS && alignment < PAGE)
-		ret = arena_malloc(tsd, arena, usize, zero, try_tcache);
+		ret = arena_malloc(tsd, arena, usize, zero, tcache);
 	else {
 		if (likely(usize <= arena_maxclass)) {
 			arena = arena_choose(tsd, arena);
@@ -917,10 +933,10 @@ ipallocztm(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
 				return (NULL);
 			ret = arena_palloc(arena, usize, alignment, zero);
 		} else if (likely(alignment <= chunksize))
-			ret = huge_malloc(tsd, arena, usize, zero, try_tcache);
+			ret = huge_malloc(tsd, arena, usize, zero, tcache);
 		else {
 			ret = huge_palloc(tsd, arena, usize, alignment, zero,
-			    try_tcache);
+			    tcache);
 		}
 	}
 	assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
@@ -932,19 +948,19 @@ ipallocztm(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
 }

 JEMALLOC_ALWAYS_INLINE void *
-ipalloct(tsd_t *tsd, size_t usize, size_t alignment, bool zero, bool try_tcache,
-    arena_t *arena)
+ipalloct(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
+    tcache_t *tcache, arena_t *arena)
 {

-	return (ipallocztm(tsd, usize, alignment, zero, try_tcache, false,
-	    arena));
+	return (ipallocztm(tsd, usize, alignment, zero, tcache, false, arena));
 }

 JEMALLOC_ALWAYS_INLINE void *
 ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero)
 {

-	return (ipallocztm(tsd, usize, alignment, zero, true, false, NULL));
+	return (ipallocztm(tsd, usize, alignment, zero, tcache_get(tsd,
+	    NULL), false, NULL));
 }

 JEMALLOC_ALWAYS_INLINE size_t
@@ -981,7 +997,7 @@ p2rz(const void *ptr)
 }

 JEMALLOC_ALWAYS_INLINE void
-idalloctm(tsd_t *tsd, void *ptr, bool try_tcache, bool is_metadata)
+idalloctm(tsd_t *tsd, void *ptr, tcache_t *tcache, bool is_metadata)
 {
 	arena_chunk_t *chunk;
@@ -993,37 +1009,37 @@ idalloctm(tsd_t *tsd, void *ptr, bool try_tcache, bool is_metadata)

 	chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
 	if (likely(chunk != ptr))
-		arena_dalloc(tsd, chunk, ptr, try_tcache);
+		arena_dalloc(tsd, chunk, ptr, tcache);
 	else
-		huge_dalloc(tsd, ptr, try_tcache);
+		huge_dalloc(tsd, ptr, tcache);
 }

 JEMALLOC_ALWAYS_INLINE void
-idalloct(tsd_t *tsd, void *ptr, bool try_tcache)
+idalloct(tsd_t *tsd, void *ptr, tcache_t *tcache)
 {

-	idalloctm(tsd, ptr, try_tcache, false);
+	idalloctm(tsd, ptr, tcache, false);
 }

 JEMALLOC_ALWAYS_INLINE void
 idalloc(tsd_t *tsd, void *ptr)
 {

-	idalloctm(tsd, ptr, true, false);
+	idalloctm(tsd, ptr, tcache_get(tsd, false), false);
 }

 JEMALLOC_ALWAYS_INLINE void
-iqalloc(tsd_t *tsd, void *ptr, bool try_tcache)
+iqalloc(tsd_t *tsd, void *ptr, tcache_t *tcache)
 {

 	if (config_fill && unlikely(opt_quarantine))
 		quarantine(tsd, ptr);
 	else
-		idalloctm(tsd, ptr, try_tcache, false);
+		idalloctm(tsd, ptr, tcache, false);
 }

 JEMALLOC_ALWAYS_INLINE void
-isdalloct(tsd_t *tsd, void *ptr, size_t size, bool try_tcache)
+isdalloct(tsd_t *tsd, void *ptr, size_t size, tcache_t *tcache)
 {
 	arena_chunk_t *chunk;
@@ -1031,25 +1047,24 @@ isdalloct(tsd_t *tsd, void *ptr, size_t size, bool try_tcache)

 	chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
 	if (likely(chunk != ptr))
-		arena_sdalloc(tsd, chunk, ptr, size, try_tcache);
+		arena_sdalloc(tsd, chunk, ptr, size, tcache);
 	else
-		huge_dalloc(tsd, ptr, try_tcache);
+		huge_dalloc(tsd, ptr, tcache);
 }

 JEMALLOC_ALWAYS_INLINE void
-isqalloc(tsd_t *tsd, void *ptr, size_t size, bool try_tcache)
+isqalloc(tsd_t *tsd, void *ptr, size_t size, tcache_t *tcache)
 {

 	if (config_fill && unlikely(opt_quarantine))
 		quarantine(tsd, ptr);
 	else
-		isdalloct(tsd, ptr, size, try_tcache);
+		isdalloct(tsd, ptr, size, tcache);
 }

 JEMALLOC_ALWAYS_INLINE void *
 iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
-    size_t extra, size_t alignment, bool zero, bool try_tcache_alloc,
-    bool try_tcache_dalloc, arena_t *arena)
+    size_t extra, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena)
 {
 	void *p;
 	size_t usize, copysize;
@@ -1057,7 +1072,7 @@ iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
 	usize = sa2u(size + extra, alignment);
 	if (usize == 0)
 		return (NULL);
-	p = ipalloct(tsd, usize, alignment, zero, try_tcache_alloc, arena);
+	p = ipalloct(tsd, usize, alignment, zero, tcache, arena);
 	if (p == NULL) {
 		if (extra == 0)
 			return (NULL);
@@ -1065,8 +1080,7 @@ iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
 		usize = sa2u(size, alignment);
 		if (usize == 0)
 			return (NULL);
-		p = ipalloct(tsd, usize, alignment, zero, try_tcache_alloc,
-		    arena);
+		p = ipalloct(tsd, usize, alignment, zero, tcache, arena);
 		if (p == NULL)
 			return (NULL);
 	}
@@ -1076,13 +1090,13 @@ iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
 	 */
 	copysize = (size < oldsize) ? size : oldsize;
 	memcpy(p, ptr, copysize);
-	isqalloc(tsd, ptr, oldsize, try_tcache_dalloc);
+	isqalloc(tsd, ptr, oldsize, tcache);
 	return (p);
 }

 JEMALLOC_ALWAYS_INLINE void *
 iralloct(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment,
-    bool zero, bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena)
+    bool zero, tcache_t *tcache, arena_t *arena)
 {

 	assert(ptr != NULL);
@@ -1095,15 +1109,15 @@ iralloct(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment,
 		 * and copy.
 		 */
 		return (iralloct_realign(tsd, ptr, oldsize, size, 0, alignment,
-		    zero, try_tcache_alloc, try_tcache_dalloc, arena));
+		    zero, tcache, arena));
 	}

 	if (likely(size <= arena_maxclass)) {
 		return (arena_ralloc(tsd, arena, ptr, oldsize, size, 0,
-		    alignment, zero, try_tcache_alloc, try_tcache_dalloc));
+		    alignment, zero, tcache));
 	} else {
 		return (huge_ralloc(tsd, arena, ptr, oldsize, size, 0,
-		    alignment, zero, try_tcache_alloc, try_tcache_dalloc));
+		    alignment, zero, tcache));
 	}
 }
@@ -1112,8 +1126,8 @@ iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment,
     bool zero)
 {

-	return (iralloct(tsd, ptr, oldsize, size, alignment, zero, true, true,
-	    NULL));
+	return (iralloct(tsd, ptr, oldsize, size, alignment, zero,
+	    tcache_get(tsd, true), NULL));
 }

 JEMALLOC_ALWAYS_INLINE bool
@@ -425,6 +425,11 @@ tcache_get_hard
 tcache_maxclass
 tcache_salloc
 tcache_stats_merge
+tcaches
+tcaches_create
+tcaches_destroy
+tcaches_flush
+tcaches_get
 thread_allocated_cleanup
 thread_deallocated_cleanup
 tsd_booted
@@ -4,6 +4,7 @@
 typedef struct tcache_bin_info_s tcache_bin_info_t;
 typedef struct tcache_bin_s tcache_bin_t;
 typedef struct tcache_s tcache_t;
+typedef struct tcaches_s tcaches_t;

 /*
  * tcache pointers close to NULL are used to encode state information that is
@@ -70,7 +71,6 @@ struct tcache_bin_s {
 struct tcache_s {
 	ql_elm(tcache_t) link;		/* Used for aggregating stats. */
 	uint64_t	prof_accumbytes;/* Cleared after arena_prof_accum(). */
-	arena_t		*arena;		/* This thread's arena. */
 	unsigned	ev_cnt;		/* Event count since incremental GC. */
 	index_t		next_gc_bin;	/* Next bin to GC. */
 	tcache_bin_t	tbins[1];	/* Dynamically sized. */
@@ -82,6 +82,14 @@ struct tcache_s {
 	 */
 };

+/* Linkage for list of available (previously used) explicit tcache IDs. */
+struct tcaches_s {
+	union {
+		tcache_t	*tcache;
+		tcaches_t	*next;
+	};
+};
+
 #endif /* JEMALLOC_H_STRUCTS */
 /******************************************************************************/
 #ifdef JEMALLOC_H_EXTERNS
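The union in tcaches_s is what lets one slot serve two roles: while an ID is
live it holds the tcache pointer, and once destroyed it threads onto a free
list of reusable IDs. A hypothetical standalone sketch of that scheme (names
and capacity are illustrative, not the commit's code):

    typedef struct tcaches_s tcaches_t;
    struct tcaches_s {
    	union {			/* C11 anonymous union, as in the header above */
    		void		*tcache;	/* stand-in for tcache_t * */
    		tcaches_t	*next;
    	};
    };

    static tcaches_t	slots[64];	/* illustrative fixed capacity */
    static tcaches_t	*avail;		/* head of the free-ID list */
    static unsigned	past;		/* slots ever handed out */

    static unsigned
    slot_alloc(void *tc)
    {
    	tcaches_t *elm;

    	if (avail != NULL) {
    		elm = avail;
    		avail = avail->next;	/* reuse a destroyed ID */
    	} else
    		elm = &slots[past++];	/* first use of a fresh slot */
    	elm->tcache = tc;
    	return ((unsigned)(elm - slots));
    }

    static void
    slot_free(unsigned ind)
    {
    	tcaches_t *elm = &slots[ind];

    	/* (destroy elm->tcache here) */
    	elm->next = avail;		/* thread slot onto the free list */
    	avail = elm;
    }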
@@ -95,27 +103,41 @@ extern tcache_bin_info_t *tcache_bin_info;
  * Number of tcache bins.  There are NBINS small-object bins, plus 0 or more
  * large-object bins.
  */
-extern size_t			nhbins;
+extern size_t	nhbins;

 /* Maximum cached size class. */
-extern size_t			tcache_maxclass;
+extern size_t	tcache_maxclass;

+/*
+ * Explicit tcaches, managed via the tcache.{create,flush,destroy} mallctls and
+ * usable via the MALLOCX_TCACHE() flag.  The automatic per thread tcaches are
+ * completely disjoint from this data structure.  tcaches starts off as a sparse
+ * array, so it has no physical memory footprint until individual pages are
+ * touched.  This allows the entire array to be allocated the first time an
+ * explicit tcache is created without a disproportionate impact on memory usage.
+ */
+extern tcaches_t	*tcaches;
+
 size_t	tcache_salloc(const void *ptr);
-void	tcache_event_hard(tcache_t *tcache);
-void	*tcache_alloc_small_hard(tcache_t *tcache, tcache_bin_t *tbin,
-    index_t binind);
-void	tcache_bin_flush_small(tcache_bin_t *tbin, index_t binind, unsigned rem,
-    tcache_t *tcache);
-void	tcache_bin_flush_large(tcache_bin_t *tbin, index_t binind, unsigned rem,
-    tcache_t *tcache);
+void	tcache_event_hard(tsd_t *tsd, tcache_t *tcache);
+void	*tcache_alloc_small_hard(tsd_t *tsd, tcache_t *tcache,
+    tcache_bin_t *tbin, index_t binind);
+void	tcache_bin_flush_small(tsd_t *tsd, tcache_bin_t *tbin, index_t binind,
+    unsigned rem, tcache_t *tcache);
+void	tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, index_t binind,
+    unsigned rem, tcache_t *tcache);
 void	tcache_arena_associate(tcache_t *tcache, arena_t *arena);
-void	tcache_arena_reassociate(tcache_t *tcache, arena_t *arena);
-void	tcache_arena_dissociate(tcache_t *tcache);
+void	tcache_arena_reassociate(tcache_t *tcache, arena_t *oldarena,
+    arena_t *newarena);
+void	tcache_arena_dissociate(tcache_t *tcache, arena_t *arena);
 tcache_t *tcache_get_hard(tsd_t *tsd);
 tcache_t *tcache_create(tsd_t *tsd, arena_t *arena);
 void	tcache_cleanup(tsd_t *tsd);
 void	tcache_enabled_cleanup(tsd_t *tsd);
 void	tcache_stats_merge(tcache_t *tcache, arena_t *arena);
+bool	tcaches_create(tsd_t *tsd, unsigned *r_ind);
+void	tcaches_flush(tsd_t *tsd, unsigned ind);
+void	tcaches_destroy(tsd_t *tsd, unsigned ind);
 bool	tcache_boot(void);

 #endif /* JEMALLOC_H_EXTERNS */
@@ -123,16 +145,21 @@ bool tcache_boot(void);
 #ifdef JEMALLOC_H_INLINES

 #ifndef JEMALLOC_ENABLE_INLINE
-void	tcache_event(tcache_t *tcache);
+void	tcache_event(tsd_t *tsd, tcache_t *tcache);
 void	tcache_flush(void);
 bool	tcache_enabled_get(void);
 tcache_t *tcache_get(tsd_t *tsd, bool create);
 void	tcache_enabled_set(bool enabled);
 void	*tcache_alloc_easy(tcache_bin_t *tbin);
-void	*tcache_alloc_small(tcache_t *tcache, size_t size, bool zero);
-void	*tcache_alloc_large(tcache_t *tcache, size_t size, bool zero);
-void	tcache_dalloc_small(tcache_t *tcache, void *ptr, index_t binind);
-void	tcache_dalloc_large(tcache_t *tcache, void *ptr, size_t size);
+void	*tcache_alloc_small(tsd_t *tsd, tcache_t *tcache, size_t size,
+    bool zero);
+void	*tcache_alloc_large(tsd_t *tsd, tcache_t *tcache, size_t size,
+    bool zero);
+void	tcache_dalloc_small(tsd_t *tsd, tcache_t *tcache, void *ptr,
+    index_t binind);
+void	tcache_dalloc_large(tsd_t *tsd, tcache_t *tcache, void *ptr,
+    size_t size);
+tcache_t	*tcaches_get(tsd_t *tsd, unsigned ind);
 #endif

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_TCACHE_C_))
@@ -202,7 +229,7 @@ tcache_get(tsd_t *tsd, bool create)
 }

 JEMALLOC_ALWAYS_INLINE void
-tcache_event(tcache_t *tcache)
+tcache_event(tsd_t *tsd, tcache_t *tcache)
 {

 	if (TCACHE_GC_INCR == 0)
@@ -211,7 +238,7 @@ tcache_event(tcache_t *tcache)
 	tcache->ev_cnt++;
 	assert(tcache->ev_cnt <= TCACHE_GC_INCR);
 	if (unlikely(tcache->ev_cnt == TCACHE_GC_INCR))
-		tcache_event_hard(tcache);
+		tcache_event_hard(tsd, tcache);
 }

 JEMALLOC_ALWAYS_INLINE void *
@@ -231,7 +258,7 @@ tcache_alloc_easy(tcache_bin_t *tbin)
 }

 JEMALLOC_ALWAYS_INLINE void *
-tcache_alloc_small(tcache_t *tcache, size_t size, bool zero)
+tcache_alloc_small(tsd_t *tsd, tcache_t *tcache, size_t size, bool zero)
 {
 	void *ret;
 	index_t binind;
@@ -244,7 +271,7 @@ tcache_alloc_small(tcache_t *tcache, size_t size, bool zero)
 	usize = index2size(binind);
 	ret = tcache_alloc_easy(tbin);
 	if (unlikely(ret == NULL)) {
-		ret = tcache_alloc_small_hard(tcache, tbin, binind);
+		ret = tcache_alloc_small_hard(tsd, tcache, tbin, binind);
 		if (ret == NULL)
 			return (NULL);
 	}
@@ -270,12 +297,12 @@ tcache_alloc_small(tcache_t *tcache, size_t size, bool zero)
 		tbin->tstats.nrequests++;
 	if (config_prof)
 		tcache->prof_accumbytes += usize;
-	tcache_event(tcache);
+	tcache_event(tsd, tcache);
 	return (ret);
 }

 JEMALLOC_ALWAYS_INLINE void *
-tcache_alloc_large(tcache_t *tcache, size_t size, bool zero)
+tcache_alloc_large(tsd_t *tsd, tcache_t *tcache, size_t size, bool zero)
 {
 	void *ret;
 	index_t binind;
@@ -293,7 +320,7 @@ tcache_alloc_large(tcache_t *tcache, size_t size, bool zero)
 		 * Only allocate one large object at a time, because it's quite
 		 * expensive to create one and not use it.
 		 */
-		ret = arena_malloc_large(tcache->arena, usize, zero);
+		ret = arena_malloc_large(arena_choose(tsd, NULL), usize, zero);
 		if (ret == NULL)
 			return (NULL);
 	} else {
@@ -321,12 +348,12 @@ tcache_alloc_large(tcache_t *tcache, size_t size, bool zero)
 			tcache->prof_accumbytes += usize;
 	}

-	tcache_event(tcache);
+	tcache_event(tsd, tcache);
 	return (ret);
 }

 JEMALLOC_ALWAYS_INLINE void
-tcache_dalloc_small(tcache_t *tcache, void *ptr, index_t binind)
+tcache_dalloc_small(tsd_t *tsd, tcache_t *tcache, void *ptr, index_t binind)
 {
 	tcache_bin_t *tbin;
 	tcache_bin_info_t *tbin_info;
@@ -339,18 +366,18 @@ tcache_dalloc_small(tcache_t *tcache, void *ptr, index_t binind)
 	tbin = &tcache->tbins[binind];
 	tbin_info = &tcache_bin_info[binind];
 	if (unlikely(tbin->ncached == tbin_info->ncached_max)) {
-		tcache_bin_flush_small(tbin, binind, (tbin_info->ncached_max >>
-		    1), tcache);
+		tcache_bin_flush_small(tsd, tbin, binind,
+		    (tbin_info->ncached_max >> 1), tcache);
 	}
 	assert(tbin->ncached < tbin_info->ncached_max);
 	tbin->avail[tbin->ncached] = ptr;
 	tbin->ncached++;

-	tcache_event(tcache);
+	tcache_event(tsd, tcache);
 }

 JEMALLOC_ALWAYS_INLINE void
-tcache_dalloc_large(tcache_t *tcache, void *ptr, size_t size)
+tcache_dalloc_large(tsd_t *tsd, tcache_t *tcache, void *ptr, size_t size)
 {
 	index_t binind;
 	tcache_bin_t *tbin;
@@ -368,14 +395,23 @@ tcache_dalloc_large(tcache_t *tcache, void *ptr, size_t size)
 	tbin = &tcache->tbins[binind];
 	tbin_info = &tcache_bin_info[binind];
 	if (unlikely(tbin->ncached == tbin_info->ncached_max)) {
-		tcache_bin_flush_large(tbin, binind, (tbin_info->ncached_max >>
-		    1), tcache);
+		tcache_bin_flush_large(tsd, tbin, binind,
+		    (tbin_info->ncached_max >> 1), tcache);
 	}
 	assert(tbin->ncached < tbin_info->ncached_max);
 	tbin->avail[tbin->ncached] = ptr;
 	tbin->ncached++;

-	tcache_event(tcache);
+	tcache_event(tsd, tcache);
 }

+JEMALLOC_ALWAYS_INLINE tcache_t *
+tcaches_get(tsd_t *tsd, unsigned ind)
+{
+	tcaches_t *elm = &tcaches[ind];
+
+	if (unlikely(elm->tcache == NULL))
+		elm->tcache = tcache_create(tsd, arena_choose(tsd, NULL));
+	return (elm->tcache);
+}
 #endif
@@ -19,8 +19,16 @@
     ((a < (size_t)INT_MAX) ? ffs(a)-1 : ffs(a>>32)+31)
 # endif
 # define MALLOCX_ZERO	((int)0x40)
-/* Bias arena index bits so that 0 encodes "MALLOCX_ARENA() unspecified". */
-# define MALLOCX_ARENA(a)	((int)(((a)+1) << 8))
+/*
+ * Bias tcache index bits so that 0 encodes "automatic tcache management", and 1
+ * encodes MALLOCX_TCACHE_NONE.
+ */
+# define MALLOCX_TCACHE(tc)	((int)(((tc)+2) << 8))
+# define MALLOCX_TCACHE_NONE	MALLOCX_TCACHE(-1)
+/*
+ * Bias arena index bits so that 0 encodes "use an automatically chosen arena".
+ */
+# define MALLOCX_ARENA(a)	((int)(((a)+1) << 20))

 #ifdef JEMALLOC_HAVE_ATTR
 # define JEMALLOC_ATTR(s)	__attribute__((s))
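The biases in these macros exist so that an all-zero flags word keeps its
pre-existing meaning: field value 0 is "automatic" for both the tcache and the
arena. A small standalone check (not part of the commit) of the resulting
encodings:

    #include <assert.h>

    #define MALLOCX_TCACHE(tc)	((int)(((tc)+2) << 8))
    #define MALLOCX_TCACHE_NONE	MALLOCX_TCACHE(-1)
    #define MALLOCX_ARENA(a)	((int)(((a)+1) << 20))

    int
    main(void)
    {
    	assert(MALLOCX_TCACHE_NONE == 0x100);	/* -1 + bias 2 -> field 1 */
    	assert(MALLOCX_TCACHE(0) == 0x200);	/* first explicit tcache ID */
    	assert(MALLOCX_ARENA(0) == 0x100000);	/* arena 0 + bias 1 -> field 1 */
    	/* flags == 0 leaves both fields 0: automatic tcache and arena. */
    	return (0);
    }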