Add support for user-specified chunk allocators/deallocators.
Add new mallctl endpoints "arena.<i>.chunk.alloc" and "arena.<i>.chunk.dealloc" to allow user code to configure jemalloc's chunk allocator and deallocator on a per-arena basis.
This commit is contained in:
@@ -370,6 +370,12 @@ struct arena_s {
|
||||
*/
|
||||
arena_avail_tree_t runs_avail;
|
||||
|
||||
/*
|
||||
 * user-configurable chunk allocation and deallocation functions.
|
||||
*/
|
||||
chunk_alloc_t *chunk_alloc;
|
||||
chunk_dealloc_t *chunk_dealloc;
|
||||
|
||||
/* bins is used to store trees of free regions. */
|
||||
arena_bin_t bins[NBINS];
|
||||
};
|
||||
|
@@ -43,10 +43,12 @@ extern size_t chunk_npages;
|
||||
extern size_t map_bias; /* Number of arena chunk header pages. */
|
||||
extern size_t arena_maxclass; /* Max size class for arenas. */
|
||||
|
||||
void *chunk_alloc(size_t size, size_t alignment, bool base, bool *zero,
|
||||
dss_prec_t dss_prec);
|
||||
void *chunk_alloc(arena_t *arena, size_t size, size_t alignment, bool base,
|
||||
bool *zero, dss_prec_t dss_prec);
|
||||
void *chunk_alloc_default(size_t size, size_t alignment, bool *zero,
|
||||
unsigned arena_ind);
|
||||
void chunk_unmap(void *chunk, size_t size);
|
||||
void chunk_dealloc(void *chunk, size_t size, bool unmap);
|
||||
void chunk_dealloc(arena_t *arena, void *chunk, size_t size, bool unmap);
|
||||
bool chunk_boot(void);
|
||||
void chunk_prefork(void);
|
||||
void chunk_postfork_parent(void);
|
||||
|
@@ -24,6 +24,9 @@ struct extent_node_s {
|
||||
/* Total region size. */
|
||||
size_t size;
|
||||
|
||||
/* Arena from which this extent came, if any */
|
||||
arena_t *arena;
|
||||
|
||||
/* True if zero-filled; used by chunk recycling code. */
|
||||
bool zeroed;
|
||||
};
|
||||
|
@@ -17,13 +17,15 @@ extern size_t huge_allocated;
|
||||
/* Protects chunk-related data structures. */
|
||||
extern malloc_mutex_t huge_mtx;
|
||||
|
||||
void *huge_malloc(size_t size, bool zero, dss_prec_t dss_prec);
|
||||
void *huge_palloc(size_t size, size_t alignment, bool zero,
|
||||
void *huge_malloc(arena_t *arena, size_t size, bool zero,
|
||||
dss_prec_t dss_prec);
|
||||
void *huge_palloc(arena_t *arena, size_t size, size_t alignment, bool zero,
|
||||
dss_prec_t dss_prec);
|
||||
bool huge_ralloc_no_move(void *ptr, size_t oldsize, size_t size,
|
||||
size_t extra);
|
||||
void *huge_ralloc(void *ptr, size_t oldsize, size_t size, size_t extra,
|
||||
size_t alignment, bool zero, bool try_tcache_dalloc, dss_prec_t dss_prec);
|
||||
void *huge_ralloc(arena_t *arena, void *ptr, size_t oldsize, size_t size,
|
||||
size_t extra, size_t alignment, bool zero, bool try_tcache_dalloc,
|
||||
dss_prec_t dss_prec);
|
||||
#ifdef JEMALLOC_JET
|
||||
typedef void (huge_dalloc_junk_t)(void *, size_t);
|
||||
extern huge_dalloc_junk_t *huge_dalloc_junk;
|
||||
|
@@ -702,7 +702,8 @@ imalloct(size_t size, bool try_tcache, arena_t *arena)
|
||||
if (size <= arena_maxclass)
|
||||
return (arena_malloc(arena, size, false, try_tcache));
|
||||
else
|
||||
return (huge_malloc(size, false, huge_dss_prec_get(arena)));
|
||||
return (huge_malloc(arena, size, false,
|
||||
huge_dss_prec_get(arena)));
|
||||
}
|
||||
|
||||
JEMALLOC_ALWAYS_INLINE void *
|
||||
@@ -719,7 +720,8 @@ icalloct(size_t size, bool try_tcache, arena_t *arena)
|
||||
if (size <= arena_maxclass)
|
||||
return (arena_malloc(arena, size, true, try_tcache));
|
||||
else
|
||||
return (huge_malloc(size, true, huge_dss_prec_get(arena)));
|
||||
return (huge_malloc(arena, size, true,
|
||||
huge_dss_prec_get(arena)));
|
||||
}
|
||||
|
||||
JEMALLOC_ALWAYS_INLINE void *
|
||||
@@ -745,9 +747,11 @@ ipalloct(size_t usize, size_t alignment, bool zero, bool try_tcache,
|
||||
ret = arena_palloc(choose_arena(arena), usize,
|
||||
alignment, zero);
|
||||
} else if (alignment <= chunksize)
|
||||
ret = huge_malloc(usize, zero, huge_dss_prec_get(arena));
|
||||
ret = huge_malloc(arena, usize, zero,
|
||||
huge_dss_prec_get(arena));
|
||||
else
|
||||
ret = huge_palloc(usize, alignment, zero, huge_dss_prec_get(arena));
|
||||
ret = huge_palloc(arena, usize, alignment, zero,
|
||||
huge_dss_prec_get(arena));
|
||||
}
|
||||
|
||||
assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
|
||||
@@ -915,7 +919,7 @@ iralloct(void *ptr, size_t size, size_t extra, size_t alignment, bool zero,
|
||||
alignment, zero, try_tcache_alloc,
|
||||
try_tcache_dalloc));
|
||||
} else {
|
||||
return (huge_ralloc(ptr, oldsize, size, extra,
|
||||
return (huge_ralloc(arena, ptr, oldsize, size, extra,
|
||||
alignment, zero, try_tcache_dalloc, huge_dss_prec_get(arena)));
|
||||
}
|
||||
}
|
||||
|
@@ -104,6 +104,7 @@ buferror
|
||||
choose_arena
|
||||
choose_arena_hard
|
||||
chunk_alloc
|
||||
chunk_alloc_default
|
||||
chunk_alloc_dss
|
||||
chunk_alloc_mmap
|
||||
chunk_boot
|
||||
|
Reference in New Issue
Block a user