Use rtree-based chunk lookups rather than pointer bit twiddling.
Look up chunk metadata via the chunks radix tree rather than via CHUNK_ADDR2BASE() pointer bit twiddling. Minimize extent lookups by performing a single lookup (e.g. in free()) and then propagating the pointer's containing extent into nearly all of the functions that may need it.
commit db72272bef (parent 2d2b4e98c9)
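Before the diff itself, here is a minimal, self-contained sketch of the pattern the commit threads through every call path: one metadata lookup at the API boundary, then explicit extent propagation into the helpers. All names below (demo_extent_t, iealloc_demo, dalloc_demo, free_demo) are hypothetical stand-ins for illustration only; in the real tree the lookup is chunk_lookup() against chunks_rtree, the metadata type is extent_t, and the entry point is iealloc() as seen in the diff.

#include <stddef.h>
#include <stdio.h>

/* Stand-in for jemalloc's extent_t metadata record (hypothetical). */
typedef struct {
    void   *addr;   /* base address the extent covers */
    size_t  size;   /* size of the extent */
    int     achunk; /* backs an arena chunk (vs. a huge allocation)? */
} demo_extent_t;

/* One registered extent plays the role of the chunks radix tree. */
static demo_extent_t demo_extent;

/*
 * Stand-in for iealloc(): a single metadata lookup for ptr.  The real
 * implementation returns chunk_lookup(ptr, true) from chunks_rtree.
 */
static demo_extent_t *
iealloc_demo(const void *ptr)
{
    (void)ptr; /* a real radix tree would key on the pointer */
    return (&demo_extent);
}

/*
 * Post-commit shape of an internal helper: it receives the extent as an
 * argument instead of re-deriving the chunk via CHUNK_ADDR2BASE(ptr).
 */
static void
dalloc_demo(demo_extent_t *extent, void *ptr)
{
    printf("free %p: extent base=%p size=%zu achunk=%d\n",
        ptr, extent->addr, extent->size, extent->achunk);
}

/* Entry point: look the extent up once, then propagate it downward. */
static void
free_demo(void *ptr)
{
    demo_extent_t *extent = iealloc_demo(ptr);

    dalloc_demo(extent, ptr);
}

int
main(void)
{
    static char chunk[4096];

    demo_extent.addr = chunk;
    demo_extent.size = sizeof(chunk);
    demo_extent.achunk = 1;
    free_demo(chunk + 128);
    return (0);
}

The payoff visible throughout the diff below is exactly this shape: helpers such as arena_dalloc_small() and huge_dalloc() stop re-deriving the chunk with CHUNK_ADDR2BASE() and instead trust the extent handed to them by their caller.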
@@ -526,13 +526,13 @@ void *arena_malloc_hard(tsdn_t *tsdn, arena_t *arena, size_t size,
     szind_t ind, bool zero);
 void *arena_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize,
     size_t alignment, bool zero, tcache_t *tcache);
-void arena_prof_promoted(tsdn_t *tsdn, const void *ptr, size_t size);
+void arena_prof_promoted(tsdn_t *tsdn, const extent_t *extent,
+    const void *ptr, size_t size);
 void arena_dalloc_bin_junked_locked(tsdn_t *tsdn, arena_t *arena,
-    arena_chunk_t *chunk, void *ptr, arena_chunk_map_bits_t *bitselm);
-void arena_dalloc_bin(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
-    void *ptr, size_t pageind, arena_chunk_map_bits_t *bitselm);
+    arena_chunk_t *chunk, extent_t *extent, void *ptr,
+    arena_chunk_map_bits_t *bitselm);
 void arena_dalloc_small(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
-    void *ptr, size_t pageind);
+    extent_t *extent, void *ptr, size_t pageind);
 #ifdef JEMALLOC_JET
 typedef void (arena_dalloc_junk_large_t)(void *, size_t);
 extern arena_dalloc_junk_large_t *arena_dalloc_junk_large;
@@ -540,17 +540,17 @@ extern arena_dalloc_junk_large_t *arena_dalloc_junk_large;
 void arena_dalloc_junk_large(void *ptr, size_t usize);
 #endif
 void arena_dalloc_large_junked_locked(tsdn_t *tsdn, arena_t *arena,
-    arena_chunk_t *chunk, void *ptr);
+    arena_chunk_t *chunk, extent_t *extent, void *ptr);
 void arena_dalloc_large(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
-    void *ptr);
+    extent_t *extent, void *ptr);
 #ifdef JEMALLOC_JET
 typedef void (arena_ralloc_junk_large_t)(void *, size_t, size_t);
 extern arena_ralloc_junk_large_t *arena_ralloc_junk_large;
 #endif
-bool arena_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize,
-    size_t size, size_t extra, bool zero);
-void *arena_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t oldsize,
-    size_t size, size_t alignment, bool zero, tcache_t *tcache);
+bool arena_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, void *ptr,
+    size_t oldsize, size_t size, size_t extra, bool zero);
+void *arena_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, void *ptr,
+    size_t oldsize, size_t size, size_t alignment, bool zero, tcache_t *tcache);
 dss_prec_t arena_dss_prec_get(tsdn_t *tsdn, arena_t *arena);
 bool arena_dss_prec_set(tsdn_t *tsdn, arena_t *arena, dss_prec_t dss_prec);
 ssize_t arena_lg_dirty_mult_default_get(void);
@@ -637,20 +637,23 @@ szind_t arena_ptr_small_binind_get(const void *ptr, size_t mapbits);
 szind_t arena_bin_index(arena_t *arena, arena_bin_t *bin);
 size_t arena_run_regind(arena_run_t *run, const arena_bin_info_t *bin_info,
     const void *ptr);
-prof_tctx_t *arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr);
-void arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
-    prof_tctx_t *tctx);
-void arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, size_t usize,
-    const void *old_ptr, prof_tctx_t *old_tctx);
+prof_tctx_t *arena_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent,
+    const void *ptr);
+void arena_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr,
+    size_t usize, prof_tctx_t *tctx);
+void arena_prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr,
+    size_t usize, const void *old_ptr, prof_tctx_t *old_tctx);
 void arena_decay_ticks(tsdn_t *tsdn, arena_t *arena, unsigned nticks);
 void arena_decay_tick(tsdn_t *tsdn, arena_t *arena);
 void *arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind,
     bool zero, tcache_t *tcache, bool slow_path);
 arena_t *arena_aalloc(const void *ptr);
-size_t arena_salloc(tsdn_t *tsdn, const void *ptr, bool demote);
-void arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path);
-void arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
-    bool slow_path);
+size_t arena_salloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr,
+    bool demote);
+void arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr,
+    tcache_t *tcache, bool slow_path);
+void arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
+    tcache_t *tcache, bool slow_path);
 #endif

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
@@ -1042,7 +1045,9 @@ arena_prof_accum(tsdn_t *tsdn, arena_t *arena, uint64_t accumbytes)
         return (ret);
     }
 }
+# endif /* JEMALLOC_ARENA_INLINE_A */
+
+# ifdef JEMALLOC_ARENA_INLINE_B
 JEMALLOC_ALWAYS_INLINE szind_t
 arena_ptr_small_binind_get(const void *ptr, size_t mapbits)
 {
@@ -1051,6 +1056,7 @@ arena_ptr_small_binind_get(const void *ptr, size_t mapbits)
     binind = (mapbits & CHUNK_MAP_BININD_MASK) >> CHUNK_MAP_BININD_SHIFT;

     if (config_debug) {
+        const extent_t *extent;
         arena_chunk_t *chunk;
         arena_t *arena;
         size_t pageind;
@@ -1065,8 +1071,9 @@ arena_ptr_small_binind_get(const void *ptr, size_t mapbits)

         assert(binind != BININD_INVALID);
         assert(binind < NBINS);
-        chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
-        arena = extent_arena_get(&chunk->extent);
+        extent = iealloc(ptr);
+        chunk = (arena_chunk_t *)extent_addr_get(extent);
+        arena = extent_arena_get(extent);
         pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
         actual_mapbits = arena_mapbits_get(chunk, pageind);
         assert(mapbits == actual_mapbits);
@@ -1088,9 +1095,7 @@ arena_ptr_small_binind_get(const void *ptr, size_t mapbits)

     return (binind);
 }
-# endif /* JEMALLOC_ARENA_INLINE_A */
-
-# ifdef JEMALLOC_ARENA_INLINE_B
 JEMALLOC_INLINE szind_t
 arena_bin_index(arena_t *arena, arena_bin_t *bin)
 {
@@ -1172,16 +1177,15 @@ arena_run_regind(arena_run_t *run, const arena_bin_info_t *bin_info,
 }

 JEMALLOC_INLINE prof_tctx_t *
-arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr)
+arena_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent, const void *ptr)
 {
     prof_tctx_t *ret;
-    arena_chunk_t *chunk;

     cassert(config_prof);
     assert(ptr != NULL);

-    chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
-    if (likely(chunk != ptr)) {
+    if (likely(extent_achunk_get(extent))) {
+        arena_chunk_t *chunk = (arena_chunk_t *)extent_addr_get(extent);
         size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
         size_t mapbits = arena_mapbits_get(chunk, pageind);
         assert((mapbits & CHUNK_MAP_ALLOCATED) != 0);
@@ -1193,22 +1197,21 @@ arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr)
             ret = atomic_read_p(&elm->prof_tctx_pun);
         }
     } else
-        ret = huge_prof_tctx_get(tsdn, ptr);
+        ret = huge_prof_tctx_get(tsdn, extent, ptr);

     return (ret);
 }

 JEMALLOC_INLINE void
-arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
-    prof_tctx_t *tctx)
+arena_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr,
+    size_t usize, prof_tctx_t *tctx)
 {
-    arena_chunk_t *chunk;

     cassert(config_prof);
     assert(ptr != NULL);

-    chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
-    if (likely(chunk != ptr)) {
+    if (likely(extent_achunk_get(extent))) {
+        arena_chunk_t *chunk = (arena_chunk_t *)extent_addr_get(extent);
         size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;

         assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
@@ -1231,12 +1234,12 @@ arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
             assert(arena_mapbits_large_get(chunk, pageind) == 0);
         }
     } else
-        huge_prof_tctx_set(tsdn, ptr, tctx);
+        huge_prof_tctx_set(tsdn, extent, ptr, tctx);
 }

 JEMALLOC_INLINE void
-arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, size_t usize,
-    const void *old_ptr, prof_tctx_t *old_tctx)
+arena_prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr,
+    size_t usize, const void *old_ptr, prof_tctx_t *old_tctx)
 {

     cassert(config_prof);
@@ -1244,7 +1247,7 @@ arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, size_t usize,

     if (unlikely(usize > SMALL_MAXCLASS || (ptr == old_ptr &&
         (uintptr_t)old_tctx > (uintptr_t)1U))) {
-        arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
+        arena_chunk_t *chunk = (arena_chunk_t *)extent_addr_get(extent);
         if (likely(chunk != ptr)) {
             size_t pageind;
             arena_chunk_map_misc_t *elm;
@@ -1259,7 +1262,7 @@ arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, size_t usize,
             atomic_write_p(&elm->prof_tctx_pun,
                 (prof_tctx_t *)(uintptr_t)1U);
         } else
-            huge_prof_tctx_reset(tsdn, ptr);
+            huge_prof_tctx_reset(tsdn, extent, ptr);
     }
 }

@@ -1313,28 +1316,24 @@ arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero,
 JEMALLOC_ALWAYS_INLINE arena_t *
 arena_aalloc(const void *ptr)
 {
-    arena_chunk_t *chunk;

-    chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
-    if (likely(chunk != ptr))
-        return (extent_arena_get(&chunk->extent));
-    else
-        return (huge_aalloc(ptr));
+    return (extent_arena_get(iealloc(ptr)));
 }

 /* Return the size of the allocation pointed to by ptr. */
 JEMALLOC_ALWAYS_INLINE size_t
-arena_salloc(tsdn_t *tsdn, const void *ptr, bool demote)
+arena_salloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr, bool demote)
 {
     size_t ret;
-    arena_chunk_t *chunk;
     size_t pageind;
     szind_t binind;

     assert(ptr != NULL);

-    chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
-    if (likely(chunk != ptr)) {
+    if (likely(extent_achunk_get(extent))) {
+        const arena_chunk_t *chunk =
+            (const arena_chunk_t *)extent_addr_get(extent);
+
         pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
         assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
         binind = arena_mapbits_binind_get(chunk, pageind);
@@ -1367,22 +1366,23 @@ arena_salloc(tsdn_t *tsdn, const void *ptr, bool demote)
             ret = index2size(binind);
         }
     } else
-        ret = huge_salloc(tsdn, ptr);
+        ret = huge_salloc(tsdn, extent, ptr);

     return (ret);
 }

 JEMALLOC_ALWAYS_INLINE void
-arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path)
+arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
+    bool slow_path)
 {
-    arena_chunk_t *chunk;
     size_t pageind, mapbits;

     assert(!tsdn_null(tsdn) || tcache == NULL);
     assert(ptr != NULL);

-    chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
-    if (likely(chunk != ptr)) {
+    if (likely(extent_achunk_get(extent))) {
+        arena_chunk_t *chunk = (arena_chunk_t *)extent_addr_get(extent);
+
         pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
         mapbits = arena_mapbits_get(chunk, pageind);
         assert(arena_mapbits_allocated_get(chunk, pageind) != 0);
@@ -1395,7 +1395,7 @@ arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path)
                     binind, slow_path);
             } else {
                 arena_dalloc_small(tsdn,
-                    extent_arena_get(&chunk->extent), chunk,
+                    extent_arena_get(extent), chunk, extent,
                     ptr, pageind);
             }
         } else {
@@ -1411,24 +1411,24 @@ arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool slow_path)
                     size - large_pad, slow_path);
             } else {
                 arena_dalloc_large(tsdn,
-                    extent_arena_get(&chunk->extent), chunk,
+                    extent_arena_get(extent), chunk, extent,
                     ptr);
             }
         }
     } else
-        huge_dalloc(tsdn, ptr);
+        huge_dalloc(tsdn, extent, ptr);
 }

 JEMALLOC_ALWAYS_INLINE void
-arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
-    bool slow_path)
+arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
+    tcache_t *tcache, bool slow_path)
 {
-    arena_chunk_t *chunk;

     assert(!tsdn_null(tsdn) || tcache == NULL);

-    chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
-    if (likely(chunk != ptr)) {
+    if (likely(extent_achunk_get(extent))) {
+        arena_chunk_t *chunk = (arena_chunk_t *)extent_addr_get(extent);
+
         if (config_prof && opt_prof) {
             size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >>
                 LG_PAGE;
@@ -1443,7 +1443,8 @@ arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
                     pageind) - large_pad;
             }
         }
-        assert(s2u(size) == s2u(arena_salloc(tsdn, ptr, false)));
+        assert(s2u(size) == s2u(arena_salloc(tsdn, extent, ptr,
+            false)));

         if (likely(size <= SMALL_MAXCLASS)) {
             /* Small allocation. */
@@ -1455,7 +1456,7 @@ arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
                 size_t pageind = ((uintptr_t)ptr -
                     (uintptr_t)chunk) >> LG_PAGE;
                 arena_dalloc_small(tsdn,
-                    extent_arena_get(&chunk->extent), chunk,
+                    extent_arena_get(extent), chunk, extent,
                     ptr, pageind);
             }
         } else {
@@ -1467,12 +1468,12 @@ arena_sdalloc(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
                     size, slow_path);
             } else {
                 arena_dalloc_large(tsdn,
-                    extent_arena_get(&chunk->extent), chunk,
+                    extent_arena_get(extent), chunk, extent,
                     ptr);
             }
         }
     } else
-        huge_dalloc(tsdn, ptr);
+        huge_dalloc(tsdn, extent, ptr);
 }
 # endif /* JEMALLOC_ARENA_INLINE_B */
 #endif

@@ -54,6 +54,8 @@ chunk_hooks_t chunk_hooks_set(tsdn_t *tsdn, arena_t *arena,

 bool chunk_register(tsdn_t *tsdn, const void *chunk, const extent_t *extent);
 void chunk_deregister(const void *chunk, const extent_t *extent);
+void chunk_reregister(tsdn_t *tsdn, const void *chunk,
+    const extent_t *extent);
 void *chunk_alloc_base(size_t size);
 void *chunk_alloc_cache(tsdn_t *tsdn, arena_t *arena,
     chunk_hooks_t *chunk_hooks, void *new_addr, size_t size, size_t alignment,

@@ -12,20 +12,22 @@
 void *huge_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero);
 void *huge_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize,
     size_t alignment, bool zero);
-bool huge_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize,
-    size_t usize_min, size_t usize_max, bool zero);
-void *huge_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t oldsize,
-    size_t usize, size_t alignment, bool zero, tcache_t *tcache);
+bool huge_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, void *ptr,
+    size_t oldsize, size_t usize_min, size_t usize_max, bool zero);
+void *huge_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, void *ptr,
+    size_t oldsize, size_t usize, size_t alignment, bool zero,
+    tcache_t *tcache);
 #ifdef JEMALLOC_JET
 typedef void (huge_dalloc_junk_t)(tsdn_t *, void *, size_t);
 extern huge_dalloc_junk_t *huge_dalloc_junk;
 #endif
-void huge_dalloc(tsdn_t *tsdn, void *ptr);
-arena_t *huge_aalloc(const void *ptr);
-size_t huge_salloc(tsdn_t *tsdn, const void *ptr);
-prof_tctx_t *huge_prof_tctx_get(tsdn_t *tsdn, const void *ptr);
-void huge_prof_tctx_set(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx);
-void huge_prof_tctx_reset(tsdn_t *tsdn, const void *ptr);
+void huge_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr);
+size_t huge_salloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr);
+prof_tctx_t *huge_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent,
+    const void *ptr);
+void huge_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr,
+    prof_tctx_t *tctx);
+void huge_prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr);

 #endif /* JEMALLOC_H_EXTERNS */
 /******************************************************************************/

@@ -959,6 +959,20 @@ decay_ticker_get(tsd_t *tsd, unsigned ind)
 #define JEMALLOC_ARENA_INLINE_A
 #include "jemalloc/internal/arena.h"
 #undef JEMALLOC_ARENA_INLINE_A

+#ifndef JEMALLOC_ENABLE_INLINE
+extent_t *iealloc(const void *ptr);
+#endif
+
+#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
+JEMALLOC_ALWAYS_INLINE extent_t *
+iealloc(const void *ptr)
+{
+
+    return (chunk_lookup(ptr, true));
+}
+#endif
+
 #include "jemalloc/internal/tcache.h"
 #define JEMALLOC_ARENA_INLINE_B
 #include "jemalloc/internal/arena.h"
@@ -968,7 +982,8 @@ decay_ticker_get(tsd_t *tsd, unsigned ind)
 #ifndef JEMALLOC_ENABLE_INLINE
 extent_t *iealloc(const void *ptr);
 arena_t *iaalloc(const void *ptr);
-size_t isalloc(tsdn_t *tsdn, const void *ptr, bool demote);
+size_t isalloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr,
+    bool demote);
 void *iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero,
     tcache_t *tcache, bool is_metadata, arena_t *arena, bool slow_path);
 void *ialloc(tsd_t *tsd, size_t size, szind_t ind, bool zero,
@@ -979,30 +994,23 @@ void *ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
     tcache_t *tcache, arena_t *arena);
 void *ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero);
 size_t ivsalloc(tsdn_t *tsdn, const void *ptr, bool demote);
-void idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool is_metadata,
-    bool slow_path);
-void idalloc(tsd_t *tsd, void *ptr);
-void isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
-    bool slow_path);
-void *iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
-    size_t extra, size_t alignment, bool zero, tcache_t *tcache,
-    arena_t *arena);
-void *iralloct(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
-    size_t alignment, bool zero, tcache_t *tcache, arena_t *arena);
-void *iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
-    size_t alignment, bool zero);
-bool ixalloc(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
-    size_t extra, size_t alignment, bool zero);
+void idalloctm(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
+    bool is_metadata, bool slow_path);
+void idalloc(tsd_t *tsd, extent_t *extent, void *ptr);
+void isdalloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
+    tcache_t *tcache, bool slow_path);
+void *iralloct_realign(tsdn_t *tsdn, extent_t *extent, void *ptr,
+    size_t oldsize, size_t size, size_t extra, size_t alignment, bool zero,
+    tcache_t *tcache, arena_t *arena);
+void *iralloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
+    size_t size, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena);
+void *iralloc(tsd_t *tsd, extent_t *extent, void *ptr, size_t oldsize,
+    size_t size, size_t alignment, bool zero);
+bool ixalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
+    size_t size, size_t extra, size_t alignment, bool zero);
 #endif

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
-JEMALLOC_ALWAYS_INLINE extent_t *
-iealloc(const void *ptr)
-{
-
-    return (chunk_lookup(ptr, true));
-}
-
 JEMALLOC_ALWAYS_INLINE arena_t *
 iaalloc(const void *ptr)
 {
@@ -1016,17 +1024,18 @@ iaalloc(const void *ptr)
  * Typical usage:
  *   tsdn_t *tsdn = [...]
  *   void *ptr = [...]
- *   size_t sz = isalloc(tsdn, ptr, config_prof);
+ *   extent_t *extent = iealloc(ptr);
+ *   size_t sz = isalloc(tsdn, extent, ptr, config_prof);
  */
 JEMALLOC_ALWAYS_INLINE size_t
-isalloc(tsdn_t *tsdn, const void *ptr, bool demote)
+isalloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr, bool demote)
 {

     assert(ptr != NULL);
     /* Demotion only makes sense if config_prof is true. */
     assert(config_prof || !demote);

-    return (arena_salloc(tsdn, ptr, demote));
+    return (arena_salloc(tsdn, extent, ptr, demote));
 }

 JEMALLOC_ALWAYS_INLINE void *
@@ -1041,8 +1050,8 @@ iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache,

     ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path);
     if (config_stats && is_metadata && likely(ret != NULL)) {
-        arena_metadata_allocated_add(iaalloc(ret),
-            isalloc(tsdn, ret, config_prof));
+        arena_metadata_allocated_add(iaalloc(ret), isalloc(tsdn,
+            iealloc(ret), ret, config_prof));
     }
     return (ret);
 }
@@ -1069,8 +1078,8 @@ ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
     ret = arena_palloc(tsdn, arena, usize, alignment, zero, tcache);
     assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
     if (config_stats && is_metadata && likely(ret != NULL)) {
-        arena_metadata_allocated_add(iaalloc(ret), isalloc(tsdn, ret,
-            config_prof));
+        arena_metadata_allocated_add(iaalloc(ret), isalloc(tsdn,
+            iealloc(ret), ret, config_prof));
     }
     return (ret);
 }
@@ -1104,43 +1113,45 @@ ivsalloc(tsdn_t *tsdn, const void *ptr, bool demote)
     assert(extent_addr_get(extent) == ptr ||
         extent_achunk_get(extent));

-    return (isalloc(tsdn, ptr, demote));
+    return (isalloc(tsdn, extent, ptr, demote));
 }

 JEMALLOC_ALWAYS_INLINE void
-idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, bool is_metadata,
-    bool slow_path)
+idalloctm(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
+    bool is_metadata, bool slow_path)
 {

     assert(ptr != NULL);
     assert(!is_metadata || tcache == NULL);
     assert(!is_metadata || iaalloc(ptr)->ind < narenas_auto);
     if (config_stats && is_metadata) {
-        arena_metadata_allocated_sub(iaalloc(ptr), isalloc(tsdn, ptr,
-            config_prof));
+        arena_metadata_allocated_sub(iaalloc(ptr), isalloc(tsdn, extent,
+            ptr, config_prof));
     }

-    arena_dalloc(tsdn, ptr, tcache, slow_path);
+    arena_dalloc(tsdn, extent, ptr, tcache, slow_path);
 }

 JEMALLOC_ALWAYS_INLINE void
-idalloc(tsd_t *tsd, void *ptr)
+idalloc(tsd_t *tsd, extent_t *extent, void *ptr)
 {

-    idalloctm(tsd_tsdn(tsd), ptr, tcache_get(tsd, false), false, true);
+    idalloctm(tsd_tsdn(tsd), extent, ptr, tcache_get(tsd, false), false,
+        true);
 }

 JEMALLOC_ALWAYS_INLINE void
-isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,
-    bool slow_path)
+isdalloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
+    tcache_t *tcache, bool slow_path)
 {

-    arena_sdalloc(tsdn, ptr, size, tcache, slow_path);
+    arena_sdalloc(tsdn, extent, ptr, size, tcache, slow_path);
 }

 JEMALLOC_ALWAYS_INLINE void *
-iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
-    size_t extra, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena)
+iralloct_realign(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
+    size_t size, size_t extra, size_t alignment, bool zero, tcache_t *tcache,
+    arena_t *arena)
 {
     void *p;
     size_t usize, copysize;
@@ -1166,13 +1177,13 @@ iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
      */
     copysize = (size < oldsize) ? size : oldsize;
     memcpy(p, ptr, copysize);
-    isdalloct(tsdn, ptr, oldsize, tcache, true);
+    isdalloct(tsdn, extent, ptr, oldsize, tcache, true);
     return (p);
 }

 JEMALLOC_ALWAYS_INLINE void *
-iralloct(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t alignment,
-    bool zero, tcache_t *tcache, arena_t *arena)
+iralloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize, size_t size,
+    size_t alignment, bool zero, tcache_t *tcache, arena_t *arena)
 {

     assert(ptr != NULL);
@@ -1184,26 +1195,26 @@ iralloct(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t alignment,
          * Existing object alignment is inadequate; allocate new space
          * and copy.
          */
-        return (iralloct_realign(tsdn, ptr, oldsize, size, 0, alignment,
-            zero, tcache, arena));
+        return (iralloct_realign(tsdn, extent, ptr, oldsize, size, 0,
+            alignment, zero, tcache, arena));
     }

-    return (arena_ralloc(tsdn, arena, ptr, oldsize, size, alignment, zero,
-        tcache));
+    return (arena_ralloc(tsdn, arena, extent, ptr, oldsize, size, alignment,
+        zero, tcache));
 }

 JEMALLOC_ALWAYS_INLINE void *
-iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment,
-    bool zero)
+iralloc(tsd_t *tsd, extent_t *extent, void *ptr, size_t oldsize, size_t size,
+    size_t alignment, bool zero)
 {

-    return (iralloct(tsd_tsdn(tsd), ptr, oldsize, size, alignment, zero,
-        tcache_get(tsd, true), NULL));
+    return (iralloct(tsd_tsdn(tsd), extent, ptr, oldsize, size, alignment,
+        zero, tcache_get(tsd, true), NULL));
 }

 JEMALLOC_ALWAYS_INLINE bool
-ixalloc(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t extra,
-    size_t alignment, bool zero)
+ixalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize, size_t size,
+    size_t extra, size_t alignment, bool zero)
 {

     assert(ptr != NULL);
@@ -1215,7 +1226,8 @@ ixalloc(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t extra,
         return (true);
     }

-    return (arena_ralloc_no_move(tsdn, ptr, oldsize, size, extra, zero));
+    return (arena_ralloc_no_move(tsdn, extent, ptr, oldsize, size, extra,
+        zero));
 }
 #endif

@@ -181,6 +181,7 @@ chunk_postfork_parent
 chunk_prefork
 chunk_purge_wrapper
 chunk_register
+chunk_reregister
 chunks_rtree
 chunksize
 chunksize_mask
@@ -277,7 +278,6 @@ hash_rotl_64
 hash_x64_128
 hash_x86_128
 hash_x86_32
-huge_aalloc
 huge_dalloc
 huge_dalloc_junk
 huge_malloc

@@ -281,8 +281,8 @@ extern uint64_t prof_interval;
 extern size_t lg_prof_sample;

 void prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated);
-void prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
-    prof_tctx_t *tctx);
+void prof_malloc_sample_object(tsdn_t *tsdn, extent_t *extent,
+    const void *ptr, size_t usize, prof_tctx_t *tctx);
 void prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx);
 void bt_init(prof_bt_t *bt, void **vec);
 void prof_backtrace(prof_bt_t *bt);
@@ -330,21 +330,23 @@ void prof_sample_threshold_update(prof_tdata_t *tdata);
 bool prof_active_get_unlocked(void);
 bool prof_gdump_get_unlocked(void);
 prof_tdata_t *prof_tdata_get(tsd_t *tsd, bool create);
-prof_tctx_t *prof_tctx_get(tsdn_t *tsdn, const void *ptr);
-void prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize,
-    prof_tctx_t *tctx);
-void prof_tctx_reset(tsdn_t *tsdn, const void *ptr, size_t usize,
-    const void *old_ptr, prof_tctx_t *tctx);
+prof_tctx_t *prof_tctx_get(tsdn_t *tsdn, const extent_t *extent,
+    const void *ptr);
+void prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr,
+    size_t usize, prof_tctx_t *tctx);
+void prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr,
+    size_t usize, const void *old_ptr, prof_tctx_t *tctx);
 bool prof_sample_accum_update(tsd_t *tsd, size_t usize, bool commit,
     prof_tdata_t **tdata_out);
 prof_tctx_t *prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active,
     bool update);
-void prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize,
-    prof_tctx_t *tctx);
-void prof_realloc(tsd_t *tsd, const void *ptr, size_t usize,
-    prof_tctx_t *tctx, bool prof_active, bool updated, const void *old_ptr,
-    size_t old_usize, prof_tctx_t *old_tctx);
-void prof_free(tsd_t *tsd, const void *ptr, size_t usize);
+void prof_malloc(tsdn_t *tsdn, extent_t *extent, const void *ptr,
+    size_t usize, prof_tctx_t *tctx);
+void prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr,
+    size_t usize, prof_tctx_t *tctx, bool prof_active, bool updated,
+    const void *old_ptr, size_t old_usize, prof_tctx_t *old_tctx);
+void prof_free(tsd_t *tsd, const extent_t *extent, const void *ptr,
+    size_t usize);
 #endif

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_PROF_C_))
@@ -398,34 +400,35 @@ prof_tdata_get(tsd_t *tsd, bool create)
 }

 JEMALLOC_ALWAYS_INLINE prof_tctx_t *
-prof_tctx_get(tsdn_t *tsdn, const void *ptr)
+prof_tctx_get(tsdn_t *tsdn, const extent_t *extent, const void *ptr)
 {

     cassert(config_prof);
     assert(ptr != NULL);

-    return (arena_prof_tctx_get(tsdn, ptr));
+    return (arena_prof_tctx_get(tsdn, extent, ptr));
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_tctx_set(tsdn_t *tsdn, const void *ptr, size_t usize, prof_tctx_t *tctx)
+prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize,
+    prof_tctx_t *tctx)
 {

     cassert(config_prof);
     assert(ptr != NULL);

-    arena_prof_tctx_set(tsdn, ptr, usize, tctx);
+    arena_prof_tctx_set(tsdn, extent, ptr, usize, tctx);
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_tctx_reset(tsdn_t *tsdn, const void *ptr, size_t usize, const void *old_ptr,
-    prof_tctx_t *old_tctx)
+prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize,
+    const void *old_ptr, prof_tctx_t *old_tctx)
 {

     cassert(config_prof);
     assert(ptr != NULL);

-    arena_prof_tctx_reset(tsdn, ptr, usize, old_ptr, old_tctx);
+    arena_prof_tctx_reset(tsdn, extent, ptr, usize, old_ptr, old_tctx);
 }

 JEMALLOC_ALWAYS_INLINE bool
@@ -480,23 +483,26 @@ prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update)
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_malloc(tsdn_t *tsdn, const void *ptr, size_t usize, prof_tctx_t *tctx)
+prof_malloc(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize,
+    prof_tctx_t *tctx)
 {

     cassert(config_prof);
     assert(ptr != NULL);
-    assert(usize == isalloc(tsdn, ptr, true));
+    assert(usize == isalloc(tsdn, extent, ptr, true));

     if (unlikely((uintptr_t)tctx > (uintptr_t)1U))
-        prof_malloc_sample_object(tsdn, ptr, usize, tctx);
-    else
-        prof_tctx_set(tsdn, ptr, usize, (prof_tctx_t *)(uintptr_t)1U);
+        prof_malloc_sample_object(tsdn, extent, ptr, usize, tctx);
+    else {
+        prof_tctx_set(tsdn, extent, ptr, usize,
+            (prof_tctx_t *)(uintptr_t)1U);
+    }
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
-    bool prof_active, bool updated, const void *old_ptr, size_t old_usize,
-    prof_tctx_t *old_tctx)
+prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr, size_t usize,
+    prof_tctx_t *tctx, bool prof_active, bool updated, const void *old_ptr,
+    size_t old_usize, prof_tctx_t *old_tctx)
 {
     bool sampled, old_sampled;

@@ -504,7 +510,7 @@ prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
     assert(ptr != NULL || (uintptr_t)tctx <= (uintptr_t)1U);

     if (prof_active && !updated && ptr != NULL) {
-        assert(usize == isalloc(tsd_tsdn(tsd), ptr, true));
+        assert(usize == isalloc(tsd_tsdn(tsd), extent, ptr, true));
         if (prof_sample_accum_update(tsd, usize, true, NULL)) {
             /*
              * Don't sample. The usize passed to prof_alloc_prep()
@@ -520,22 +526,25 @@ prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx,
     sampled = ((uintptr_t)tctx > (uintptr_t)1U);
     old_sampled = ((uintptr_t)old_tctx > (uintptr_t)1U);

-    if (unlikely(sampled))
-        prof_malloc_sample_object(tsd_tsdn(tsd), ptr, usize, tctx);
-    else
-        prof_tctx_reset(tsd_tsdn(tsd), ptr, usize, old_ptr, old_tctx);
+    if (unlikely(sampled)) {
+        prof_malloc_sample_object(tsd_tsdn(tsd), extent, ptr, usize,
+            tctx);
+    } else {
+        prof_tctx_reset(tsd_tsdn(tsd), extent, ptr, usize, old_ptr,
+            old_tctx);
+    }

     if (unlikely(old_sampled))
         prof_free_sampled_object(tsd, old_usize, old_tctx);
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_free(tsd_t *tsd, const void *ptr, size_t usize)
+prof_free(tsd_t *tsd, const extent_t *extent, const void *ptr, size_t usize)
 {
-    prof_tctx_t *tctx = prof_tctx_get(tsd_tsdn(tsd), ptr);
+    prof_tctx_t *tctx = prof_tctx_get(tsd_tsdn(tsd), extent, ptr);

     cassert(config_prof);
-    assert(usize == isalloc(tsd_tsdn(tsd), ptr, true));
+    assert(usize == isalloc(tsd_tsdn(tsd), extent, ptr, true));

     if (unlikely((uintptr_t)tctx > (uintptr_t)1U))
         prof_free_sampled_object(tsd, usize, tctx);

@@ -371,7 +371,7 @@ tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,

         if (config_prof && usize == LARGE_MINCLASS) {
             arena_chunk_t *chunk =
-                (arena_chunk_t *)CHUNK_ADDR2BASE(ret);
+                (arena_chunk_t *)extent_addr_get(iealloc(ret));
             size_t pageind = (((uintptr_t)ret - (uintptr_t)chunk) >>
                 LG_PAGE);
             arena_mapbits_large_binind_set(chunk, pageind,

src/arena.c
@ -45,10 +45,10 @@ unsigned nhclasses; /* Number of huge size classes. */
|
||||
|
||||
static void arena_purge_to_limit(tsdn_t *tsdn, arena_t *arena,
|
||||
size_t ndirty_limit);
|
||||
static void arena_run_dalloc(tsdn_t *tsdn, arena_t *arena, arena_run_t *run,
|
||||
bool dirty, bool cleaned, bool decommitted);
|
||||
static void arena_run_dalloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent,
|
||||
arena_run_t *run, bool dirty, bool cleaned, bool decommitted);
|
||||
static void arena_dalloc_bin_run(tsdn_t *tsdn, arena_t *arena,
|
||||
arena_chunk_t *chunk, arena_run_t *run, arena_bin_t *bin);
|
||||
arena_chunk_t *chunk, extent_t *extent, arena_run_t *run, arena_bin_t *bin);
|
||||
static void arena_bin_lower_run(arena_t *arena, arena_chunk_t *chunk,
|
||||
arena_run_t *run, arena_bin_t *bin);
|
||||
|
||||
@ -264,9 +264,9 @@ arena_run_reg_alloc(arena_run_t *run, const arena_bin_info_t *bin_info)
|
||||
}
|
||||
|
||||
JEMALLOC_INLINE_C void
|
||||
arena_run_reg_dalloc(arena_run_t *run, void *ptr)
|
||||
arena_run_reg_dalloc(arena_run_t *run, extent_t *extent, void *ptr)
|
||||
{
|
||||
arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(run);
|
||||
arena_chunk_t *chunk = (arena_chunk_t *)extent_addr_get(extent);
|
||||
size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
|
||||
size_t mapbits = arena_mapbits_get(chunk, pageind);
|
||||
szind_t binind = arena_ptr_small_binind_get(ptr, mapbits);
|
||||
@ -375,15 +375,15 @@ arena_run_split_remove(arena_t *arena, arena_chunk_t *chunk, size_t run_ind,
|
||||
}
|
||||
|
||||
static bool
|
||||
arena_run_split_large_helper(arena_t *arena, arena_run_t *run, size_t size,
|
||||
bool remove, bool zero)
|
||||
arena_run_split_large_helper(arena_t *arena, extent_t *extent, arena_run_t *run,
|
||||
size_t size, bool remove, bool zero)
|
||||
{
|
||||
arena_chunk_t *chunk;
|
||||
arena_chunk_map_misc_t *miscelm;
|
||||
size_t flag_dirty, flag_decommitted, run_ind, need_pages;
|
||||
size_t flag_unzeroed_mask;
|
||||
|
||||
chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(run);
|
||||
chunk = (arena_chunk_t *)extent_addr_get(extent);
|
||||
miscelm = arena_run_to_miscelm(run);
|
||||
run_ind = arena_miscelm_to_pageind(miscelm);
|
||||
flag_dirty = arena_mapbits_dirty_get(chunk, run_ind);
|
||||
@ -439,22 +439,26 @@ arena_run_split_large_helper(arena_t *arena, arena_run_t *run, size_t size,
|
||||
}
|
||||
|
||||
static bool
|
||||
arena_run_split_large(arena_t *arena, arena_run_t *run, size_t size, bool zero)
|
||||
arena_run_split_large(arena_t *arena, extent_t *extent, arena_run_t *run,
|
||||
size_t size, bool zero)
|
||||
{
|
||||
|
||||
return (arena_run_split_large_helper(arena, run, size, true, zero));
|
||||
return (arena_run_split_large_helper(arena, extent, run, size, true,
|
||||
zero));
|
||||
}
|
||||
|
||||
static bool
|
||||
arena_run_init_large(arena_t *arena, arena_run_t *run, size_t size, bool zero)
|
||||
arena_run_init_large(arena_t *arena, extent_t *extent, arena_run_t *run,
|
||||
size_t size, bool zero)
|
||||
{
|
||||
|
||||
return (arena_run_split_large_helper(arena, run, size, false, zero));
|
||||
return (arena_run_split_large_helper(arena, extent, run, size, false,
|
||||
zero));
|
||||
}
|
||||
|
||||
static bool
|
||||
arena_run_split_small(arena_t *arena, arena_run_t *run, size_t size,
|
||||
szind_t binind)
|
||||
arena_run_split_small(arena_t *arena, extent_t *extent, arena_run_t *run,
|
||||
size_t size, szind_t binind)
|
||||
{
|
||||
arena_chunk_t *chunk;
|
||||
arena_chunk_map_misc_t *miscelm;
|
||||
@ -462,7 +466,7 @@ arena_run_split_small(arena_t *arena, arena_run_t *run, size_t size,
|
||||
|
||||
assert(binind != BININD_INVALID);
|
||||
|
||||
chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(run);
|
||||
chunk = (arena_chunk_t *)extent_addr_get(extent);
|
||||
miscelm = arena_run_to_miscelm(run);
|
||||
run_ind = arena_miscelm_to_pageind(miscelm);
|
||||
flag_dirty = arena_mapbits_dirty_get(chunk, run_ind);
|
||||
@ -1037,7 +1041,7 @@ arena_run_alloc_large_helper(arena_t *arena, size_t size, bool zero)
|
||||
{
|
||||
arena_run_t *run = arena_run_first_best_fit(arena, s2u(size));
|
||||
if (run != NULL) {
|
||||
if (arena_run_split_large(arena, run, size, zero))
|
||||
if (arena_run_split_large(arena, iealloc(run), run, size, zero))
|
||||
run = NULL;
|
||||
}
|
||||
return (run);
|
||||
@ -1063,7 +1067,7 @@ arena_run_alloc_large(tsdn_t *tsdn, arena_t *arena, size_t size, bool zero)
|
||||
chunk = arena_chunk_alloc(tsdn, arena);
|
||||
if (chunk != NULL) {
|
||||
run = &arena_miscelm_get_mutable(chunk, map_bias)->run;
|
||||
if (arena_run_split_large(arena, run, size, zero))
|
||||
if (arena_run_split_large(arena, iealloc(run), run, size, zero))
|
||||
run = NULL;
|
||||
return (run);
|
||||
}
|
||||
@ -1081,7 +1085,8 @@ arena_run_alloc_small_helper(arena_t *arena, size_t size, szind_t binind)
|
||||
{
|
||||
arena_run_t *run = arena_run_first_best_fit(arena, size);
|
||||
if (run != NULL) {
|
||||
if (arena_run_split_small(arena, run, size, binind))
|
||||
if (arena_run_split_small(arena, iealloc(run), run, size,
|
||||
binind))
|
||||
run = NULL;
|
||||
}
|
||||
return (run);
|
||||
@ -1108,7 +1113,8 @@ arena_run_alloc_small(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t binind)
|
||||
chunk = arena_chunk_alloc(tsdn, arena);
|
||||
if (chunk != NULL) {
|
||||
run = &arena_miscelm_get_mutable(chunk, map_bias)->run;
|
||||
if (arena_run_split_small(arena, run, size, binind))
|
||||
if (arena_run_split_small(arena, iealloc(run), run, size,
|
||||
binind))
|
||||
run = NULL;
|
||||
return (run);
|
||||
}
|
||||
@ -1435,8 +1441,9 @@ arena_dirty_count(arena_t *arena)
|
||||
npages = extent_size_get(chunkselm) >> LG_PAGE;
|
||||
chunkselm = qr_next(chunkselm, cc_link);
|
||||
} else {
|
||||
arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(
|
||||
rdelm);
|
||||
extent_t *extent = iealloc(rdelm);
|
||||
arena_chunk_t *chunk =
|
||||
(arena_chunk_t *)extent_addr_get(extent);
|
||||
arena_chunk_map_misc_t *miscelm =
|
||||
arena_rd_to_miscelm(rdelm);
|
||||
size_t pageind = arena_miscelm_to_pageind(miscelm);
|
||||
@ -1497,8 +1504,9 @@ arena_stash_dirty(tsdn_t *tsdn, arena_t *arena, chunk_hooks_t *chunk_hooks,
|
||||
LG_PAGE));
|
||||
chunkselm = chunkselm_next;
|
||||
} else {
|
||||
extent_t *extent = iealloc(rdelm);
|
||||
arena_chunk_t *chunk =
|
||||
(arena_chunk_t *)CHUNK_ADDR2BASE(rdelm);
|
||||
(arena_chunk_t *)extent_addr_get(extent);
|
||||
arena_chunk_map_misc_t *miscelm =
|
||||
arena_rd_to_miscelm(rdelm);
|
||||
size_t pageind = arena_miscelm_to_pageind(miscelm);
|
||||
@ -1523,7 +1531,8 @@ arena_stash_dirty(tsdn_t *tsdn, arena_t *arena, chunk_hooks_t *chunk_hooks,
|
||||
arena_chunk_alloc(tsdn, arena);
|
||||
|
||||
/* Temporarily allocate the free dirty run. */
|
||||
arena_run_split_large(arena, run, run_size, false);
|
||||
arena_run_split_large(arena, extent, run, run_size,
|
||||
false);
|
||||
/* Stash. */
|
||||
if (false)
|
||||
qr_new(rdelm, rd_link); /* Redundant. */
|
||||
@ -1577,8 +1586,9 @@ arena_purge_stashed(tsdn_t *tsdn, arena_t *arena, chunk_hooks_t *chunk_hooks,
|
||||
} else {
|
||||
size_t pageind, run_size, flag_unzeroed, flags, i;
|
||||
bool decommitted;
|
||||
extent_t *extent = iealloc(rdelm);
|
||||
arena_chunk_t *chunk =
|
||||
(arena_chunk_t *)CHUNK_ADDR2BASE(rdelm);
|
||||
(arena_chunk_t *)extent_addr_get(extent);
|
||||
arena_chunk_map_misc_t *miscelm =
|
||||
arena_rd_to_miscelm(rdelm);
|
||||
pageind = arena_miscelm_to_pageind(miscelm);
|
||||
@ -1661,8 +1671,9 @@ arena_unstash_purged(tsdn_t *tsdn, arena_t *arena, chunk_hooks_t *chunk_hooks,
|
||||
chunk_dalloc_wrapper(tsdn, arena, chunk_hooks, addr,
|
||||
size, zeroed, committed);
|
||||
} else {
|
||||
extent_t *extent = iealloc(rdelm);
|
||||
arena_chunk_t *chunk =
|
||||
(arena_chunk_t *)CHUNK_ADDR2BASE(rdelm);
|
||||
(arena_chunk_t *)extent_addr_get(extent);
|
||||
arena_chunk_map_misc_t *miscelm =
|
||||
arena_rd_to_miscelm(rdelm);
|
||||
size_t pageind = arena_miscelm_to_pageind(miscelm);
|
||||
@ -1670,7 +1681,7 @@ arena_unstash_purged(tsdn_t *tsdn, arena_t *arena, chunk_hooks_t *chunk_hooks,
|
||||
pageind) != 0);
|
||||
arena_run_t *run = &miscelm->run;
|
||||
qr_remove(rdelm, rd_link);
|
||||
arena_run_dalloc(tsdn, arena, run, false, true,
|
||||
arena_run_dalloc(tsdn, arena, extent, run, false, true,
|
||||
decommitted);
|
||||
}
|
||||
}
|
||||
@ -1755,10 +1766,10 @@ arena_achunk_prof_reset(tsd_t *tsd, arena_t *arena, arena_chunk_t *chunk)
|
||||
if (arena_mapbits_large_get(chunk, pageind) != 0) {
|
||||
void *ptr = (void *)((uintptr_t)chunk + (pageind
|
||||
<< LG_PAGE));
|
||||
size_t usize = isalloc(tsd_tsdn(tsd), ptr,
|
||||
config_prof);
|
||||
size_t usize = isalloc(tsd_tsdn(tsd),
|
||||
&chunk->extent, ptr, config_prof);
|
||||
|
||||
prof_free(tsd, ptr, usize);
|
||||
prof_free(tsd, &chunk->extent, ptr, usize);
|
||||
npages = arena_mapbits_large_size_get(chunk,
|
||||
pageind) >> LG_PAGE;
|
||||
} else {
|
||||
@ -1820,12 +1831,14 @@ arena_reset(tsd_t *tsd, arena_t *arena)
|
||||
size_t usize;
|
||||
|
||||
malloc_mutex_unlock(tsd_tsdn(tsd), &arena->huge_mtx);
|
||||
if (config_stats || (config_prof && opt_prof))
|
||||
usize = isalloc(tsd_tsdn(tsd), ptr, config_prof);
|
||||
if (config_stats || (config_prof && opt_prof)) {
|
||||
usize = isalloc(tsd_tsdn(tsd), extent, ptr,
|
||||
config_prof);
|
||||
}
|
||||
/* Remove huge allocation from prof sample set. */
|
||||
if (config_prof && opt_prof)
|
||||
prof_free(tsd, ptr, usize);
|
||||
huge_dalloc(tsd_tsdn(tsd), ptr);
|
||||
prof_free(tsd, extent, ptr, usize);
|
||||
huge_dalloc(tsd_tsdn(tsd), extent, ptr);
|
||||
malloc_mutex_lock(tsd_tsdn(tsd), &arena->huge_mtx);
|
||||
/* Cancel out unwanted effects on stats. */
|
||||
if (config_stats)
|
||||
@ -1997,14 +2010,14 @@ arena_run_size_get(arena_t *arena, arena_chunk_t *chunk, arena_run_t *run,
|
||||
}
|
||||
|
||||
static void
|
||||
arena_run_dalloc(tsdn_t *tsdn, arena_t *arena, arena_run_t *run, bool dirty,
|
||||
bool cleaned, bool decommitted)
|
||||
arena_run_dalloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent,
|
||||
arena_run_t *run, bool dirty, bool cleaned, bool decommitted)
|
||||
{
|
||||
arena_chunk_t *chunk;
|
||||
arena_chunk_map_misc_t *miscelm;
|
||||
size_t size, run_ind, run_pages, flag_dirty, flag_decommitted;
|
||||
|
||||
chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(run);
|
||||
chunk = (arena_chunk_t *)extent_addr_get(extent);
|
||||
miscelm = arena_run_to_miscelm(run);
|
||||
run_ind = arena_miscelm_to_pageind(miscelm);
|
||||
assert(run_ind >= map_bias);
|
||||
@ -2074,7 +2087,7 @@ arena_run_dalloc(tsdn_t *tsdn, arena_t *arena, arena_run_t *run, bool dirty,
|
||||
|
||||
static void
|
||||
arena_run_trim_head(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
|
||||
arena_run_t *run, size_t oldsize, size_t newsize)
|
||||
extent_t *extent, arena_run_t *run, size_t oldsize, size_t newsize)
|
||||
{
|
||||
arena_chunk_map_misc_t *miscelm = arena_run_to_miscelm(run);
|
||||
size_t pageind = arena_miscelm_to_pageind(miscelm);
|
||||
@ -2109,13 +2122,14 @@ arena_run_trim_head(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
|
||||
flag_dirty | (flag_unzeroed_mask & arena_mapbits_unzeroed_get(chunk,
|
||||
pageind+head_npages)));
|
||||
|
||||
arena_run_dalloc(tsdn, arena, run, false, false, (flag_decommitted !=
|
||||
0));
|
||||
arena_run_dalloc(tsdn, arena, extent, run, false, false,
|
||||
(flag_decommitted != 0));
|
||||
}
|
||||
|
||||
static void
|
||||
arena_run_trim_tail(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
|
||||
arena_run_t *run, size_t oldsize, size_t newsize, bool dirty)
|
||||
extent_t *extent, arena_run_t *run, size_t oldsize, size_t newsize,
|
||||
bool dirty)
|
||||
{
|
||||
arena_chunk_map_misc_t *miscelm = arena_run_to_miscelm(run);
|
||||
size_t pageind = arena_miscelm_to_pageind(miscelm);
|
||||
@ -2154,8 +2168,8 @@ arena_run_trim_tail(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
|
||||
|
||||
tail_miscelm = arena_miscelm_get_mutable(chunk, pageind + head_npages);
|
||||
tail_run = &tail_miscelm->run;
|
||||
arena_run_dalloc(tsdn, arena, tail_run, dirty, false, (flag_decommitted
|
||||
!= 0));
|
||||
arena_run_dalloc(tsdn, arena, extent, tail_run, dirty, false,
|
||||
(flag_decommitted != 0));
|
||||
}
|
||||
|
||||
static void
|
||||
@ -2251,6 +2265,7 @@ arena_bin_malloc_hard(tsdn_t *tsdn, arena_t *arena, arena_bin_t *bin)
|
||||
assert(bin->runcur->nfree > 0);
|
||||
ret = arena_run_reg_alloc(bin->runcur, bin_info);
|
||||
if (run != NULL) {
|
||||
extent_t *extent;
|
||||
arena_chunk_t *chunk;
|
||||
|
||||
/*
|
||||
@ -2261,10 +2276,11 @@ arena_bin_malloc_hard(tsdn_t *tsdn, arena_t *arena, arena_bin_t *bin)
|
||||
* arena_bin_lower_run() must be called, as if a region
|
||||
* were just deallocated from the run.
|
||||
*/
|
||||
chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(run);
|
||||
extent = iealloc(run);
|
||||
chunk = (arena_chunk_t *)extent_addr_get(extent);
|
||||
if (run->nfree == bin_info->nregs) {
|
||||
arena_dalloc_bin_run(tsdn, arena, chunk, run,
|
||||
bin);
|
||||
arena_dalloc_bin_run(tsdn, arena, chunk, extent,
|
||||
run, bin);
|
||||
} else
|
||||
arena_bin_lower_run(arena, chunk, run, bin);
|
||||
}
|
||||
@ -2499,6 +2515,7 @@ arena_palloc_large(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
|
||||
void *ret;
|
||||
size_t alloc_size, leadsize, trailsize;
|
||||
arena_run_t *run;
|
||||
extent_t *extent;
|
||||
arena_chunk_t *chunk;
|
||||
arena_chunk_map_misc_t *miscelm;
|
||||
void *rpages;
|
||||
@ -2520,7 +2537,8 @@ arena_palloc_large(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
|
||||
malloc_mutex_unlock(tsdn, &arena->lock);
|
||||
return (NULL);
|
||||
}
|
||||
chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(run);
|
||||
extent = iealloc(run);
|
||||
chunk = (arena_chunk_t *)extent_addr_get(extent);
|
||||
miscelm = arena_run_to_miscelm(run);
|
||||
rpages = arena_miscelm_to_rpages(miscelm);
|
||||
|
||||
@ -2531,20 +2549,22 @@ arena_palloc_large(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
|
||||
if (leadsize != 0) {
|
||||
arena_chunk_map_misc_t *head_miscelm = miscelm;
|
||||
arena_run_t *head_run = run;
|
||||
extent_t *head_extent = extent;
|
||||
|
||||
miscelm = arena_miscelm_get_mutable(chunk,
|
||||
arena_miscelm_to_pageind(head_miscelm) + (leadsize >>
|
||||
LG_PAGE));
|
||||
run = &miscelm->run;
|
||||
extent = iealloc(run);
|
||||
|
||||
arena_run_trim_head(tsdn, arena, chunk, head_run, alloc_size,
|
||||
alloc_size - leadsize);
|
||||
arena_run_trim_head(tsdn, arena, chunk, head_extent, head_run,
|
||||
alloc_size, alloc_size - leadsize);
|
||||
}
|
||||
if (trailsize != 0) {
|
||||
arena_run_trim_tail(tsdn, arena, chunk, run, usize + large_pad +
|
||||
trailsize, usize + large_pad, false);
|
||||
arena_run_trim_tail(tsdn, arena, chunk, extent, run, usize +
|
||||
large_pad + trailsize, usize + large_pad, false);
|
||||
}
|
||||
if (arena_run_init_large(arena, run, usize + large_pad, zero)) {
|
||||
if (arena_run_init_large(arena, extent, run, usize + large_pad, zero)) {
|
||||
size_t run_ind =
|
||||
arena_miscelm_to_pageind(arena_run_to_miscelm(run));
|
||||
bool dirty = (arena_mapbits_dirty_get(chunk, run_ind) != 0);
|
||||
@ -2552,7 +2572,8 @@ arena_palloc_large(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
|
||||
run_ind) != 0);
|
||||
|
||||
assert(decommitted); /* Cause of OOM. */
|
||||
arena_run_dalloc(tsdn, arena, run, dirty, false, decommitted);
|
||||
arena_run_dalloc(tsdn, arena, extent, run, dirty, false,
|
||||
decommitted);
|
||||
malloc_mutex_unlock(tsdn, &arena->lock);
|
||||
return (NULL);
|
||||
}
|
||||
@ -2616,7 +2637,8 @@ arena_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
|
||||
}
|
||||
|
||||
void
|
||||
arena_prof_promoted(tsdn_t *tsdn, const void *ptr, size_t size)
|
||||
arena_prof_promoted(tsdn_t *tsdn, const extent_t *extent, const void *ptr,
|
||||
size_t size)
|
||||
{
|
||||
arena_chunk_t *chunk;
|
||||
size_t pageind;
|
||||
@ -2624,32 +2646,30 @@ arena_prof_promoted(tsdn_t *tsdn, const void *ptr, size_t size)
|
||||
|
||||
cassert(config_prof);
|
||||
assert(ptr != NULL);
|
||||
assert(CHUNK_ADDR2BASE(ptr) != ptr);
|
||||
assert(isalloc(tsdn, ptr, false) == LARGE_MINCLASS);
|
||||
assert(isalloc(tsdn, ptr, true) == LARGE_MINCLASS);
|
||||
assert(extent_addr_get(extent) != ptr);
|
||||
assert(isalloc(tsdn, extent, ptr, false) == LARGE_MINCLASS);
|
||||
assert(isalloc(tsdn, extent, ptr, true) == LARGE_MINCLASS);
|
||||
assert(size <= SMALL_MAXCLASS);
|
||||
|
||||
chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
|
||||
chunk = (arena_chunk_t *)extent_addr_get(extent);
|
||||
pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
|
||||
binind = size2index(size);
|
||||
assert(binind < NBINS);
|
||||
arena_mapbits_large_binind_set(chunk, pageind, binind);
|
||||
|
||||
assert(isalloc(tsdn, ptr, false) == LARGE_MINCLASS);
|
||||
assert(isalloc(tsdn, ptr, true) == size);
|
||||
assert(isalloc(tsdn, extent, ptr, false) == LARGE_MINCLASS);
|
||||
assert(isalloc(tsdn, extent, ptr, true) == size);
|
||||
}
|
||||
|
||||
static void
|
||||
arena_dissociate_bin_run(arena_chunk_t *chunk, arena_run_t *run,
|
||||
arena_bin_t *bin)
|
||||
arena_dissociate_bin_run(extent_t *extent, arena_run_t *run, arena_bin_t *bin)
|
||||
{
|
||||
|
||||
/* Dissociate run from bin. */
|
||||
if (run == bin->runcur)
|
||||
bin->runcur = NULL;
|
||||
else {
|
||||
szind_t binind = arena_bin_index(extent_arena_get(
|
||||
&chunk->extent), bin);
|
||||
szind_t binind = arena_bin_index(extent_arena_get(extent), bin);
|
||||
const arena_bin_info_t *bin_info = &arena_bin_info[binind];
|
||||
|
||||
/*
|
||||
@ -2668,7 +2688,7 @@ arena_dissociate_bin_run(arena_chunk_t *chunk, arena_run_t *run,
|
||||
|
||||
static void
|
||||
arena_dalloc_bin_run(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
|
||||
arena_run_t *run, arena_bin_t *bin)
|
||||
extent_t *extent, arena_run_t *run, arena_bin_t *bin)
|
||||
{
|
||||
|
||||
assert(run != bin->runcur);
|
||||
@ -2676,7 +2696,7 @@ arena_dalloc_bin_run(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
|
||||
malloc_mutex_unlock(tsdn, &bin->lock);
|
||||
/******************************/
|
||||
malloc_mutex_lock(tsdn, &arena->lock);
|
||||
arena_run_dalloc(tsdn, arena, run, true, false, false);
|
||||
arena_run_dalloc(tsdn, arena, extent, run, true, false, false);
|
||||
malloc_mutex_unlock(tsdn, &arena->lock);
|
||||
/****************************/
|
||||
malloc_mutex_lock(tsdn, &bin->lock);
|
||||
@ -2707,7 +2727,7 @@ arena_bin_lower_run(arena_t *arena, arena_chunk_t *chunk, arena_run_t *run,
|
||||
|
||||
static void
|
||||
arena_dalloc_bin_locked_impl(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
|
||||
void *ptr, arena_chunk_map_bits_t *bitselm, bool junked)
|
||||
extent_t *extent, void *ptr, arena_chunk_map_bits_t *bitselm, bool junked)
|
||||
{
|
||||
size_t pageind, rpages_ind;
|
||||
arena_run_t *run;
|
||||
@ -2725,10 +2745,10 @@ arena_dalloc_bin_locked_impl(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
|
||||
if (!junked && config_fill && unlikely(opt_junk_free))
|
||||
arena_dalloc_junk_small(ptr, bin_info);
|
||||
|
||||
arena_run_reg_dalloc(run, ptr);
|
||||
arena_run_reg_dalloc(run, extent, ptr);
|
||||
if (run->nfree == bin_info->nregs) {
|
||||
arena_dissociate_bin_run(chunk, run, bin);
|
||||
arena_dalloc_bin_run(tsdn, arena, chunk, run, bin);
|
||||
arena_dissociate_bin_run(extent, run, bin);
|
||||
arena_dalloc_bin_run(tsdn, arena, chunk, extent, run, bin);
|
||||
} else if (run->nfree == 1 && run != bin->runcur)
|
||||
arena_bin_lower_run(arena, chunk, run, bin);
|
||||
|
||||
@ -2740,15 +2760,17 @@ arena_dalloc_bin_locked_impl(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,

void
arena_dalloc_bin_junked_locked(tsdn_t *tsdn, arena_t *arena,
arena_chunk_t *chunk, void *ptr, arena_chunk_map_bits_t *bitselm)
arena_chunk_t *chunk, extent_t *extent, void *ptr,
arena_chunk_map_bits_t *bitselm)
{

arena_dalloc_bin_locked_impl(tsdn, arena, chunk, ptr, bitselm, true);
arena_dalloc_bin_locked_impl(tsdn, arena, chunk, extent, ptr, bitselm,
true);
}

void
arena_dalloc_bin(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk, void *ptr,
size_t pageind, arena_chunk_map_bits_t *bitselm)
static void
arena_dalloc_bin(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
extent_t *extent, void *ptr, size_t pageind, arena_chunk_map_bits_t *bitselm)
{
arena_run_t *run;
arena_bin_t *bin;
@ -2758,13 +2780,14 @@ arena_dalloc_bin(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk, void *ptr,
run = &arena_miscelm_get_mutable(chunk, rpages_ind)->run;
bin = &arena->bins[run->binind];
malloc_mutex_lock(tsdn, &bin->lock);
arena_dalloc_bin_locked_impl(tsdn, arena, chunk, ptr, bitselm, false);
arena_dalloc_bin_locked_impl(tsdn, arena, chunk, extent, ptr, bitselm,
false);
malloc_mutex_unlock(tsdn, &bin->lock);
}

void
arena_dalloc_small(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
void *ptr, size_t pageind)
extent_t *extent, void *ptr, size_t pageind)
{
arena_chunk_map_bits_t *bitselm;

@ -2774,7 +2797,7 @@ arena_dalloc_small(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
pageind)) != BININD_INVALID);
}
bitselm = arena_bitselm_get_mutable(chunk, pageind);
arena_dalloc_bin(tsdn, arena, chunk, ptr, pageind, bitselm);
arena_dalloc_bin(tsdn, arena, chunk, extent, ptr, pageind, bitselm);
arena_decay_tick(tsdn, arena);
}

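Every function above now receives the pointer's extent instead of re-deriving it, so the radix-tree query happens once per operation. A minimal sketch of that single lookup, assuming chunk_lookup() is the existing rtree query this diff keeps calling (the helper name iealloc matches the call sites below; the exact committed body may differ):

/*
 * Sketch: rtree-based extent lookup, replacing CHUNK_ADDR2BASE() pointer
 * masking.  A "dependent" lookup (second argument true) expects a mapping
 * to exist rather than tolerating a miss.
 */
JEMALLOC_ALWAYS_INLINE extent_t *
iealloc(const void *ptr)
{
	return (chunk_lookup(ptr, true));
}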
@ -2798,7 +2821,7 @@ arena_dalloc_junk_large_t *arena_dalloc_junk_large =

static void
arena_dalloc_large_locked_impl(tsdn_t *tsdn, arena_t *arena,
arena_chunk_t *chunk, void *ptr, bool junked)
arena_chunk_t *chunk, extent_t *extent, void *ptr, bool junked)
{
size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
arena_chunk_map_misc_t *miscelm = arena_miscelm_get_mutable(chunk,
@ -2821,31 +2844,31 @@ arena_dalloc_large_locked_impl(tsdn_t *tsdn, arena_t *arena,
}
}

arena_run_dalloc(tsdn, arena, run, true, false, false);
arena_run_dalloc(tsdn, arena, extent, run, true, false, false);
}

void
arena_dalloc_large_junked_locked(tsdn_t *tsdn, arena_t *arena,
arena_chunk_t *chunk, void *ptr)
arena_chunk_t *chunk, extent_t *extent, void *ptr)
{

arena_dalloc_large_locked_impl(tsdn, arena, chunk, ptr, true);
arena_dalloc_large_locked_impl(tsdn, arena, chunk, extent, ptr, true);
}

void
arena_dalloc_large(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
void *ptr)
extent_t *extent, void *ptr)
{

malloc_mutex_lock(tsdn, &arena->lock);
arena_dalloc_large_locked_impl(tsdn, arena, chunk, ptr, false);
arena_dalloc_large_locked_impl(tsdn, arena, chunk, extent, ptr, false);
malloc_mutex_unlock(tsdn, &arena->lock);
arena_decay_tick(tsdn, arena);
}

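Taken together, the small and large paths let a caller do the extent lookup exactly once and hand the result down either branch. A hypothetical caller (dalloc_example() is illustrative only, not part of the tree; the real dispatch lives in arena_dalloc()):

/* Hypothetical caller: one lookup, threaded into either size-class path. */
static void
dalloc_example(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t pageind,
    bool small)
{
	extent_t *extent = iealloc(ptr);	/* the single rtree lookup */
	arena_chunk_t *chunk = (arena_chunk_t *)extent_addr_get(extent);

	if (small)
		arena_dalloc_small(tsdn, arena, chunk, extent, ptr, pageind);
	else
		arena_dalloc_large(tsdn, arena, chunk, extent, ptr);
}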
static void
arena_ralloc_large_shrink(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
void *ptr, size_t oldsize, size_t size)
extent_t *extent, void *ptr, size_t oldsize, size_t size)
{
size_t pageind = ((uintptr_t)ptr - (uintptr_t)chunk) >> LG_PAGE;
arena_chunk_map_misc_t *miscelm = arena_miscelm_get_mutable(chunk,
@ -2859,8 +2882,8 @@ arena_ralloc_large_shrink(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
* allocations.
*/
malloc_mutex_lock(tsdn, &arena->lock);
arena_run_trim_tail(tsdn, arena, chunk, run, oldsize + large_pad, size +
large_pad, true);
arena_run_trim_tail(tsdn, arena, chunk, extent, run, oldsize +
large_pad, size + large_pad, true);
if (config_stats) {
szind_t oldindex = size2index(oldsize) - NBINS;
szind_t index = size2index(size) - NBINS;
@ -2916,7 +2939,8 @@ arena_ralloc_large_grow(tsdn_t *tsdn, arena_t *arena, arena_chunk_t *chunk,
goto label_fail;

run = &arena_miscelm_get_mutable(chunk, pageind+npages)->run;
if (arena_run_split_large(arena, run, splitsize, zero))
if (arena_run_split_large(arena, iealloc(run), run, splitsize,
zero))
goto label_fail;

if (config_cache_oblivious && zero) {
@ -3005,8 +3029,8 @@ arena_ralloc_junk_large_t *arena_ralloc_junk_large =
* always fail if growing an object, and the following run is already in use.
*/
static bool
arena_ralloc_large(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t usize_min,
size_t usize_max, bool zero)
arena_ralloc_large(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
size_t usize_min, size_t usize_max, bool zero)
{
arena_chunk_t *chunk;
arena_t *arena;
@ -3016,8 +3040,8 @@ arena_ralloc_large(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t usize_min,
return (false);
}

chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
arena = extent_arena_get(&chunk->extent);
chunk = (arena_chunk_t *)extent_addr_get(extent);
arena = extent_arena_get(extent);

if (oldsize < usize_max) {
bool ret = arena_ralloc_large_grow(tsdn, arena, chunk, ptr,
@ -3026,10 +3050,12 @@ arena_ralloc_large(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t usize_min,
if (unlikely(opt_junk_alloc)) {
memset((void *)((uintptr_t)ptr + oldsize),
JEMALLOC_ALLOC_JUNK,
isalloc(tsdn, ptr, config_prof) - oldsize);
isalloc(tsdn, extent, ptr, config_prof) -
oldsize);
} else if (unlikely(opt_zero)) {
memset((void *)((uintptr_t)ptr + oldsize), 0,
isalloc(tsdn, ptr, config_prof) - oldsize);
isalloc(tsdn, extent, ptr, config_prof) -
oldsize);
}
}
return (ret);
@ -3038,13 +3064,14 @@ arena_ralloc_large(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t usize_min,
assert(oldsize > usize_max);
/* Fill before shrinking in order avoid a race. */
arena_ralloc_junk_large(ptr, oldsize, usize_max);
arena_ralloc_large_shrink(tsdn, arena, chunk, ptr, oldsize, usize_max);
arena_ralloc_large_shrink(tsdn, arena, chunk, extent, ptr, oldsize,
usize_max);
return (false);
}

bool
arena_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
size_t extra, bool zero)
arena_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
size_t size, size_t extra, bool zero)
{
size_t usize_min, usize_max;

@ -3057,8 +3084,6 @@ arena_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
usize_min = s2u(size);
usize_max = s2u(size + extra);
if (likely(oldsize <= large_maxclass && usize_min <= large_maxclass)) {
arena_chunk_t *chunk;

/*
* Avoid moving the allocation if the size class can be left the
* same.
@ -3073,17 +3098,16 @@ arena_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,
} else {
if (usize_max <= SMALL_MAXCLASS)
return (true);
if (arena_ralloc_large(tsdn, ptr, oldsize, usize_min,
usize_max, zero))
if (arena_ralloc_large(tsdn, extent, ptr, oldsize,
usize_min, usize_max, zero))
return (true);
}

chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
arena_decay_tick(tsdn, extent_arena_get(&chunk->extent));
arena_decay_tick(tsdn, extent_arena_get(extent));
return (false);
} else {
return (huge_ralloc_no_move(tsdn, ptr, oldsize, usize_min,
usize_max, zero));
return (huge_ralloc_no_move(tsdn, extent, ptr, oldsize,
usize_min, usize_max, zero));
}
}

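The two lines deleted here are the commit's core substitution in miniature; side by side, condensed from the hunk above:

/* Before: derive the chunk by masking the pointer's address bits. */
chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
arena = extent_arena_get(&chunk->extent);

/* After: the extent was looked up once in the caller; just read it. */
arena = extent_arena_get(extent);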
@ -3102,8 +3126,8 @@ arena_ralloc_move_helper(tsdn_t *tsdn, arena_t *arena, size_t usize,
}

void *
arena_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t oldsize,
size_t size, size_t alignment, bool zero, tcache_t *tcache)
arena_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, void *ptr,
size_t oldsize, size_t size, size_t alignment, bool zero, tcache_t *tcache)
{
void *ret;
size_t usize;
@ -3116,7 +3140,8 @@ arena_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t oldsize,
size_t copysize;

/* Try to avoid moving the allocation. */
if (!arena_ralloc_no_move(tsdn, ptr, oldsize, usize, 0, zero))
if (!arena_ralloc_no_move(tsdn, extent, ptr, oldsize, usize, 0,
zero))
return (ptr);

/*
@ -3136,10 +3161,10 @@ arena_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t oldsize,

copysize = (usize < oldsize) ? usize : oldsize;
memcpy(ret, ptr, copysize);
isdalloct(tsdn, ptr, oldsize, tcache, true);
isdalloct(tsdn, extent, ptr, oldsize, tcache, true);
} else {
ret = huge_ralloc(tsdn, arena, ptr, oldsize, usize, alignment,
zero, tcache);
ret = huge_ralloc(tsdn, arena, extent, ptr, oldsize, usize,
alignment, zero, tcache);
}
return (ret);
}

src/chunk.c
@ -180,6 +180,15 @@ chunk_deregister(const void *chunk, const extent_t *extent)
}
}

void
chunk_reregister(tsdn_t *tsdn, const void *chunk, const extent_t *extent)
{
bool err;

err = chunk_register(tsdn, chunk, extent);
assert(!err);
}

/*
* Do first-best-fit chunk selection, i.e. select the lowest chunk that best
* fits.
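chunk_reregister() pairs with chunk_deregister() to keep the radix tree consistent across in-place resizes. Condensed from the huge_ralloc_no_move_* call sites later in this diff, the protocol is:

/* In-place resize protocol (condensed from the huge.c hunks below). */
chunk_deregister(ptr, extent);		/* drop the stale mapping */
malloc_mutex_lock(tsdn, &arena->huge_mtx);
extent_size_set(extent, usize);		/* update the metadata */
malloc_mutex_unlock(tsdn, &arena->huge_mtx);
chunk_reregister(tsdn, ptr, extent);	/* reinstall the mapping */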
10 src/ckh.c
@ -283,12 +283,12 @@ ckh_grow(tsdn_t *tsdn, ckh_t *ckh)
ckh->lg_curbuckets = lg_curcells - LG_CKH_BUCKET_CELLS;

if (!ckh_rebuild(ckh, tab)) {
idalloctm(tsdn, tab, NULL, true, true);
idalloctm(tsdn, iealloc(tab), tab, NULL, true, true);
break;
}

/* Rebuilding failed, so back out partially rebuilt table. */
idalloctm(tsdn, ckh->tab, NULL, true, true);
idalloctm(tsdn, iealloc(ckh->tab), ckh->tab, NULL, true, true);
ckh->tab = tab;
ckh->lg_curbuckets = lg_prevbuckets;
}
@ -330,7 +330,7 @@ ckh_shrink(tsdn_t *tsdn, ckh_t *ckh)
ckh->lg_curbuckets = lg_curcells - LG_CKH_BUCKET_CELLS;

if (!ckh_rebuild(ckh, tab)) {
idalloctm(tsdn, tab, NULL, true, true);
idalloctm(tsdn, iealloc(tab), tab, NULL, true, true);
#ifdef CKH_COUNT
ckh->nshrinks++;
#endif
@ -338,7 +338,7 @@ ckh_shrink(tsdn_t *tsdn, ckh_t *ckh)
}

/* Rebuilding failed, so back out partially rebuilt table. */
idalloctm(tsdn, ckh->tab, NULL, true, true);
idalloctm(tsdn, iealloc(ckh->tab), ckh->tab, NULL, true, true);
ckh->tab = tab;
ckh->lg_curbuckets = lg_prevbuckets;
#ifdef CKH_COUNT
@ -421,7 +421,7 @@ ckh_delete(tsdn_t *tsdn, ckh_t *ckh)
(unsigned long long)ckh->nrelocs);
#endif

idalloctm(tsdn, ckh->tab, NULL, true, true);
idalloctm(tsdn, iealloc(ckh->tab), ckh->tab, NULL, true, true);
if (config_debug)
memset(ckh, JEMALLOC_FREE_JUNK, sizeof(ckh_t));
}
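ckh has no extent on hand at these call sites, so the lookup happens inline. The recurring call-site shape (with ptr standing in for whatever is being freed):

/* Recurring pattern: look up the extent once, pass it down. */
idalloctm(tsdn, iealloc(ptr), ptr, NULL, true, true);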
149 src/huge.c
@ -3,42 +3,6 @@

/******************************************************************************/

static extent_t *
huge_extent_get(const void *ptr)
{
extent_t *extent;

extent = chunk_lookup(ptr, true);
assert(!extent_achunk_get(extent));

return (extent);
}

static bool
huge_extent_set(tsdn_t *tsdn, const void *ptr, extent_t *extent)
{

assert(extent_addr_get(extent) == ptr);
assert(!extent_achunk_get(extent));
return (chunk_register(tsdn, ptr, extent));
}

static void
huge_extent_reset(tsdn_t *tsdn, const void *ptr, extent_t *extent)
{
bool err;

err = huge_extent_set(tsdn, ptr, extent);
assert(!err);
}

static void
huge_extent_unset(const void *ptr, const extent_t *extent)
{

chunk_deregister(ptr, extent);
}

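The deleted huge_extent_get() shows why these wrappers could go: it was already just the rtree query plus an assert, so the generic lookup subsumes it:

/* Before (deleted above): a huge-specific wrapper around the rtree. */
extent = huge_extent_get(ptr);
/* After: the generic lookup, normally hoisted into the caller. */
extent = iealloc(ptr);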
void *
huge_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero)
{
@ -81,15 +45,15 @@ huge_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
arena = arena_choose(tsdn_tsd(tsdn), arena);
if (unlikely(arena == NULL) || (ret = arena_chunk_alloc_huge(tsdn,
arena, usize, alignment, &is_zeroed)) == NULL) {
idalloctm(tsdn, extent, NULL, true, true);
idalloctm(tsdn, iealloc(extent), extent, NULL, true, true);
return (NULL);
}

extent_init(extent, arena, ret, usize, is_zeroed, true);

if (huge_extent_set(tsdn, ret, extent)) {
if (chunk_register(tsdn, ret, extent)) {
arena_chunk_dalloc_huge(tsdn, arena, ret, usize);
idalloctm(tsdn, extent, NULL, true, true);
idalloctm(tsdn, iealloc(extent), extent, NULL, true, true);
return (NULL);
}

@ -133,11 +97,10 @@ huge_dalloc_junk_t *huge_dalloc_junk = JEMALLOC_N(huge_dalloc_junk_impl);
#endif

static void
huge_ralloc_no_move_similar(tsdn_t *tsdn, void *ptr, size_t oldsize,
size_t usize_min, size_t usize_max, bool zero)
huge_ralloc_no_move_similar(tsdn_t *tsdn, extent_t *extent, void *ptr,
size_t oldsize, size_t usize_min, size_t usize_max, bool zero)
{
size_t usize, usize_next;
extent_t *extent;
arena_t *arena;
chunk_hooks_t chunk_hooks = CHUNK_HOOKS_INITIALIZER;
bool pre_zeroed, post_zeroed;
@ -150,7 +113,6 @@ huge_ralloc_no_move_similar(tsdn_t *tsdn, void *ptr, size_t oldsize,
if (oldsize == usize)
return;

extent = huge_extent_get(ptr);
arena = extent_arena_get(extent);
pre_zeroed = extent_zeroed_get(extent);

@ -169,15 +131,15 @@ huge_ralloc_no_move_similar(tsdn_t *tsdn, void *ptr, size_t oldsize,
} else
post_zeroed = pre_zeroed;

malloc_mutex_lock(tsdn, &arena->huge_mtx);
/* Update the size of the huge allocation. */
assert(extent_size_get(extent) != usize);
huge_extent_unset(ptr, extent);
chunk_deregister(ptr, extent);
malloc_mutex_lock(tsdn, &arena->huge_mtx);
extent_size_set(extent, usize);
huge_extent_reset(tsdn, ptr, extent);
malloc_mutex_unlock(tsdn, &arena->huge_mtx);
chunk_reregister(tsdn, ptr, extent);
/* Update zeroed. */
extent_zeroed_set(extent, post_zeroed);
malloc_mutex_unlock(tsdn, &arena->huge_mtx);

arena_chunk_ralloc_huge_similar(tsdn, arena, ptr, oldsize, usize);

@ -196,16 +158,14 @@ huge_ralloc_no_move_similar(tsdn_t *tsdn, void *ptr, size_t oldsize,
}

static bool
huge_ralloc_no_move_shrink(tsdn_t *tsdn, void *ptr, size_t oldsize,
size_t usize)
huge_ralloc_no_move_shrink(tsdn_t *tsdn, extent_t *extent, void *ptr,
size_t oldsize, size_t usize)
{
extent_t *extent;
arena_t *arena;
chunk_hooks_t chunk_hooks;
size_t cdiff;
bool pre_zeroed, post_zeroed;

extent = huge_extent_get(ptr);
arena = extent_arena_get(extent);
pre_zeroed = extent_zeroed_get(extent);
chunk_hooks = chunk_hooks_get(tsdn, arena);
@ -233,14 +193,14 @@ huge_ralloc_no_move_shrink(tsdn_t *tsdn, void *ptr, size_t oldsize,
} else
post_zeroed = pre_zeroed;

malloc_mutex_lock(tsdn, &arena->huge_mtx);
/* Update the size of the huge allocation. */
huge_extent_unset(ptr, extent);
chunk_deregister(ptr, extent);
malloc_mutex_lock(tsdn, &arena->huge_mtx);
extent_size_set(extent, usize);
huge_extent_reset(tsdn, ptr, extent);
/* Update zeroed. */
extent_zeroed_set(extent, post_zeroed);
malloc_mutex_unlock(tsdn, &arena->huge_mtx);
chunk_reregister(tsdn, ptr, extent);

/* Zap the excess chunks. */
arena_chunk_ralloc_huge_shrink(tsdn, arena, ptr, oldsize, usize);
@ -249,14 +209,12 @@ huge_ralloc_no_move_shrink(tsdn_t *tsdn, void *ptr, size_t oldsize,
}

static bool
huge_ralloc_no_move_expand(tsdn_t *tsdn, void *ptr, size_t oldsize,
size_t usize, bool zero)
huge_ralloc_no_move_expand(tsdn_t *tsdn, extent_t *extent, void *ptr,
size_t oldsize, size_t usize, bool zero)
{
extent_t *extent;
arena_t *arena;
bool is_zeroed_subchunk, is_zeroed_chunk;

extent = huge_extent_get(ptr);
arena = extent_arena_get(extent);
malloc_mutex_lock(tsdn, &arena->huge_mtx);
is_zeroed_subchunk = extent_zeroed_get(extent);
@ -272,12 +230,12 @@ huge_ralloc_no_move_expand(tsdn_t *tsdn, void *ptr, size_t oldsize,
&is_zeroed_chunk))
return (true);

malloc_mutex_lock(tsdn, &arena->huge_mtx);
/* Update the size of the huge allocation. */
huge_extent_unset(ptr, extent);
chunk_deregister(ptr, extent);
malloc_mutex_lock(tsdn, &arena->huge_mtx);
extent_size_set(extent, usize);
huge_extent_reset(tsdn, ptr, extent);
malloc_mutex_unlock(tsdn, &arena->huge_mtx);
chunk_reregister(tsdn, ptr, extent);

if (zero || (config_fill && unlikely(opt_zero))) {
if (!is_zeroed_subchunk) {
@ -298,8 +256,8 @@ huge_ralloc_no_move_expand(tsdn_t *tsdn, void *ptr, size_t oldsize,
}

bool
huge_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t usize_min,
size_t usize_max, bool zero)
huge_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
size_t usize_min, size_t usize_max, bool zero)
{

assert(s2u(oldsize) == oldsize);
@ -312,16 +270,16 @@ huge_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t usize_min,

if (CHUNK_CEILING(usize_max) > CHUNK_CEILING(oldsize)) {
/* Attempt to expand the allocation in-place. */
if (!huge_ralloc_no_move_expand(tsdn, ptr, oldsize, usize_max,
zero)) {
arena_decay_tick(tsdn, huge_aalloc(ptr));
if (!huge_ralloc_no_move_expand(tsdn, extent, ptr, oldsize,
usize_max, zero)) {
arena_decay_tick(tsdn, extent_arena_get(extent));
return (false);
}
/* Try again, this time with usize_min. */
if (usize_min < usize_max && CHUNK_CEILING(usize_min) >
CHUNK_CEILING(oldsize) && huge_ralloc_no_move_expand(tsdn,
ptr, oldsize, usize_min, zero)) {
arena_decay_tick(tsdn, huge_aalloc(ptr));
extent, ptr, oldsize, usize_min, zero)) {
arena_decay_tick(tsdn, extent_arena_get(extent));
return (false);
}
}
@ -332,17 +290,17 @@ huge_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t usize_min,
*/
if (CHUNK_CEILING(oldsize) >= CHUNK_CEILING(usize_min)
&& CHUNK_CEILING(oldsize) <= CHUNK_CEILING(usize_max)) {
huge_ralloc_no_move_similar(tsdn, ptr, oldsize, usize_min,
usize_max, zero);
arena_decay_tick(tsdn, huge_aalloc(ptr));
huge_ralloc_no_move_similar(tsdn, extent, ptr, oldsize,
usize_min, usize_max, zero);
arena_decay_tick(tsdn, extent_arena_get(extent));
return (false);
}

/* Attempt to shrink the allocation in-place. */
if (CHUNK_CEILING(oldsize) > CHUNK_CEILING(usize_max)) {
if (!huge_ralloc_no_move_shrink(tsdn, ptr, oldsize,
if (!huge_ralloc_no_move_shrink(tsdn, extent, ptr, oldsize,
usize_max)) {
arena_decay_tick(tsdn, huge_aalloc(ptr));
arena_decay_tick(tsdn, extent_arena_get(extent));
return (false);
}
}
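Note the arena_decay_tick() arguments in this function: huge_aalloc(ptr) cost another rtree lookup just to find the owning arena, whereas the extent already in hand makes it a plain field read:

/* Before: a second lookup per operation. */
arena_decay_tick(tsdn, huge_aalloc(ptr));
/* After: read the arena straight off the propagated extent. */
arena_decay_tick(tsdn, extent_arena_get(extent));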
@ -360,8 +318,8 @@ huge_ralloc_move_helper(tsdn_t *tsdn, arena_t *arena, size_t usize,
}

void *
huge_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t oldsize,
size_t usize, size_t alignment, bool zero, tcache_t *tcache)
huge_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, void *ptr,
size_t oldsize, size_t usize, size_t alignment, bool zero, tcache_t *tcache)
{
void *ret;
size_t copysize;
@ -370,7 +328,8 @@ huge_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t oldsize,
assert(usize > 0 && usize <= HUGE_MAXCLASS);

/* Try to avoid moving the allocation. */
if (!huge_ralloc_no_move(tsdn, ptr, oldsize, usize, usize, zero))
if (!huge_ralloc_no_move(tsdn, extent, ptr, oldsize, usize, usize,
zero))
return (ptr);

/*
@ -384,19 +343,17 @@ huge_ralloc(tsdn_t *tsdn, arena_t *arena, void *ptr, size_t oldsize,

copysize = (usize < oldsize) ? usize : oldsize;
memcpy(ret, ptr, copysize);
isdalloct(tsdn, ptr, oldsize, tcache, true);
isdalloct(tsdn, extent, ptr, oldsize, tcache, true);
return (ret);
}

void
huge_dalloc(tsdn_t *tsdn, void *ptr)
huge_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr)
{
extent_t *extent;
arena_t *arena;

extent = huge_extent_get(ptr);
arena = extent_arena_get(extent);
huge_extent_unset(ptr, extent);
chunk_deregister(ptr, extent);
malloc_mutex_lock(tsdn, &arena->huge_mtx);
ql_remove(&arena->huge, extent, ql_link);
malloc_mutex_unlock(tsdn, &arena->huge_mtx);
@ -405,26 +362,17 @@ huge_dalloc(tsdn_t *tsdn, void *ptr)
extent_size_get(extent));
arena_chunk_dalloc_huge(tsdn, extent_arena_get(extent),
extent_addr_get(extent), extent_size_get(extent));
idalloctm(tsdn, extent, NULL, true, true);
idalloctm(tsdn, iealloc(extent), extent, NULL, true, true);

arena_decay_tick(tsdn, arena);
}

arena_t *
huge_aalloc(const void *ptr)
{

return (extent_arena_get(huge_extent_get(ptr)));
}

size_t
huge_salloc(tsdn_t *tsdn, const void *ptr)
huge_salloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr)
{
size_t size;
extent_t *extent;
arena_t *arena;

extent = huge_extent_get(ptr);
arena = extent_arena_get(extent);
malloc_mutex_lock(tsdn, &arena->huge_mtx);
size = extent_size_get(extent);
@ -434,13 +382,13 @@ huge_salloc(tsdn_t *tsdn, const void *ptr)
}

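Assembled from the hunk above, the extent-based size query reduces to a locked read of the extent's size field. A condensed sketch (huge_salloc_sketch is an illustrative name; the real function keeps its ptr parameter and asserts):

/* Condensed sketch of the new huge_salloc() shape. */
static size_t
huge_salloc_sketch(tsdn_t *tsdn, const extent_t *extent)
{
	arena_t *arena = extent_arena_get(extent);
	size_t size;

	malloc_mutex_lock(tsdn, &arena->huge_mtx);
	size = extent_size_get(extent);
	malloc_mutex_unlock(tsdn, &arena->huge_mtx);

	return (size);
}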
prof_tctx_t *
huge_prof_tctx_get(tsdn_t *tsdn, const void *ptr)
huge_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent, const void *ptr)
{
prof_tctx_t *tctx;
extent_t *extent;
arena_t *arena;

extent = huge_extent_get(ptr);
assert(extent == iealloc(ptr));

arena = extent_arena_get(extent);
malloc_mutex_lock(tsdn, &arena->huge_mtx);
tctx = extent_prof_tctx_get(extent);
@ -450,12 +398,13 @@ huge_prof_tctx_get(tsdn_t *tsdn, const void *ptr)
}

void
huge_prof_tctx_set(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx)
huge_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr,
prof_tctx_t *tctx)
{
extent_t *extent;
arena_t *arena;

extent = huge_extent_get(ptr);
assert(extent == iealloc(ptr));

arena = extent_arena_get(extent);
malloc_mutex_lock(tsdn, &arena->huge_mtx);
extent_prof_tctx_set(extent, tctx);
@ -463,8 +412,8 @@ huge_prof_tctx_set(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx)
}

void
huge_prof_tctx_reset(tsdn_t *tsdn, const void *ptr)
huge_prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr)
{

huge_prof_tctx_set(tsdn, ptr, (prof_tctx_t *)(uintptr_t)1U);
huge_prof_tctx_set(tsdn, extent, ptr, (prof_tctx_t *)(uintptr_t)1U);
}

210 src/jemalloc.c
@ -308,10 +308,10 @@ a0ialloc(size_t size, bool zero, bool is_metadata)
}

static void
a0idalloc(void *ptr, bool is_metadata)
a0idalloc(extent_t *extent, void *ptr, bool is_metadata)
{

idalloctm(TSDN_NULL, ptr, false, is_metadata, true);
idalloctm(TSDN_NULL, extent, ptr, false, is_metadata, true);
}

void *
@ -325,7 +325,7 @@ void
a0dalloc(void *ptr)
{

a0idalloc(ptr, true);
a0idalloc(iealloc(ptr), ptr, true);
}

/*
@ -365,7 +365,7 @@ bootstrap_free(void *ptr)
if (unlikely(ptr == NULL))
return;

a0idalloc(ptr, false);
a0idalloc(iealloc(ptr), ptr, false);
}

static void
@ -1401,7 +1401,7 @@ ialloc_prof_sample(tsd_t *tsd, size_t usize, szind_t ind, bool zero,
p = ialloc(tsd, LARGE_MINCLASS, ind_large, zero, slow_path);
if (p == NULL)
return (NULL);
arena_prof_promoted(tsd_tsdn(tsd), p, usize);
arena_prof_promoted(tsd_tsdn(tsd), iealloc(p), p, usize);
} else
p = ialloc(tsd, usize, ind, zero, slow_path);

@ -1423,7 +1423,7 @@ ialloc_prof(tsd_t *tsd, size_t usize, szind_t ind, bool zero, bool slow_path)
prof_alloc_rollback(tsd, tctx, true);
return (NULL);
}
prof_malloc(tsd_tsdn(tsd), p, usize, tctx);
prof_malloc(tsd_tsdn(tsd), iealloc(p), p, usize, tctx);

return (p);
}
@ -1482,7 +1482,7 @@ ialloc_post_check(void *ret, tsdn_t *tsdn, size_t usize, const char *func,
set_errno(ENOMEM);
}
if (config_stats && likely(ret != NULL)) {
assert(usize == isalloc(tsdn, ret, config_prof));
assert(usize == isalloc(tsdn, iealloc(ret), ret, config_prof));
*tsd_thread_allocatedp_get(tsdn_tsd(tsdn)) += usize;
}
witness_assert_lockless(tsdn);
@ -1525,7 +1525,7 @@ imemalign_prof_sample(tsd_t *tsd, size_t alignment, size_t usize,
p = ipalloc(tsd, LARGE_MINCLASS, alignment, false);
if (p == NULL)
return (NULL);
arena_prof_promoted(tsd_tsdn(tsd), p, usize);
arena_prof_promoted(tsd_tsdn(tsd), iealloc(p), p, usize);
} else
p = ipalloc(tsd, usize, alignment, false);

@ -1547,7 +1547,7 @@ imemalign_prof(tsd_t *tsd, size_t alignment, size_t usize)
prof_alloc_rollback(tsd, tctx, true);
return (NULL);
}
prof_malloc(tsd_tsdn(tsd), p, usize, tctx);
prof_malloc(tsd_tsdn(tsd), iealloc(p), p, usize, tctx);

return (p);
}
@ -1604,7 +1604,8 @@ imemalign(void **memptr, size_t alignment, size_t size, size_t min_alignment)
ret = 0;
label_return:
if (config_stats && likely(result != NULL)) {
assert(usize == isalloc(tsd_tsdn(tsd), result, config_prof));
assert(usize == isalloc(tsd_tsdn(tsd), iealloc(result), result,
config_prof));
*tsd_thread_allocatedp_get(tsd) += usize;
}
UTRACE(0, size, result);
@ -1683,44 +1684,49 @@ je_calloc(size_t num, size_t size)
}

static void *
irealloc_prof_sample(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize,
prof_tctx_t *tctx)
irealloc_prof_sample(tsd_t *tsd, extent_t *extent, void *old_ptr,
size_t old_usize, size_t usize, prof_tctx_t *tctx)
{
void *p;

if (tctx == NULL)
return (NULL);
if (usize <= SMALL_MAXCLASS) {
p = iralloc(tsd, old_ptr, old_usize, LARGE_MINCLASS, 0, false);
p = iralloc(tsd, extent, old_ptr, old_usize, LARGE_MINCLASS, 0,
false);
if (p == NULL)
return (NULL);
arena_prof_promoted(tsd_tsdn(tsd), p, usize);
arena_prof_promoted(tsd_tsdn(tsd), iealloc(p), p, usize);
} else
p = iralloc(tsd, old_ptr, old_usize, usize, 0, false);
p = iralloc(tsd, extent, old_ptr, old_usize, usize, 0, false);

return (p);
}

JEMALLOC_ALWAYS_INLINE_C void *
irealloc_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize)
irealloc_prof(tsd_t *tsd, extent_t *extent, void *old_ptr, size_t old_usize,
size_t usize)
{
void *p;
extent_t *e;
bool prof_active;
prof_tctx_t *old_tctx, *tctx;

prof_active = prof_active_get_unlocked();
old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_ptr);
old_tctx = prof_tctx_get(tsd_tsdn(tsd), extent, old_ptr);
tctx = prof_alloc_prep(tsd, usize, prof_active, true);
if (unlikely((uintptr_t)tctx != (uintptr_t)1U))
p = irealloc_prof_sample(tsd, old_ptr, old_usize, usize, tctx);
else
p = iralloc(tsd, old_ptr, old_usize, usize, 0, false);
if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
p = irealloc_prof_sample(tsd, extent, old_ptr, old_usize, usize,
tctx);
} else
p = iralloc(tsd, extent, old_ptr, old_usize, usize, 0, false);
if (unlikely(p == NULL)) {
prof_alloc_rollback(tsd, tctx, true);
return (NULL);
}
prof_realloc(tsd, p, usize, tctx, prof_active, true, old_ptr, old_usize,
old_tctx);
e = (p == old_ptr) ? extent : iealloc(p);
prof_realloc(tsd, e, p, usize, tctx, prof_active, true,
old_ptr, old_usize, old_tctx);

return (p);
}
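One detail worth calling out in irealloc_prof(): when the reallocation succeeds in place, the pointer does not move, so the old extent remains valid and the second lookup is skipped entirely:

/* From the hunk above: reuse the extent when the pointer did not move. */
e = (p == old_ptr) ? extent : iealloc(p);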
@ -1728,6 +1734,7 @@ irealloc_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize)
JEMALLOC_INLINE_C void
ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path)
{
extent_t *extent;
size_t usize;

witness_assert_lockless(tsd_tsdn(tsd));
@ -1735,22 +1742,24 @@ ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path)
assert(ptr != NULL);
assert(malloc_initialized() || IS_INITIALIZER);

extent = iealloc(ptr);
if (config_prof && opt_prof) {
usize = isalloc(tsd_tsdn(tsd), ptr, config_prof);
prof_free(tsd, ptr, usize);
usize = isalloc(tsd_tsdn(tsd), extent, ptr, config_prof);
prof_free(tsd, extent, ptr, usize);
} else if (config_stats)
usize = isalloc(tsd_tsdn(tsd), ptr, config_prof);
usize = isalloc(tsd_tsdn(tsd), extent, ptr, config_prof);
if (config_stats)
*tsd_thread_deallocatedp_get(tsd) += usize;

if (likely(!slow_path))
idalloctm(tsd_tsdn(tsd), ptr, tcache, false, false);
idalloctm(tsd_tsdn(tsd), extent, ptr, tcache, false, false);
else
idalloctm(tsd_tsdn(tsd), ptr, tcache, false, true);
idalloctm(tsd_tsdn(tsd), extent, ptr, tcache, false, true);
}

JEMALLOC_INLINE_C void
isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path)
isfree(tsd_t *tsd, extent_t *extent, void *ptr, size_t usize, tcache_t *tcache,
bool slow_path)
{

witness_assert_lockless(tsd_tsdn(tsd));
@ -1759,14 +1768,14 @@ isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path)
assert(malloc_initialized() || IS_INITIALIZER);

if (config_prof && opt_prof)
prof_free(tsd, ptr, usize);
prof_free(tsd, extent, ptr, usize);
if (config_stats)
*tsd_thread_deallocatedp_get(tsd) += usize;

if (likely(!slow_path))
isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, false);
isdalloct(tsd_tsdn(tsd), extent, ptr, usize, tcache, false);
else
isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, true);
isdalloct(tsd_tsdn(tsd), extent, ptr, usize, tcache, true);
}

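ifree() is the motivating case from the commit message: free() now performs exactly one rtree lookup, and the extent feeds the size query, the profiler, and the deallocator. Condensed from the hunk above (tcache selection and the slow_path branch elided):

/* Condensed shape of the new free path: one lookup feeds everything. */
extent_t *extent = iealloc(ptr);
size_t usize = isalloc(tsd_tsdn(tsd), extent, ptr, config_prof);
prof_free(tsd, extent, ptr, usize);
idalloctm(tsd_tsdn(tsd), extent, ptr, tcache, false, slow_path);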
JEMALLOC_EXPORT JEMALLOC_ALLOCATOR JEMALLOC_RESTRICT_RETURN
@ -1794,22 +1803,26 @@ je_realloc(void *ptr, size_t size)

if (likely(ptr != NULL)) {
tsd_t *tsd;
extent_t *extent;

assert(malloc_initialized() || IS_INITIALIZER);
tsd = tsd_fetch();

witness_assert_lockless(tsd_tsdn(tsd));

old_usize = isalloc(tsd_tsdn(tsd), ptr, config_prof);
extent = iealloc(ptr);
old_usize = isalloc(tsd_tsdn(tsd), extent, ptr, config_prof);

if (config_prof && opt_prof) {
usize = s2u(size);
ret = unlikely(usize == 0 || usize > HUGE_MAXCLASS) ?
NULL : irealloc_prof(tsd, ptr, old_usize, usize);
NULL : irealloc_prof(tsd, extent, ptr, old_usize,
usize);
} else {
if (config_stats)
usize = s2u(size);
ret = iralloc(tsd, ptr, old_usize, size, 0, false);
ret = iralloc(tsd, extent, ptr, old_usize, size, 0,
false);
}
tsdn = tsd_tsdn(tsd);
} else {
@ -1832,7 +1845,7 @@ je_realloc(void *ptr, size_t size)
if (config_stats && likely(ret != NULL)) {
tsd_t *tsd;

assert(usize == isalloc(tsdn, ret, config_prof));
assert(usize == isalloc(tsdn, iealloc(ret), ret, config_prof));
tsd = tsdn_tsd(tsdn);
*tsd_thread_allocatedp_get(tsd) += usize;
*tsd_thread_deallocatedp_get(tsd) += old_usize;
@ -1986,11 +1999,10 @@ imallocx_prof_sample(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
tcache, arena, slow_path);
if (p == NULL)
return (NULL);
arena_prof_promoted(tsdn, p, usize);
} else {
arena_prof_promoted(tsdn, iealloc(p), p, usize);
} else
p = imallocx_flags(tsdn, usize, alignment, zero, tcache, arena,
slow_path);
}

return (p);
}
@ -2021,7 +2033,7 @@ imallocx_prof(tsd_t *tsd, size_t size, int flags, size_t *usize, bool slow_path)
prof_alloc_rollback(tsd, tctx, true);
return (NULL);
}
prof_malloc(tsd_tsdn(tsd), p, *usize, tctx);
prof_malloc(tsd_tsdn(tsd), iealloc(p), p, *usize, tctx);

assert(alignment == 0 || ((uintptr_t)p & (alignment - 1)) == ZU(0));
return (p);
@ -2109,46 +2121,47 @@ je_mallocx(size_t size, int flags)
}

static void *
irallocx_prof_sample(tsdn_t *tsdn, void *old_ptr, size_t old_usize,
size_t usize, size_t alignment, bool zero, tcache_t *tcache, arena_t *arena,
prof_tctx_t *tctx)
irallocx_prof_sample(tsdn_t *tsdn, extent_t *extent, void *old_ptr,
size_t old_usize, size_t usize, size_t alignment, bool zero,
tcache_t *tcache, arena_t *arena, prof_tctx_t *tctx)
{
void *p;

if (tctx == NULL)
return (NULL);
if (usize <= SMALL_MAXCLASS) {
p = iralloct(tsdn, old_ptr, old_usize, LARGE_MINCLASS,
p = iralloct(tsdn, extent, old_ptr, old_usize, LARGE_MINCLASS,
alignment, zero, tcache, arena);
if (p == NULL)
return (NULL);
arena_prof_promoted(tsdn, p, usize);
arena_prof_promoted(tsdn, iealloc(p), p, usize);
} else {
p = iralloct(tsdn, old_ptr, old_usize, usize, alignment, zero,
tcache, arena);
p = iralloct(tsdn, extent, old_ptr, old_usize, usize, alignment,
zero, tcache, arena);
}

return (p);
}

JEMALLOC_ALWAYS_INLINE_C void *
irallocx_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t size,
size_t alignment, size_t *usize, bool zero, tcache_t *tcache,
irallocx_prof(tsd_t *tsd, extent_t *extent, void *old_ptr, size_t old_usize,
size_t size, size_t alignment, size_t *usize, bool zero, tcache_t *tcache,
arena_t *arena)
{
void *p;
extent_t *e;
bool prof_active;
prof_tctx_t *old_tctx, *tctx;

prof_active = prof_active_get_unlocked();
old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_ptr);
old_tctx = prof_tctx_get(tsd_tsdn(tsd), extent, old_ptr);
tctx = prof_alloc_prep(tsd, *usize, prof_active, true);
if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
p = irallocx_prof_sample(tsd_tsdn(tsd), old_ptr, old_usize,
*usize, alignment, zero, tcache, arena, tctx);
p = irallocx_prof_sample(tsd_tsdn(tsd), extent, old_ptr,
old_usize, *usize, alignment, zero, tcache, arena, tctx);
} else {
p = iralloct(tsd_tsdn(tsd), old_ptr, old_usize, size, alignment,
zero, tcache, arena);
p = iralloct(tsd_tsdn(tsd), extent, old_ptr, old_usize, size,
alignment, zero, tcache, arena);
}
if (unlikely(p == NULL)) {
prof_alloc_rollback(tsd, tctx, true);
@ -2164,9 +2177,11 @@ irallocx_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t size,
* be the same as the current usize because of in-place large
* reallocation. Therefore, query the actual value of usize.
*/
*usize = isalloc(tsd_tsdn(tsd), p, config_prof);
}
prof_realloc(tsd, p, *usize, tctx, prof_active, true, old_ptr,
e = extent;
*usize = isalloc(tsd_tsdn(tsd), e, p, config_prof);
} else
e = iealloc(p);
prof_realloc(tsd, e, p, *usize, tctx, prof_active, true, old_ptr,
old_usize, old_tctx);

return (p);
@ -2179,6 +2194,7 @@ je_rallocx(void *ptr, size_t size, int flags)
{
void *p;
tsd_t *tsd;
extent_t *extent;
size_t usize;
size_t old_usize;
size_t alignment = MALLOCX_ALIGN_GET(flags);
@ -2191,6 +2207,7 @@ je_rallocx(void *ptr, size_t size, int flags)
assert(malloc_initialized() || IS_INITIALIZER);
tsd = tsd_fetch();
witness_assert_lockless(tsd_tsdn(tsd));
extent = iealloc(ptr);

if (unlikely((flags & MALLOCX_ARENA_MASK) != 0)) {
unsigned arena_ind = MALLOCX_ARENA_GET(flags);
@ -2208,23 +2225,25 @@ je_rallocx(void *ptr, size_t size, int flags)
} else
tcache = tcache_get(tsd, true);

old_usize = isalloc(tsd_tsdn(tsd), ptr, config_prof);
old_usize = isalloc(tsd_tsdn(tsd), extent, ptr, config_prof);

if (config_prof && opt_prof) {
usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
if (unlikely(usize == 0 || usize > HUGE_MAXCLASS))
goto label_oom;
p = irallocx_prof(tsd, ptr, old_usize, size, alignment, &usize,
zero, tcache, arena);
p = irallocx_prof(tsd, extent, ptr, old_usize, size, alignment,
&usize, zero, tcache, arena);
if (unlikely(p == NULL))
goto label_oom;
} else {
p = iralloct(tsd_tsdn(tsd), ptr, old_usize, size, alignment,
zero, tcache, arena);
p = iralloct(tsd_tsdn(tsd), extent, ptr, old_usize, size,
alignment, zero, tcache, arena);
if (unlikely(p == NULL))
goto label_oom;
if (config_stats)
usize = isalloc(tsd_tsdn(tsd), p, config_prof);
if (config_stats) {
usize = isalloc(tsd_tsdn(tsd), iealloc(p), p,
config_prof);
}
}
assert(alignment == 0 || ((uintptr_t)p & (alignment - 1)) == ZU(0));

@ -2246,42 +2265,43 @@ label_oom:
}

JEMALLOC_ALWAYS_INLINE_C size_t
ixallocx_helper(tsdn_t *tsdn, void *ptr, size_t old_usize, size_t size,
size_t extra, size_t alignment, bool zero)
ixallocx_helper(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t old_usize,
size_t size, size_t extra, size_t alignment, bool zero)
{
size_t usize;

if (ixalloc(tsdn, ptr, old_usize, size, extra, alignment, zero))
if (ixalloc(tsdn, extent, ptr, old_usize, size, extra, alignment, zero))
return (old_usize);
usize = isalloc(tsdn, ptr, config_prof);
usize = isalloc(tsdn, extent, ptr, config_prof);

return (usize);
}

static size_t
ixallocx_prof_sample(tsdn_t *tsdn, void *ptr, size_t old_usize, size_t size,
size_t extra, size_t alignment, bool zero, prof_tctx_t *tctx)
ixallocx_prof_sample(tsdn_t *tsdn, extent_t *extent, void *ptr,
size_t old_usize, size_t size, size_t extra, size_t alignment, bool zero,
prof_tctx_t *tctx)
{
size_t usize;

if (tctx == NULL)
return (old_usize);
usize = ixallocx_helper(tsdn, ptr, old_usize, size, extra, alignment,
zero);
usize = ixallocx_helper(tsdn, extent, ptr, old_usize, size, extra,
alignment, zero);

return (usize);
}

JEMALLOC_ALWAYS_INLINE_C size_t
ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size,
size_t extra, size_t alignment, bool zero)
ixallocx_prof(tsd_t *tsd, extent_t *extent, void *ptr, size_t old_usize,
size_t size, size_t extra, size_t alignment, bool zero)
{
size_t usize_max, usize;
bool prof_active;
prof_tctx_t *old_tctx, *tctx;

prof_active = prof_active_get_unlocked();
old_tctx = prof_tctx_get(tsd_tsdn(tsd), ptr);
old_tctx = prof_tctx_get(tsd_tsdn(tsd), extent, ptr);
/*
* usize isn't knowable before ixalloc() returns when extra is non-zero.
* Therefore, compute its maximum possible value and use that in
@ -2306,18 +2326,18 @@ ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size,
tctx = prof_alloc_prep(tsd, usize_max, prof_active, false);

if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
usize = ixallocx_prof_sample(tsd_tsdn(tsd), ptr, old_usize,
size, extra, alignment, zero, tctx);
usize = ixallocx_prof_sample(tsd_tsdn(tsd), extent, ptr,
old_usize, size, extra, alignment, zero, tctx);
} else {
usize = ixallocx_helper(tsd_tsdn(tsd), ptr, old_usize, size,
extra, alignment, zero);
usize = ixallocx_helper(tsd_tsdn(tsd), extent, ptr, old_usize,
size, extra, alignment, zero);
}
if (usize == old_usize) {
prof_alloc_rollback(tsd, tctx, false);
return (usize);
}
prof_realloc(tsd, ptr, usize, tctx, prof_active, false, ptr, old_usize,
old_tctx);
prof_realloc(tsd, extent, ptr, usize, tctx, prof_active, false, ptr,
old_usize, old_tctx);

return (usize);
}
@ -2326,6 +2346,7 @@ JEMALLOC_EXPORT size_t JEMALLOC_NOTHROW
je_xallocx(void *ptr, size_t size, size_t extra, int flags)
{
tsd_t *tsd;
extent_t *extent;
size_t usize, old_usize;
size_t alignment = MALLOCX_ALIGN_GET(flags);
bool zero = flags & MALLOCX_ZERO;
@ -2336,8 +2357,9 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags)
assert(malloc_initialized() || IS_INITIALIZER);
tsd = tsd_fetch();
witness_assert_lockless(tsd_tsdn(tsd));
extent = iealloc(ptr);

old_usize = isalloc(tsd_tsdn(tsd), ptr, config_prof);
old_usize = isalloc(tsd_tsdn(tsd), extent, ptr, config_prof);

/*
* The API explicitly absolves itself of protecting against (size +
@ -2356,11 +2378,11 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags)
extra = HUGE_MAXCLASS - size;

if (config_prof && opt_prof) {
usize = ixallocx_prof(tsd, ptr, old_usize, size, extra,
usize = ixallocx_prof(tsd, extent, ptr, old_usize, size, extra,
alignment, zero);
} else {
usize = ixallocx_helper(tsd_tsdn(tsd), ptr, old_usize, size,
extra, alignment, zero);
usize = ixallocx_helper(tsd_tsdn(tsd), extent, ptr, old_usize,
size, extra, alignment, zero);
}
if (unlikely(usize == old_usize))
goto label_not_resized;
@ -2390,7 +2412,7 @@ je_sallocx(const void *ptr, int flags)
if (config_ivsalloc)
usize = ivsalloc(tsdn, ptr, config_prof);
else
usize = isalloc(tsdn, ptr, config_prof);
usize = isalloc(tsdn, iealloc(ptr), ptr, config_prof);

witness_assert_lockless(tsdn);
return (usize);
@ -2442,14 +2464,16 @@ JEMALLOC_EXPORT void JEMALLOC_NOTHROW
je_sdallocx(void *ptr, size_t size, int flags)
{
tsd_t *tsd;
tcache_t *tcache;
extent_t *extent;
size_t usize;
tcache_t *tcache;

assert(ptr != NULL);
assert(malloc_initialized() || IS_INITIALIZER);
tsd = tsd_fetch();
extent = iealloc(ptr);
usize = inallocx(tsd_tsdn(tsd), size, flags);
assert(usize == isalloc(tsd_tsdn(tsd), ptr, config_prof));
assert(usize == isalloc(tsd_tsdn(tsd), extent, ptr, config_prof));

witness_assert_lockless(tsd_tsdn(tsd));
if (unlikely((flags & MALLOCX_TCACHE_MASK) != 0)) {
@ -2462,9 +2486,9 @@ je_sdallocx(void *ptr, size_t size, int flags)

UTRACE(ptr, 0, 0);
if (likely(!malloc_slow))
isfree(tsd, ptr, usize, tcache, false);
isfree(tsd, extent, ptr, usize, tcache, false);
else
isfree(tsd, ptr, usize, tcache, true);
isfree(tsd, extent, ptr, usize, tcache, true);
witness_assert_lockless(tsd_tsdn(tsd));
}

@ -2566,8 +2590,10 @@ je_malloc_usable_size(JEMALLOC_USABLE_SIZE_CONST void *ptr)

if (config_ivsalloc)
ret = ivsalloc(tsdn, ptr, config_prof);
else
ret = (ptr == NULL) ? 0 : isalloc(tsdn, ptr, config_prof);
else {
ret = (ptr == NULL) ? 0 : isalloc(tsdn, iealloc(ptr), ptr,
config_prof);
}

witness_assert_lockless(tsdn);
return (ret);
32 src/prof.c
@ -223,11 +223,11 @@ prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated)
}

void
prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
prof_tctx_t *tctx)
prof_malloc_sample_object(tsdn_t *tsdn, extent_t *extent, const void *ptr,
size_t usize, prof_tctx_t *tctx)
{

prof_tctx_set(tsdn, ptr, usize, tctx);
prof_tctx_set(tsdn, extent, ptr, usize, tctx);

malloc_mutex_lock(tsdn, tctx->tdata->lock);
tctx->cnts.curobjs++;
@ -596,7 +596,7 @@ prof_gctx_try_destroy(tsd_t *tsd, prof_tdata_t *tdata_self, prof_gctx_t *gctx,
prof_leave(tsd, tdata_self);
/* Destroy gctx. */
malloc_mutex_unlock(tsd_tsdn(tsd), gctx->lock);
idalloctm(tsd_tsdn(tsd), gctx, NULL, true, true);
idalloctm(tsd_tsdn(tsd), iealloc(gctx), gctx, NULL, true, true);
} else {
/*
* Compensate for increment in prof_tctx_destroy() or
@ -707,7 +707,7 @@ prof_tctx_destroy(tsd_t *tsd, prof_tctx_t *tctx)
prof_tdata_destroy(tsd_tsdn(tsd), tdata, false);

if (destroy_tctx)
idalloctm(tsd_tsdn(tsd), tctx, NULL, true, true);
idalloctm(tsd_tsdn(tsd), iealloc(tctx), tctx, NULL, true, true);
}

static bool
@ -736,7 +736,8 @@ prof_lookup_global(tsd_t *tsd, prof_bt_t *bt, prof_tdata_t *tdata,
if (ckh_insert(tsd_tsdn(tsd), &bt2gctx, btkey.v, gctx.v)) {
/* OOM. */
prof_leave(tsd, tdata);
idalloctm(tsd_tsdn(tsd), gctx.v, NULL, true, true);
idalloctm(tsd_tsdn(tsd), iealloc(gctx.v), gctx.v, NULL,
true, true);
return (true);
}
new_gctx = true;
@ -816,7 +817,8 @@ prof_lookup(tsd_t *tsd, prof_bt_t *bt)
if (error) {
if (new_gctx)
prof_gctx_try_destroy(tsd, tdata, gctx, tdata);
idalloctm(tsd_tsdn(tsd), ret.v, NULL, true, true);
idalloctm(tsd_tsdn(tsd), iealloc(ret.v), ret.v, NULL,
true, true);
return (NULL);
}
malloc_mutex_lock(tsd_tsdn(tsd), gctx->lock);
@ -1238,7 +1240,8 @@ prof_gctx_finish(tsd_t *tsd, prof_gctx_tree_t *gctxs)
to_destroy);
tctx_tree_remove(&gctx->tctxs,
to_destroy);
idalloctm(tsd_tsdn(tsd), to_destroy,
idalloctm(tsd_tsdn(tsd),
iealloc(to_destroy), to_destroy,
NULL, true, true);
} else
next = NULL;
@ -1815,7 +1818,7 @@ prof_tdata_init_impl(tsdn_t *tsdn, uint64_t thr_uid, uint64_t thr_discrim,

if (ckh_new(tsdn, &tdata->bt2tctx, PROF_CKH_MINITEMS,
prof_bt_hash, prof_bt_keycomp)) {
idalloctm(tsdn, tdata, NULL, true, true);
idalloctm(tsdn, iealloc(tdata), tdata, NULL, true, true);
return (NULL);
}

@ -1878,10 +1881,12 @@ prof_tdata_destroy_locked(tsdn_t *tsdn, prof_tdata_t *tdata,

assert(prof_tdata_should_destroy_unlocked(tdata, even_if_attached));

if (tdata->thread_name != NULL)
idalloctm(tsdn, tdata->thread_name, NULL, true, true);
if (tdata->thread_name != NULL) {
idalloctm(tsdn, iealloc(tdata->thread_name), tdata->thread_name,
NULL, true, true);
}
ckh_delete(tsdn, &tdata->bt2tctx);
idalloctm(tsdn, tdata, NULL, true, true);
idalloctm(tsdn, iealloc(tdata), tdata, NULL, true, true);
}

static void
@ -2075,7 +2080,8 @@ prof_thread_name_set(tsd_t *tsd, const char *thread_name)
return (EAGAIN);

if (tdata->thread_name != NULL) {
idalloctm(tsd_tsdn(tsd), tdata->thread_name, NULL, true, true);
idalloctm(tsd_tsdn(tsd), iealloc(tdata->thread_name),
tdata->thread_name, NULL, true, true);
tdata->thread_name = NULL;
}
if (strlen(s) > 0)
31 src/tcache.c
@ -27,7 +27,7 @@ size_t
tcache_salloc(tsdn_t *tsdn, const void *ptr)
{

return (arena_salloc(tsdn, ptr, false));
return (arena_salloc(tsdn, iealloc(ptr), ptr, false));
}

void
@ -101,9 +101,8 @@ tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
assert(arena != NULL);
for (nflush = tbin->ncached - rem; nflush > 0; nflush = ndeferred) {
/* Lock the arena bin associated with the first object. */
arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(
*(tbin->avail - 1));
arena_t *bin_arena = extent_arena_get(&chunk->extent);
extent_t *extent = iealloc(*(tbin->avail - 1));
arena_t *bin_arena = extent_arena_get(extent);
arena_bin_t *bin = &bin_arena->bins[binind];

if (config_prof && bin_arena == arena) {
@ -125,14 +124,17 @@ tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
for (i = 0; i < nflush; i++) {
ptr = *(tbin->avail - 1 - i);
assert(ptr != NULL);
chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
if (extent_arena_get(&chunk->extent) == bin_arena) {

extent = iealloc(ptr);
if (extent_arena_get(extent) == bin_arena) {
arena_chunk_t *chunk =
(arena_chunk_t *)extent_addr_get(extent);
size_t pageind = ((uintptr_t)ptr -
(uintptr_t)chunk) >> LG_PAGE;
arena_chunk_map_bits_t *bitselm =
arena_bitselm_get_mutable(chunk, pageind);
arena_dalloc_bin_junked_locked(tsd_tsdn(tsd),
bin_arena, chunk, ptr, bitselm);
bin_arena, chunk, extent, ptr, bitselm);
} else {
/*
* This object was allocated via a different
@ -183,9 +185,8 @@ tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, szind_t binind,
assert(arena != NULL);
for (nflush = tbin->ncached - rem; nflush > 0; nflush = ndeferred) {
/* Lock the arena associated with the first object. */
arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(
*(tbin->avail - 1));
arena_t *locked_arena = extent_arena_get(&chunk->extent);
extent_t *extent = iealloc(*(tbin->avail - 1));
arena_t *locked_arena = extent_arena_get(extent);
UNUSED bool idump;

if (config_prof)
@ -210,10 +211,12 @@ tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, szind_t binind,
for (i = 0; i < nflush; i++) {
ptr = *(tbin->avail - 1 - i);
assert(ptr != NULL);
chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
if (extent_arena_get(&chunk->extent) == locked_arena) {
extent = iealloc(ptr);
if (extent_arena_get(extent) == locked_arena) {
arena_chunk_t *chunk =
(arena_chunk_t *)extent_addr_get(extent);
arena_dalloc_large_junked_locked(tsd_tsdn(tsd),
locked_arena, chunk, ptr);
locked_arena, chunk, extent, ptr);
} else {
/*
* This object was allocated via a different
@ -391,7 +394,7 @@ tcache_destroy(tsd_t *tsd, tcache_t *tcache)
arena_prof_accum(tsd_tsdn(tsd), arena, tcache->prof_accumbytes))
prof_idump(tsd_tsdn(tsd));

idalloctm(tsd_tsdn(tsd), tcache, NULL, true, true);
idalloctm(tsd_tsdn(tsd), iealloc(tcache), tcache, NULL, true, true);
}

void
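The flush loops show the last piece of the substitution: where a chunk pointer is still needed (for the page-map helpers), it is now recovered from the extent rather than by masking the object's address. Condensed from the hunks above:

/* From the flush loops: recover the chunk via the extent, not by masking. */
extent = iealloc(ptr);
arena_chunk_t *chunk = (arena_chunk_t *)extent_addr_get(extent);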