Update brace style.
Add braces around single-line blocks, and remove line breaks before function-opening braces. This resolves #537.
Parent: 5154ff32ee
Commit: c4c2592c83
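To illustrate the style change, here is a minimal before/after sketch. The function is hypothetical (it does not appear in the diff below); only the brace placement matters: no line break before a function's opening brace, and braces around every single-statement block.

Before:

	static bool
	example_is_nonzero(unsigned x)
	{
		if (x == 0)
			return (false);
		return (true);
	}

After:

	static bool
	example_is_nonzero(unsigned x) {
		if (x == 0) {
			return (false);
		}
		return (true);
	}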
@@ -14,32 +14,27 @@ bool arena_prof_accum(tsdn_t *tsdn, arena_t *arena, uint64_t accumbytes);
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
 
 JEMALLOC_INLINE unsigned
-arena_ind_get(const arena_t *arena)
-{
+arena_ind_get(const arena_t *arena) {
 	return (base_ind_get(arena->base));
 }
 
 JEMALLOC_INLINE void
-arena_internal_add(arena_t *arena, size_t size)
-{
+arena_internal_add(arena_t *arena, size_t size) {
 	atomic_add_zu(&arena->stats.internal, size);
 }
 
 JEMALLOC_INLINE void
-arena_internal_sub(arena_t *arena, size_t size)
-{
+arena_internal_sub(arena_t *arena, size_t size) {
 	atomic_sub_zu(&arena->stats.internal, size);
 }
 
 JEMALLOC_INLINE size_t
-arena_internal_get(arena_t *arena)
-{
+arena_internal_get(arena_t *arena) {
 	return (atomic_read_zu(&arena->stats.internal));
 }
 
 JEMALLOC_INLINE bool
-arena_prof_accum_impl(arena_t *arena, uint64_t accumbytes)
-{
+arena_prof_accum_impl(arena_t *arena, uint64_t accumbytes) {
 	cassert(config_prof);
 	assert(prof_interval != 0);
 
@@ -52,22 +47,22 @@ arena_prof_accum_impl(arena_t *arena, uint64_t accumbytes)
 }
 
 JEMALLOC_INLINE bool
-arena_prof_accum_locked(arena_t *arena, uint64_t accumbytes)
-{
+arena_prof_accum_locked(arena_t *arena, uint64_t accumbytes) {
 	cassert(config_prof);
 
-	if (likely(prof_interval == 0))
+	if (likely(prof_interval == 0)) {
 		return (false);
+	}
 	return (arena_prof_accum_impl(arena, accumbytes));
 }
 
 JEMALLOC_INLINE bool
-arena_prof_accum(tsdn_t *tsdn, arena_t *arena, uint64_t accumbytes)
-{
+arena_prof_accum(tsdn_t *tsdn, arena_t *arena, uint64_t accumbytes) {
 	cassert(config_prof);
 
-	if (likely(prof_interval == 0))
+	if (likely(prof_interval == 0)) {
 		return (false);
+	}
 
 	{
 		bool ret;
@@ -23,39 +23,37 @@ void arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_))
 JEMALLOC_INLINE szind_t
-arena_bin_index(arena_t *arena, arena_bin_t *bin)
-{
+arena_bin_index(arena_t *arena, arena_bin_t *bin) {
 	szind_t binind = (szind_t)(bin - arena->bins);
 	assert(binind < NBINS);
 	return (binind);
 }
 
 JEMALLOC_INLINE prof_tctx_t *
-arena_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent, const void *ptr)
-{
+arena_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent, const void *ptr) {
 	cassert(config_prof);
 	assert(ptr != NULL);
 
-	if (unlikely(!extent_slab_get(extent)))
+	if (unlikely(!extent_slab_get(extent))) {
 		return (large_prof_tctx_get(tsdn, extent));
+	}
 	return ((prof_tctx_t *)(uintptr_t)1U);
 }
 
 JEMALLOC_INLINE void
 arena_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr,
-    size_t usize, prof_tctx_t *tctx)
-{
+    size_t usize, prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);
 
-	if (unlikely(!extent_slab_get(extent)))
+	if (unlikely(!extent_slab_get(extent))) {
 		large_prof_tctx_set(tsdn, extent, tctx);
+	}
 }
 
 JEMALLOC_INLINE void
 arena_prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr,
-    prof_tctx_t *tctx)
-{
+    prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);
 	assert(!extent_slab_get(extent));
@@ -64,24 +62,25 @@ arena_prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr,
 }
 
 JEMALLOC_ALWAYS_INLINE void
-arena_decay_ticks(tsdn_t *tsdn, arena_t *arena, unsigned nticks)
-{
+arena_decay_ticks(tsdn_t *tsdn, arena_t *arena, unsigned nticks) {
 	tsd_t *tsd;
 	ticker_t *decay_ticker;
 
-	if (unlikely(tsdn_null(tsdn)))
+	if (unlikely(tsdn_null(tsdn))) {
 		return;
+	}
 	tsd = tsdn_tsd(tsdn);
 	decay_ticker = decay_ticker_get(tsd, arena_ind_get(arena));
-	if (unlikely(decay_ticker == NULL))
+	if (unlikely(decay_ticker == NULL)) {
 		return;
-	if (unlikely(ticker_ticks(decay_ticker, nticks)))
+	}
+	if (unlikely(ticker_ticks(decay_ticker, nticks))) {
 		arena_purge(tsdn, arena, false);
+	}
 }
 
 JEMALLOC_ALWAYS_INLINE void
-arena_decay_tick(tsdn_t *tsdn, arena_t *arena)
-{
+arena_decay_tick(tsdn_t *tsdn, arena_t *arena) {
 	malloc_mutex_assert_not_owner(tsdn, &arena->lock);
 
 	arena_decay_ticks(tsdn, arena, 1);
@@ -89,8 +88,7 @@ arena_decay_tick(tsdn_t *tsdn, arena_t *arena)
 
 JEMALLOC_ALWAYS_INLINE void *
 arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero,
-    tcache_t *tcache, bool slow_path)
-{
+    tcache_t *tcache, bool slow_path) {
 	assert(!tsdn_null(tsdn) || tcache == NULL);
 	assert(size != 0);
 
@@ -111,31 +109,29 @@ arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero,
 }
 
 JEMALLOC_ALWAYS_INLINE arena_t *
-arena_aalloc(tsdn_t *tsdn, const void *ptr)
-{
+arena_aalloc(tsdn_t *tsdn, const void *ptr) {
 	return (extent_arena_get(iealloc(tsdn, ptr)));
 }
 
 /* Return the size of the allocation pointed to by ptr. */
 JEMALLOC_ALWAYS_INLINE size_t
-arena_salloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr)
-{
+arena_salloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr) {
 	size_t ret;
 
 	assert(ptr != NULL);
 
-	if (likely(extent_slab_get(extent)))
+	if (likely(extent_slab_get(extent))) {
 		ret = index2size(extent_slab_data_get_const(extent)->binind);
-	else
+	} else {
 		ret = large_salloc(tsdn, extent);
+	}
 
 	return (ret);
 }
 
 JEMALLOC_ALWAYS_INLINE void
 arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
-    bool slow_path)
-{
+    bool slow_path) {
 	assert(!tsdn_null(tsdn) || tcache == NULL);
 	assert(ptr != NULL);
 
@@ -160,15 +156,15 @@ arena_dalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
 				tcache_dalloc_large(tsdn_tsd(tsdn), tcache,
 				    ptr, usize, slow_path);
 			}
-		} else
+		} else {
 			large_dalloc(tsdn, extent);
+		}
 	}
 }
 
 JEMALLOC_ALWAYS_INLINE void
 arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
-    tcache_t *tcache, bool slow_path)
-{
+    tcache_t *tcache, bool slow_path) {
 	assert(!tsdn_null(tsdn) || tcache == NULL);
 	assert(ptr != NULL);
 
@@ -192,10 +188,11 @@ arena_sdalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
 				tcache_dalloc_large(tsdn_tsd(tsdn), tcache, ptr,
 				    size, slow_path);
 			}
-		} else
+		} else {
 			large_dalloc(tsdn, extent);
+		}
 	}
 }
 
 #endif /* (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ARENA_C_)) */
 #endif /* JEMALLOC_INTERNAL_ARENA_INLINES_B_H */
@@ -37,8 +37,9 @@
 
 #ifndef assert_not_implemented
 #define assert_not_implemented(e) do { \
-	if (unlikely(config_debug && !(e))) \
+	if (unlikely(config_debug && !(e))) { \
 		not_implemented(); \
+	} \
 } while (0)
 #endif
 
@@ -53,8 +53,7 @@ void atomic_write_u(unsigned *p, unsigned x);
 #if (LG_SIZEOF_PTR == 3 || LG_SIZEOF_INT == 3)
 # if (defined(__amd64__) || defined(__x86_64__))
 JEMALLOC_INLINE uint64_t
-atomic_add_u64(uint64_t *p, uint64_t x)
-{
+atomic_add_u64(uint64_t *p, uint64_t x) {
 	uint64_t t = x;
 
 	asm volatile (
@@ -67,8 +66,7 @@ atomic_add_u64(uint64_t *p, uint64_t x)
 }
 
 JEMALLOC_INLINE uint64_t
-atomic_sub_u64(uint64_t *p, uint64_t x)
-{
+atomic_sub_u64(uint64_t *p, uint64_t x) {
 	uint64_t t;
 
 	x = (uint64_t)(-(int64_t)x);
@@ -83,8 +81,7 @@ atomic_sub_u64(uint64_t *p, uint64_t x)
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s)
-{
+atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s) {
 	uint8_t success;
 
 	asm volatile (
@@ -99,8 +96,7 @@ atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s)
 }
 
 JEMALLOC_INLINE void
-atomic_write_u64(uint64_t *p, uint64_t x)
-{
+atomic_write_u64(uint64_t *p, uint64_t x) {
 	asm volatile (
 	    "xchgq %1, %0;" /* Lock is implied by xchgq. */
 	    : "=m" (*p), "+r" (x) /* Outputs. */
@@ -110,36 +106,31 @@ atomic_write_u64(uint64_t *p, uint64_t x)
 }
 # elif (defined(JEMALLOC_C11ATOMICS))
 JEMALLOC_INLINE uint64_t
-atomic_add_u64(uint64_t *p, uint64_t x)
-{
+atomic_add_u64(uint64_t *p, uint64_t x) {
 	volatile atomic_uint_least64_t *a = (volatile atomic_uint_least64_t *)p;
 	return (atomic_fetch_add(a, x) + x);
 }
 
 JEMALLOC_INLINE uint64_t
-atomic_sub_u64(uint64_t *p, uint64_t x)
-{
+atomic_sub_u64(uint64_t *p, uint64_t x) {
 	volatile atomic_uint_least64_t *a = (volatile atomic_uint_least64_t *)p;
 	return (atomic_fetch_sub(a, x) - x);
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s)
-{
+atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s) {
 	volatile atomic_uint_least64_t *a = (volatile atomic_uint_least64_t *)p;
 	return (!atomic_compare_exchange_strong(a, &c, s));
 }
 
 JEMALLOC_INLINE void
-atomic_write_u64(uint64_t *p, uint64_t x)
-{
+atomic_write_u64(uint64_t *p, uint64_t x) {
 	volatile atomic_uint_least64_t *a = (volatile atomic_uint_least64_t *)p;
 	atomic_store(a, x);
 }
 # elif (defined(JEMALLOC_ATOMIC9))
 JEMALLOC_INLINE uint64_t
-atomic_add_u64(uint64_t *p, uint64_t x)
-{
+atomic_add_u64(uint64_t *p, uint64_t x) {
 	/*
 	 * atomic_fetchadd_64() doesn't exist, but we only ever use this
 	 * function on LP64 systems, so atomic_fetchadd_long() will do.
@@ -150,50 +141,43 @@ atomic_add_u64(uint64_t *p, uint64_t x)
 }
 
 JEMALLOC_INLINE uint64_t
-atomic_sub_u64(uint64_t *p, uint64_t x)
-{
+atomic_sub_u64(uint64_t *p, uint64_t x) {
 	assert(sizeof(uint64_t) == sizeof(unsigned long));
 
 	return (atomic_fetchadd_long(p, (unsigned long)(-(long)x)) - x);
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s)
-{
+atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s) {
 	assert(sizeof(uint64_t) == sizeof(unsigned long));
 
 	return (!atomic_cmpset_long(p, (unsigned long)c, (unsigned long)s));
 }
 
 JEMALLOC_INLINE void
-atomic_write_u64(uint64_t *p, uint64_t x)
-{
+atomic_write_u64(uint64_t *p, uint64_t x) {
 	assert(sizeof(uint64_t) == sizeof(unsigned long));
 
 	atomic_store_rel_long(p, x);
 }
 # elif (defined(JEMALLOC_OSATOMIC))
 JEMALLOC_INLINE uint64_t
-atomic_add_u64(uint64_t *p, uint64_t x)
-{
+atomic_add_u64(uint64_t *p, uint64_t x) {
 	return (OSAtomicAdd64((int64_t)x, (int64_t *)p));
 }
 
 JEMALLOC_INLINE uint64_t
-atomic_sub_u64(uint64_t *p, uint64_t x)
-{
+atomic_sub_u64(uint64_t *p, uint64_t x) {
 	return (OSAtomicAdd64(-((int64_t)x), (int64_t *)p));
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s)
-{
+atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s) {
 	return (!OSAtomicCompareAndSwap64(c, s, (int64_t *)p));
 }
 
 JEMALLOC_INLINE void
-atomic_write_u64(uint64_t *p, uint64_t x)
-{
+atomic_write_u64(uint64_t *p, uint64_t x) {
 	uint64_t o;
 
 	/*The documented OSAtomic*() API does not expose an atomic exchange. */
@@ -203,20 +187,17 @@ atomic_write_u64(uint64_t *p, uint64_t x)
 }
 # elif (defined(_MSC_VER))
 JEMALLOC_INLINE uint64_t
-atomic_add_u64(uint64_t *p, uint64_t x)
-{
+atomic_add_u64(uint64_t *p, uint64_t x) {
 	return (InterlockedExchangeAdd64(p, x) + x);
 }
 
 JEMALLOC_INLINE uint64_t
-atomic_sub_u64(uint64_t *p, uint64_t x)
-{
+atomic_sub_u64(uint64_t *p, uint64_t x) {
 	return (InterlockedExchangeAdd64(p, -((int64_t)x)) - x);
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s)
-{
+atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s) {
 	uint64_t o;
 
 	o = InterlockedCompareExchange64(p, s, c);
@@ -224,33 +205,28 @@ atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s)
 }
 
 JEMALLOC_INLINE void
-atomic_write_u64(uint64_t *p, uint64_t x)
-{
+atomic_write_u64(uint64_t *p, uint64_t x) {
 	InterlockedExchange64(p, x);
 }
 # elif (defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8) || \
     defined(JE_FORCE_SYNC_COMPARE_AND_SWAP_8))
 JEMALLOC_INLINE uint64_t
-atomic_add_u64(uint64_t *p, uint64_t x)
-{
+atomic_add_u64(uint64_t *p, uint64_t x) {
 	return (__sync_add_and_fetch(p, x));
 }
 
 JEMALLOC_INLINE uint64_t
-atomic_sub_u64(uint64_t *p, uint64_t x)
-{
+atomic_sub_u64(uint64_t *p, uint64_t x) {
 	return (__sync_sub_and_fetch(p, x));
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s)
-{
+atomic_cas_u64(uint64_t *p, uint64_t c, uint64_t s) {
 	return (!__sync_bool_compare_and_swap(p, c, s));
 }
 
 JEMALLOC_INLINE void
-atomic_write_u64(uint64_t *p, uint64_t x)
-{
+atomic_write_u64(uint64_t *p, uint64_t x) {
 	__sync_lock_test_and_set(p, x);
 }
 # else
@@ -262,8 +238,7 @@ atomic_write_u64(uint64_t *p, uint64_t x)
 /* 32-bit operations. */
 #if (defined(__i386__) || defined(__amd64__) || defined(__x86_64__))
 JEMALLOC_INLINE uint32_t
-atomic_add_u32(uint32_t *p, uint32_t x)
-{
+atomic_add_u32(uint32_t *p, uint32_t x) {
 	uint32_t t = x;
 
 	asm volatile (
@@ -276,8 +251,7 @@ atomic_add_u32(uint32_t *p, uint32_t x)
 }
 
 JEMALLOC_INLINE uint32_t
-atomic_sub_u32(uint32_t *p, uint32_t x)
-{
+atomic_sub_u32(uint32_t *p, uint32_t x) {
 	uint32_t t;
 
 	x = (uint32_t)(-(int32_t)x);
@@ -292,8 +266,7 @@ atomic_sub_u32(uint32_t *p, uint32_t x)
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s)
-{
+atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s) {
 	uint8_t success;
 
 	asm volatile (
@@ -308,8 +281,7 @@ atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s)
 }
 
 JEMALLOC_INLINE void
-atomic_write_u32(uint32_t *p, uint32_t x)
-{
+atomic_write_u32(uint32_t *p, uint32_t x) {
 	asm volatile (
 	    "xchgl %1, %0;" /* Lock is implied by xchgl. */
 	    : "=m" (*p), "+r" (x) /* Outputs. */
@@ -319,78 +291,66 @@ atomic_write_u32(uint32_t *p, uint32_t x)
 }
 # elif (defined(JEMALLOC_C11ATOMICS))
 JEMALLOC_INLINE uint32_t
-atomic_add_u32(uint32_t *p, uint32_t x)
-{
+atomic_add_u32(uint32_t *p, uint32_t x) {
 	volatile atomic_uint_least32_t *a = (volatile atomic_uint_least32_t *)p;
 	return (atomic_fetch_add(a, x) + x);
 }
 
 JEMALLOC_INLINE uint32_t
-atomic_sub_u32(uint32_t *p, uint32_t x)
-{
+atomic_sub_u32(uint32_t *p, uint32_t x) {
 	volatile atomic_uint_least32_t *a = (volatile atomic_uint_least32_t *)p;
 	return (atomic_fetch_sub(a, x) - x);
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s)
-{
+atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s) {
 	volatile atomic_uint_least32_t *a = (volatile atomic_uint_least32_t *)p;
 	return (!atomic_compare_exchange_strong(a, &c, s));
 }
 
 JEMALLOC_INLINE void
-atomic_write_u32(uint32_t *p, uint32_t x)
-{
+atomic_write_u32(uint32_t *p, uint32_t x) {
 	volatile atomic_uint_least32_t *a = (volatile atomic_uint_least32_t *)p;
 	atomic_store(a, x);
 }
 #elif (defined(JEMALLOC_ATOMIC9))
 JEMALLOC_INLINE uint32_t
-atomic_add_u32(uint32_t *p, uint32_t x)
-{
+atomic_add_u32(uint32_t *p, uint32_t x) {
 	return (atomic_fetchadd_32(p, x) + x);
 }
 
 JEMALLOC_INLINE uint32_t
-atomic_sub_u32(uint32_t *p, uint32_t x)
-{
+atomic_sub_u32(uint32_t *p, uint32_t x) {
 	return (atomic_fetchadd_32(p, (uint32_t)(-(int32_t)x)) - x);
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s)
-{
+atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s) {
 	return (!atomic_cmpset_32(p, c, s));
 }
 
 JEMALLOC_INLINE void
-atomic_write_u32(uint32_t *p, uint32_t x)
-{
+atomic_write_u32(uint32_t *p, uint32_t x) {
 	atomic_store_rel_32(p, x);
 }
 #elif (defined(JEMALLOC_OSATOMIC))
 JEMALLOC_INLINE uint32_t
-atomic_add_u32(uint32_t *p, uint32_t x)
-{
+atomic_add_u32(uint32_t *p, uint32_t x) {
 	return (OSAtomicAdd32((int32_t)x, (int32_t *)p));
 }
 
 JEMALLOC_INLINE uint32_t
-atomic_sub_u32(uint32_t *p, uint32_t x)
-{
+atomic_sub_u32(uint32_t *p, uint32_t x) {
 	return (OSAtomicAdd32(-((int32_t)x), (int32_t *)p));
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s)
-{
+atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s) {
 	return (!OSAtomicCompareAndSwap32(c, s, (int32_t *)p));
 }
 
 JEMALLOC_INLINE void
-atomic_write_u32(uint32_t *p, uint32_t x)
-{
+atomic_write_u32(uint32_t *p, uint32_t x) {
 	uint32_t o;
 
 	/*The documented OSAtomic*() API does not expose an atomic exchange. */
@@ -400,20 +360,17 @@ atomic_write_u32(uint32_t *p, uint32_t x)
 }
 #elif (defined(_MSC_VER))
 JEMALLOC_INLINE uint32_t
-atomic_add_u32(uint32_t *p, uint32_t x)
-{
+atomic_add_u32(uint32_t *p, uint32_t x) {
 	return (InterlockedExchangeAdd(p, x) + x);
 }
 
 JEMALLOC_INLINE uint32_t
-atomic_sub_u32(uint32_t *p, uint32_t x)
-{
+atomic_sub_u32(uint32_t *p, uint32_t x) {
 	return (InterlockedExchangeAdd(p, -((int32_t)x)) - x);
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s)
-{
+atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s) {
 	uint32_t o;
 
 	o = InterlockedCompareExchange(p, s, c);
@@ -421,33 +378,28 @@ atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s)
 }
 
 JEMALLOC_INLINE void
-atomic_write_u32(uint32_t *p, uint32_t x)
-{
+atomic_write_u32(uint32_t *p, uint32_t x) {
 	InterlockedExchange(p, x);
 }
 #elif (defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) || \
    defined(JE_FORCE_SYNC_COMPARE_AND_SWAP_4))
 JEMALLOC_INLINE uint32_t
-atomic_add_u32(uint32_t *p, uint32_t x)
-{
+atomic_add_u32(uint32_t *p, uint32_t x) {
 	return (__sync_add_and_fetch(p, x));
 }
 
 JEMALLOC_INLINE uint32_t
-atomic_sub_u32(uint32_t *p, uint32_t x)
-{
+atomic_sub_u32(uint32_t *p, uint32_t x) {
 	return (__sync_sub_and_fetch(p, x));
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s)
-{
+atomic_cas_u32(uint32_t *p, uint32_t c, uint32_t s) {
 	return (!__sync_bool_compare_and_swap(p, c, s));
 }
 
 JEMALLOC_INLINE void
-atomic_write_u32(uint32_t *p, uint32_t x)
-{
+atomic_write_u32(uint32_t *p, uint32_t x) {
 	__sync_lock_test_and_set(p, x);
 }
 #else
@@ -457,8 +409,7 @@ atomic_write_u32(uint32_t *p, uint32_t x)
 /******************************************************************************/
 /* Pointer operations. */
 JEMALLOC_INLINE void *
-atomic_add_p(void **p, void *x)
-{
+atomic_add_p(void **p, void *x) {
 #if (LG_SIZEOF_PTR == 3)
 	return ((void *)atomic_add_u64((uint64_t *)p, (uint64_t)x));
 #elif (LG_SIZEOF_PTR == 2)
@@ -467,8 +418,7 @@ atomic_add_p(void **p, void *x)
 }
 
 JEMALLOC_INLINE void *
-atomic_sub_p(void **p, void *x)
-{
+atomic_sub_p(void **p, void *x) {
 #if (LG_SIZEOF_PTR == 3)
 	return ((void *)atomic_add_u64((uint64_t *)p, (uint64_t)-((int64_t)x)));
 #elif (LG_SIZEOF_PTR == 2)
@@ -477,8 +427,7 @@ atomic_sub_p(void **p, void *x)
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_p(void **p, void *c, void *s)
-{
+atomic_cas_p(void **p, void *c, void *s) {
 #if (LG_SIZEOF_PTR == 3)
 	return (atomic_cas_u64((uint64_t *)p, (uint64_t)c, (uint64_t)s));
 #elif (LG_SIZEOF_PTR == 2)
@@ -487,8 +436,7 @@ atomic_cas_p(void **p, void *c, void *s)
 }
 
 JEMALLOC_INLINE void
-atomic_write_p(void **p, const void *x)
-{
+atomic_write_p(void **p, const void *x) {
 #if (LG_SIZEOF_PTR == 3)
 	atomic_write_u64((uint64_t *)p, (uint64_t)x);
 #elif (LG_SIZEOF_PTR == 2)
@@ -499,8 +447,7 @@ atomic_write_p(void **p, const void *x)
 /******************************************************************************/
 /* size_t operations. */
 JEMALLOC_INLINE size_t
-atomic_add_zu(size_t *p, size_t x)
-{
+atomic_add_zu(size_t *p, size_t x) {
 #if (LG_SIZEOF_PTR == 3)
 	return ((size_t)atomic_add_u64((uint64_t *)p, (uint64_t)x));
 #elif (LG_SIZEOF_PTR == 2)
@@ -509,8 +456,7 @@ atomic_add_zu(size_t *p, size_t x)
 }
 
 JEMALLOC_INLINE size_t
-atomic_sub_zu(size_t *p, size_t x)
-{
+atomic_sub_zu(size_t *p, size_t x) {
 #if (LG_SIZEOF_PTR == 3)
 	return ((size_t)atomic_add_u64((uint64_t *)p, (uint64_t)-((int64_t)x)));
 #elif (LG_SIZEOF_PTR == 2)
@@ -519,8 +465,7 @@ atomic_sub_zu(size_t *p, size_t x)
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_zu(size_t *p, size_t c, size_t s)
-{
+atomic_cas_zu(size_t *p, size_t c, size_t s) {
 #if (LG_SIZEOF_PTR == 3)
 	return (atomic_cas_u64((uint64_t *)p, (uint64_t)c, (uint64_t)s));
 #elif (LG_SIZEOF_PTR == 2)
@@ -529,8 +474,7 @@ atomic_cas_zu(size_t *p, size_t c, size_t s)
 }
 
 JEMALLOC_INLINE void
-atomic_write_zu(size_t *p, size_t x)
-{
+atomic_write_zu(size_t *p, size_t x) {
 #if (LG_SIZEOF_PTR == 3)
 	atomic_write_u64((uint64_t *)p, (uint64_t)x);
 #elif (LG_SIZEOF_PTR == 2)
@@ -541,8 +485,7 @@ atomic_write_zu(size_t *p, size_t x)
 /******************************************************************************/
 /* unsigned operations. */
 JEMALLOC_INLINE unsigned
-atomic_add_u(unsigned *p, unsigned x)
-{
+atomic_add_u(unsigned *p, unsigned x) {
 #if (LG_SIZEOF_INT == 3)
 	return ((unsigned)atomic_add_u64((uint64_t *)p, (uint64_t)x));
 #elif (LG_SIZEOF_INT == 2)
@@ -551,8 +494,7 @@ atomic_add_u(unsigned *p, unsigned x)
 }
 
 JEMALLOC_INLINE unsigned
-atomic_sub_u(unsigned *p, unsigned x)
-{
+atomic_sub_u(unsigned *p, unsigned x) {
 #if (LG_SIZEOF_INT == 3)
 	return ((unsigned)atomic_add_u64((uint64_t *)p,
 	    (uint64_t)-((int64_t)x)));
@@ -563,8 +505,7 @@ atomic_sub_u(unsigned *p, unsigned x)
 }
 
 JEMALLOC_INLINE bool
-atomic_cas_u(unsigned *p, unsigned c, unsigned s)
-{
+atomic_cas_u(unsigned *p, unsigned c, unsigned s) {
 #if (LG_SIZEOF_INT == 3)
 	return (atomic_cas_u64((uint64_t *)p, (uint64_t)c, (uint64_t)s));
 #elif (LG_SIZEOF_INT == 2)
@@ -573,8 +514,7 @@ atomic_cas_u(unsigned *p, unsigned c, unsigned s)
 }
 
 JEMALLOC_INLINE void
-atomic_write_u(unsigned *p, unsigned x)
-{
+atomic_write_u(unsigned *p, unsigned x) {
 #if (LG_SIZEOF_INT == 3)
 	atomic_write_u64((uint64_t *)p, (uint64_t)x);
 #elif (LG_SIZEOF_INT == 2)
@@ -7,8 +7,7 @@ unsigned base_ind_get(const base_t *base);
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_BASE_C_))
 JEMALLOC_INLINE unsigned
-base_ind_get(const base_t *base)
-{
+base_ind_get(const base_t *base) {
 	return (base->ind);
 }
 #endif
@@ -11,8 +11,7 @@ void bitmap_unset(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit);
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_BITMAP_C_))
 JEMALLOC_INLINE bool
-bitmap_full(bitmap_t *bitmap, const bitmap_info_t *binfo)
-{
+bitmap_full(bitmap_t *bitmap, const bitmap_info_t *binfo) {
 #ifdef BITMAP_USE_TREE
 	size_t rgoff = binfo->levels[binfo->nlevels].group_offset - 1;
 	bitmap_t rg = bitmap[rgoff];
@@ -22,16 +21,16 @@ bitmap_full(bitmap_t *bitmap, const bitmap_info_t *binfo)
 	size_t i;
 
 	for (i = 0; i < binfo->ngroups; i++) {
-		if (bitmap[i] != 0)
+		if (bitmap[i] != 0) {
 			return (false);
+		}
 	}
 	return (true);
 #endif
 }
 
 JEMALLOC_INLINE bool
-bitmap_get(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit)
-{
+bitmap_get(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
 	size_t goff;
 	bitmap_t g;
 
@@ -42,8 +41,7 @@ bitmap_get(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit)
 }
 
 JEMALLOC_INLINE void
-bitmap_set(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit)
-{
+bitmap_set(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
 	size_t goff;
 	bitmap_t *gp;
 	bitmap_t g;
@@ -69,17 +67,17 @@ bitmap_set(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit)
 			assert(g & (ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK)));
 			g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
 			*gp = g;
-			if (g != 0)
+			if (g != 0) {
 				break;
+			}
 		}
 	}
 #endif
 }
 
 /* sfu: set first unset. */
 JEMALLOC_INLINE size_t
-bitmap_sfu(bitmap_t *bitmap, const bitmap_info_t *binfo)
-{
+bitmap_sfu(bitmap_t *bitmap, const bitmap_info_t *binfo) {
 	size_t bit;
 	bitmap_t g;
 	unsigned i;
@@ -109,8 +107,7 @@ bitmap_sfu(bitmap_t *bitmap, const bitmap_info_t *binfo)
 }
 
 JEMALLOC_INLINE void
-bitmap_unset(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit)
-{
+bitmap_unset(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit) {
 	size_t goff;
 	bitmap_t *gp;
 	bitmap_t g;
@@ -140,10 +137,11 @@ bitmap_unset(bitmap_t *bitmap, const bitmap_info_t *binfo, size_t bit)
 			    == 0);
 			g ^= ZU(1) << (bit & BITMAP_GROUP_NBITS_MASK);
 			*gp = g;
-			if (!propagate)
+			if (!propagate) {
 				break;
+			}
 		}
 	}
 #endif /* BITMAP_USE_TREE */
 }
 
@@ -43,8 +43,7 @@ int extent_snad_comp(const extent_t *a, const extent_t *b);
 
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_EXTENT_C_))
 JEMALLOC_INLINE extent_t *
-extent_lookup(tsdn_t *tsdn, const void *ptr, bool dependent)
-{
+extent_lookup(tsdn_t *tsdn, const void *ptr, bool dependent) {
 	rtree_ctx_t rtree_ctx_fallback;
 	rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
 
@@ -53,132 +52,112 @@ extent_lookup(tsdn_t *tsdn, const void *ptr, bool dependent)
 }
 
 JEMALLOC_INLINE arena_t *
-extent_arena_get(const extent_t *extent)
-{
+extent_arena_get(const extent_t *extent) {
 	return (extent->e_arena);
 }
 
 JEMALLOC_INLINE void *
-extent_base_get(const extent_t *extent)
-{
+extent_base_get(const extent_t *extent) {
 	assert(extent->e_addr == PAGE_ADDR2BASE(extent->e_addr) ||
 	    !extent->e_slab);
 	return (PAGE_ADDR2BASE(extent->e_addr));
 }
 
 JEMALLOC_INLINE void *
-extent_addr_get(const extent_t *extent)
-{
+extent_addr_get(const extent_t *extent) {
 	assert(extent->e_addr == PAGE_ADDR2BASE(extent->e_addr) ||
 	    !extent->e_slab);
 	return (extent->e_addr);
 }
 
 JEMALLOC_INLINE size_t
-extent_size_get(const extent_t *extent)
-{
+extent_size_get(const extent_t *extent) {
 	return (extent->e_size);
 }
 
 JEMALLOC_INLINE size_t
-extent_usize_get(const extent_t *extent)
-{
+extent_usize_get(const extent_t *extent) {
 	assert(!extent->e_slab);
 	return (extent->e_usize);
 }
 
 JEMALLOC_INLINE void *
-extent_before_get(const extent_t *extent)
-{
+extent_before_get(const extent_t *extent) {
 	return ((void *)((uintptr_t)extent_base_get(extent) - PAGE));
 }
 
 JEMALLOC_INLINE void *
-extent_last_get(const extent_t *extent)
-{
+extent_last_get(const extent_t *extent) {
 	return ((void *)((uintptr_t)extent_base_get(extent) +
 	    extent_size_get(extent) - PAGE));
 }
 
 JEMALLOC_INLINE void *
-extent_past_get(const extent_t *extent)
-{
+extent_past_get(const extent_t *extent) {
 	return ((void *)((uintptr_t)extent_base_get(extent) +
 	    extent_size_get(extent)));
 }
 
 JEMALLOC_INLINE size_t
-extent_sn_get(const extent_t *extent)
-{
+extent_sn_get(const extent_t *extent) {
 	return (extent->e_sn);
 }
 
 JEMALLOC_INLINE bool
-extent_active_get(const extent_t *extent)
-{
+extent_active_get(const extent_t *extent) {
 	return (extent->e_active);
 }
 
 JEMALLOC_INLINE bool
-extent_retained_get(const extent_t *extent)
-{
+extent_retained_get(const extent_t *extent) {
 	return (qr_next(extent, qr_link) == extent);
 }
 
 JEMALLOC_INLINE bool
-extent_zeroed_get(const extent_t *extent)
-{
+extent_zeroed_get(const extent_t *extent) {
 	return (extent->e_zeroed);
 }
 
 JEMALLOC_INLINE bool
-extent_committed_get(const extent_t *extent)
-{
+extent_committed_get(const extent_t *extent) {
 	return (extent->e_committed);
 }
 
 JEMALLOC_INLINE bool
-extent_slab_get(const extent_t *extent)
-{
+extent_slab_get(const extent_t *extent) {
 	return (extent->e_slab);
 }
 
 JEMALLOC_INLINE arena_slab_data_t *
-extent_slab_data_get(extent_t *extent)
-{
+extent_slab_data_get(extent_t *extent) {
 	assert(extent->e_slab);
 	return (&extent->e_slab_data);
 }
 
 JEMALLOC_INLINE const arena_slab_data_t *
-extent_slab_data_get_const(const extent_t *extent)
-{
+extent_slab_data_get_const(const extent_t *extent) {
 	assert(extent->e_slab);
 	return (&extent->e_slab_data);
 }
 
 JEMALLOC_INLINE prof_tctx_t *
-extent_prof_tctx_get(const extent_t *extent)
-{
+extent_prof_tctx_get(const extent_t *extent) {
 	return ((prof_tctx_t *)atomic_read_p(
 	    &((extent_t *)extent)->e_prof_tctx_pun));
 }
 
 JEMALLOC_INLINE void
-extent_arena_set(extent_t *extent, arena_t *arena)
-{
+extent_arena_set(extent_t *extent, arena_t *arena) {
 	extent->e_arena = arena;
 }
 
 JEMALLOC_INLINE void
-extent_addr_set(extent_t *extent, void *addr)
-{
+extent_addr_set(extent_t *extent, void *addr) {
 	extent->e_addr = addr;
 }
 
 JEMALLOC_INLINE void
-extent_addr_randomize(tsdn_t *tsdn, extent_t *extent, size_t alignment)
-{
+extent_addr_randomize(tsdn_t *tsdn, extent_t *extent, size_t alignment) {
 	assert(extent_base_get(extent) == extent_addr_get(extent));
 
 	if (alignment < PAGE) {
@@ -197,58 +176,49 @@ extent_addr_randomize(tsdn_t *tsdn, extent_t *extent, size_t alignment)
 }
 
 JEMALLOC_INLINE void
-extent_size_set(extent_t *extent, size_t size)
-{
+extent_size_set(extent_t *extent, size_t size) {
 	extent->e_size = size;
 }
 
 JEMALLOC_INLINE void
-extent_usize_set(extent_t *extent, size_t usize)
-{
+extent_usize_set(extent_t *extent, size_t usize) {
 	extent->e_usize = usize;
 }
 
 JEMALLOC_INLINE void
-extent_sn_set(extent_t *extent, size_t sn)
-{
+extent_sn_set(extent_t *extent, size_t sn) {
 	extent->e_sn = sn;
 }
 
 JEMALLOC_INLINE void
-extent_active_set(extent_t *extent, bool active)
-{
+extent_active_set(extent_t *extent, bool active) {
 	extent->e_active = active;
 }
 
 JEMALLOC_INLINE void
-extent_zeroed_set(extent_t *extent, bool zeroed)
-{
+extent_zeroed_set(extent_t *extent, bool zeroed) {
 	extent->e_zeroed = zeroed;
 }
 
 JEMALLOC_INLINE void
-extent_committed_set(extent_t *extent, bool committed)
-{
+extent_committed_set(extent_t *extent, bool committed) {
 	extent->e_committed = committed;
 }
 
 JEMALLOC_INLINE void
-extent_slab_set(extent_t *extent, bool slab)
-{
+extent_slab_set(extent_t *extent, bool slab) {
 	extent->e_slab = slab;
 }
 
 JEMALLOC_INLINE void
-extent_prof_tctx_set(extent_t *extent, prof_tctx_t *tctx)
-{
+extent_prof_tctx_set(extent_t *extent, prof_tctx_t *tctx) {
 	atomic_write_p(&extent->e_prof_tctx_pun, tctx);
 }
 
 JEMALLOC_INLINE void
 extent_init(extent_t *extent, arena_t *arena, void *addr, size_t size,
     size_t usize, size_t sn, bool active, bool zeroed, bool committed,
-    bool slab)
-{
+    bool slab) {
 	assert(addr == PAGE_ADDR2BASE(addr) || !slab);
 
 	extent_arena_set(extent, arena);
@@ -260,26 +230,24 @@ extent_init(extent_t *extent, arena_t *arena, void *addr, size_t size,
 	extent_zeroed_set(extent, zeroed);
 	extent_committed_set(extent, committed);
 	extent_slab_set(extent, slab);
-	if (config_prof)
+	if (config_prof) {
 		extent_prof_tctx_set(extent, NULL);
+	}
 	qr_new(extent, qr_link);
 }
 
 JEMALLOC_INLINE void
-extent_ring_insert(extent_t *sentinel, extent_t *extent)
-{
+extent_ring_insert(extent_t *sentinel, extent_t *extent) {
 	qr_meld(sentinel, extent, extent_t, qr_link);
 }
 
 JEMALLOC_INLINE void
-extent_ring_remove(extent_t *extent)
-{
+extent_ring_remove(extent_t *extent) {
 	qr_remove(extent, qr_link);
 }
 
 JEMALLOC_INLINE int
-extent_sn_comp(const extent_t *a, const extent_t *b)
-{
+extent_sn_comp(const extent_t *a, const extent_t *b) {
 	size_t a_sn = extent_sn_get(a);
 	size_t b_sn = extent_sn_get(b);
 
@@ -287,8 +255,7 @@ extent_sn_comp(const extent_t *a, const extent_t *b)
 }
 
 JEMALLOC_INLINE int
-extent_ad_comp(const extent_t *a, const extent_t *b)
-{
+extent_ad_comp(const extent_t *a, const extent_t *b) {
 	uintptr_t a_addr = (uintptr_t)extent_addr_get(a);
 	uintptr_t b_addr = (uintptr_t)extent_addr_get(b);
 
@@ -296,13 +263,13 @@ extent_ad_comp(const extent_t *a, const extent_t *b)
 }
 
 JEMALLOC_INLINE int
-extent_snad_comp(const extent_t *a, const extent_t *b)
-{
+extent_snad_comp(const extent_t *a, const extent_t *b) {
 	int ret;
 
 	ret = extent_sn_comp(a, b);
-	if (ret != 0)
+	if (ret != 0) {
 		return (ret);
+	}
 
 	ret = extent_ad_comp(a, b);
 	return (ret);
@@ -21,20 +21,17 @@ void hash(const void *key, size_t len, const uint32_t seed,
 /******************************************************************************/
 /* Internal implementation. */
 JEMALLOC_INLINE uint32_t
-hash_rotl_32(uint32_t x, int8_t r)
-{
+hash_rotl_32(uint32_t x, int8_t r) {
 	return ((x << r) | (x >> (32 - r)));
 }

 JEMALLOC_INLINE uint64_t
-hash_rotl_64(uint64_t x, int8_t r)
-{
+hash_rotl_64(uint64_t x, int8_t r) {
 	return ((x << r) | (x >> (64 - r)));
 }

 JEMALLOC_INLINE uint32_t
-hash_get_block_32(const uint32_t *p, int i)
-{
+hash_get_block_32(const uint32_t *p, int i) {
 	/* Handle unaligned read. */
 	if (unlikely((uintptr_t)p & (sizeof(uint32_t)-1)) != 0) {
 		uint32_t ret;
@@ -47,8 +44,7 @@ hash_get_block_32(const uint32_t *p, int i)
 }

 JEMALLOC_INLINE uint64_t
-hash_get_block_64(const uint64_t *p, int i)
-{
+hash_get_block_64(const uint64_t *p, int i) {
 	/* Handle unaligned read. */
 	if (unlikely((uintptr_t)p & (sizeof(uint64_t)-1)) != 0) {
 		uint64_t ret;
@@ -61,8 +57,7 @@ hash_get_block_64(const uint64_t *p, int i)
 }

 JEMALLOC_INLINE uint32_t
-hash_fmix_32(uint32_t h)
-{
+hash_fmix_32(uint32_t h) {
 	h ^= h >> 16;
 	h *= 0x85ebca6b;
 	h ^= h >> 13;
@@ -73,8 +68,7 @@ hash_fmix_32(uint32_t h)
 }

 JEMALLOC_INLINE uint64_t
-hash_fmix_64(uint64_t k)
-{
+hash_fmix_64(uint64_t k) {
 	k ^= k >> 33;
 	k *= KQU(0xff51afd7ed558ccd);
 	k ^= k >> 33;
@@ -85,8 +79,7 @@ hash_fmix_64(uint64_t k)
 }

 JEMALLOC_INLINE uint32_t
-hash_x86_32(const void *key, int len, uint32_t seed)
-{
+hash_x86_32(const void *key, int len, uint32_t seed) {
 	const uint8_t *data = (const uint8_t *) key;
 	const int nblocks = len / 4;

@@ -137,8 +130,7 @@ hash_x86_32(const void *key, int len, uint32_t seed)

 UNUSED JEMALLOC_INLINE void
 hash_x86_128(const void *key, const int len, uint32_t seed,
-    uint64_t r_out[2])
-{
+    uint64_t r_out[2]) {
 	const uint8_t * data = (const uint8_t *) key;
 	const int nblocks = len / 16;

@@ -239,8 +231,7 @@ hash_x86_128(const void *key, const int len, uint32_t seed,

 UNUSED JEMALLOC_INLINE void
 hash_x64_128(const void *key, const int len, const uint32_t seed,
-    uint64_t r_out[2])
-{
+    uint64_t r_out[2]) {
 	const uint8_t *data = (const uint8_t *) key;
 	const int nblocks = len / 16;

@@ -318,8 +309,7 @@ hash_x64_128(const void *key, const int len, const uint32_t seed,
 /******************************************************************************/
 /* API. */
 JEMALLOC_INLINE void
-hash(const void *key, size_t len, const uint32_t seed, size_t r_hash[2])
-{
+hash(const void *key, size_t len, const uint32_t seed, size_t r_hash[2]) {
 	assert(len <= INT_MAX); /* Unfortunate implementation limitation. */

 #if (LG_SIZEOF_PTR == 3 && !defined(JEMALLOC_BIG_ENDIAN))
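The hash_* hunks above are brace-style edits to jemalloc's bundled MurmurHash3 routines. For context, the 32-bit finalizer that hash_fmix_32() implements is reproduced below as a stand-alone function; the constants are the published MurmurHash3 fmix32 constants, and main() is only a demo driver.

#include <stdint.h>
#include <stdio.h>

/* MurmurHash3 32-bit finalizer: xor-shifts and multiplies that spread every
 * input bit across the whole word. */
static uint32_t
demo_fmix_32(uint32_t h) {
	h ^= h >> 16;
	h *= 0x85ebca6b;
	h ^= h >> 13;
	h *= 0xc2b2ae35;
	h ^= h >> 16;
	return h;
}

int
main(void) {
	/* Nearby inputs map to very different outputs. */
	printf("%08x %08x\n", demo_fmix_32(1), demo_fmix_32(2));
	return 0;
}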
@@ -550,10 +550,10 @@ ticker_t *decay_ticker_get(tsd_t *tsd, unsigned ind);

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
 JEMALLOC_ALWAYS_INLINE pszind_t
-psz2ind(size_t psz)
-{
-	if (unlikely(psz > LARGE_MAXCLASS))
+psz2ind(size_t psz) {
+	if (unlikely(psz > LARGE_MAXCLASS)) {
 		return (NPSIZES);
+	}
 	{
 		pszind_t x = lg_floor((psz<<1)-1);
 		pszind_t shift = (x < LG_SIZE_CLASS_GROUP + LG_PAGE) ? 0 : x -
@@ -573,10 +573,10 @@ psz2ind(size_t psz)
 }

 JEMALLOC_INLINE size_t
-pind2sz_compute(pszind_t pind)
-{
-	if (unlikely(pind == NPSIZES))
+pind2sz_compute(pszind_t pind) {
+	if (unlikely(pind == NPSIZES)) {
 		return (LARGE_MAXCLASS + PAGE);
+	}
 	{
 		size_t grp = pind >> LG_SIZE_CLASS_GROUP;
 		size_t mod = pind & ((ZU(1) << LG_SIZE_CLASS_GROUP) - 1);
@@ -595,25 +595,23 @@ pind2sz_compute(pszind_t pind)
 }

 JEMALLOC_INLINE size_t
-pind2sz_lookup(pszind_t pind)
-{
+pind2sz_lookup(pszind_t pind) {
 	size_t ret = (size_t)pind2sz_tab[pind];
 	assert(ret == pind2sz_compute(pind));
 	return (ret);
 }

 JEMALLOC_INLINE size_t
-pind2sz(pszind_t pind)
-{
+pind2sz(pszind_t pind) {
 	assert(pind < NPSIZES+1);
 	return (pind2sz_lookup(pind));
 }

 JEMALLOC_INLINE size_t
-psz2u(size_t psz)
-{
-	if (unlikely(psz > LARGE_MAXCLASS))
+psz2u(size_t psz) {
+	if (unlikely(psz > LARGE_MAXCLASS)) {
 		return (LARGE_MAXCLASS + PAGE);
+	}
 	{
 		size_t x = lg_floor((psz<<1)-1);
 		size_t lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_PAGE + 1) ?
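psz2ind() and psz2u() above both lean on the expression lg_floor((psz<<1)-1), which evaluates to ceil(log2(psz)) for nonzero psz. A minimal, portable check of that identity is sketched below; demo_lg_floor() is a naive stand-in for jemalloc's lg_floor(), and the values in main() are arbitrary.

#include <assert.h>
#include <stdio.h>

/* Floor of log2(x) for x > 0, by counting shifts. */
static unsigned
demo_lg_floor(unsigned long x) {
	unsigned lg = 0;
	while (x >>= 1) {
		lg++;
	}
	return lg;
}

/* ceil(log2(x)) via the (x<<1)-1 trick used by the size-class code. */
static unsigned
demo_lg_ceil(unsigned long x) {
	return demo_lg_floor((x << 1) - 1);
}

int
main(void) {
	assert(demo_lg_ceil(1) == 0);
	assert(demo_lg_ceil(4096) == 12);	/* exact power of two */
	assert(demo_lg_ceil(4097) == 13);	/* rounds up */
	printf("ok\n");
	return 0;
}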
@@ -626,10 +624,10 @@ psz2u(size_t psz)
 }

 JEMALLOC_INLINE szind_t
-size2index_compute(size_t size)
-{
-	if (unlikely(size > LARGE_MAXCLASS))
+size2index_compute(size_t size) {
+	if (unlikely(size > LARGE_MAXCLASS)) {
 		return (NSIZES);
+	}
 #if (NTBINS != 0)
 	if (size <= (ZU(1) << LG_TINY_MAXCLASS)) {
 		szind_t lg_tmin = LG_TINY_MAXCLASS - NTBINS + 1;
@@ -656,8 +654,7 @@ size2index_compute(size_t size)
 }

 JEMALLOC_ALWAYS_INLINE szind_t
-size2index_lookup(size_t size)
-{
+size2index_lookup(size_t size) {
 	assert(size <= LOOKUP_MAXCLASS);
 	{
 		szind_t ret = (size2index_tab[(size-1) >> LG_TINY_MIN]);
@@ -667,20 +664,20 @@ size2index_lookup(size_t size)
 }

 JEMALLOC_ALWAYS_INLINE szind_t
-size2index(size_t size)
-{
+size2index(size_t size) {
 	assert(size > 0);
-	if (likely(size <= LOOKUP_MAXCLASS))
+	if (likely(size <= LOOKUP_MAXCLASS)) {
 		return (size2index_lookup(size));
+	}
 	return (size2index_compute(size));
 }

 JEMALLOC_INLINE size_t
-index2size_compute(szind_t index)
-{
+index2size_compute(szind_t index) {
 #if (NTBINS > 0)
-	if (index < NTBINS)
+	if (index < NTBINS) {
 		return (ZU(1) << (LG_TINY_MAXCLASS - NTBINS + 1 + index));
+	}
 #endif
 	{
 		size_t reduced_index = index - NTBINS;
@@ -702,25 +699,23 @@ index2size_compute(szind_t index)
 }

 JEMALLOC_ALWAYS_INLINE size_t
-index2size_lookup(szind_t index)
-{
+index2size_lookup(szind_t index) {
 	size_t ret = (size_t)index2size_tab[index];
 	assert(ret == index2size_compute(index));
 	return (ret);
 }

 JEMALLOC_ALWAYS_INLINE size_t
-index2size(szind_t index)
-{
+index2size(szind_t index) {
 	assert(index < NSIZES);
 	return (index2size_lookup(index));
 }

 JEMALLOC_ALWAYS_INLINE size_t
-s2u_compute(size_t size)
-{
-	if (unlikely(size > LARGE_MAXCLASS))
+s2u_compute(size_t size) {
+	if (unlikely(size > LARGE_MAXCLASS)) {
 		return (0);
+	}
 #if (NTBINS > 0)
 	if (size <= (ZU(1) << LG_TINY_MAXCLASS)) {
 		size_t lg_tmin = LG_TINY_MAXCLASS - NTBINS + 1;
@@ -741,8 +736,7 @@ s2u_compute(size_t size)
 }

 JEMALLOC_ALWAYS_INLINE size_t
-s2u_lookup(size_t size)
-{
+s2u_lookup(size_t size) {
 	size_t ret = index2size_lookup(size2index_lookup(size));

 	assert(ret == s2u_compute(size));
@@ -754,11 +748,11 @@ s2u_lookup(size_t size)
  * specified size.
  */
 JEMALLOC_ALWAYS_INLINE size_t
-s2u(size_t size)
-{
+s2u(size_t size) {
 	assert(size > 0);
-	if (likely(size <= LOOKUP_MAXCLASS))
+	if (likely(size <= LOOKUP_MAXCLASS)) {
 		return (s2u_lookup(size));
+	}
 	return (s2u_compute(size));
 }

@@ -767,8 +761,7 @@ s2u(size_t size)
  * specified size and alignment.
  */
 JEMALLOC_ALWAYS_INLINE size_t
-sa2u(size_t size, size_t alignment)
-{
+sa2u(size_t size, size_t alignment) {
 	size_t usize;

 	assert(alignment != 0 && ((alignment - 1) & alignment) == 0);
@@ -790,19 +783,21 @@ sa2u(size_t size, size_t alignment)
 		 * 192 | 11000000 |  64
 		 */
 		usize = s2u(ALIGNMENT_CEILING(size, alignment));
-		if (usize < LARGE_MINCLASS)
+		if (usize < LARGE_MINCLASS) {
 			return (usize);
+		}
 	}

 	/* Large size class.  Beware of overflow. */

-	if (unlikely(alignment > LARGE_MAXCLASS))
+	if (unlikely(alignment > LARGE_MAXCLASS)) {
 		return (0);
+	}

 	/* Make sure result is a large size class. */
-	if (size <= LARGE_MINCLASS)
+	if (size <= LARGE_MINCLASS) {
 		usize = LARGE_MINCLASS;
-	else {
+	} else {
 		usize = s2u(size);
 		if (usize < size) {
 			/* size_t overflow. */
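sa2u() above first rounds the request up to the requested alignment (via ALIGNMENT_CEILING) and only then maps it to a size class. The usual power-of-two rounding trick that this is assumed to perform is shown below as a self-contained sketch; demo_alignment_ceiling() is not the jemalloc macro itself.

#include <assert.h>
#include <stddef.h>
#include <stdio.h>

/* Round s up to a multiple of alignment; alignment must be a power of two. */
static size_t
demo_alignment_ceiling(size_t s, size_t alignment) {
	assert(alignment != 0 && ((alignment - 1) & alignment) == 0);
	return (s + alignment - 1) & ~(alignment - 1);
}

int
main(void) {
	assert(demo_alignment_ceiling(1, 64) == 64);
	assert(demo_alignment_ceiling(64, 64) == 64);
	assert(demo_alignment_ceiling(65, 64) == 128);
	printf("ok\n");
	return 0;
}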
@@ -823,35 +818,33 @@ sa2u(size_t size, size_t alignment)

 /* Choose an arena based on a per-thread value. */
 JEMALLOC_INLINE arena_t *
-arena_choose_impl(tsd_t *tsd, arena_t *arena, bool internal)
-{
+arena_choose_impl(tsd_t *tsd, arena_t *arena, bool internal) {
 	arena_t *ret;

-	if (arena != NULL)
+	if (arena != NULL) {
 		return (arena);
+	}

 	ret = internal ? tsd_iarena_get(tsd) : tsd_arena_get(tsd);
-	if (unlikely(ret == NULL))
+	if (unlikely(ret == NULL)) {
 		ret = arena_choose_hard(tsd, internal);
+	}

 	return (ret);
 }

 JEMALLOC_INLINE arena_t *
-arena_choose(tsd_t *tsd, arena_t *arena)
-{
+arena_choose(tsd_t *tsd, arena_t *arena) {
 	return (arena_choose_impl(tsd, arena, false));
 }

 JEMALLOC_INLINE arena_t *
-arena_ichoose(tsd_t *tsd, arena_t *arena)
-{
+arena_ichoose(tsd_t *tsd, arena_t *arena) {
 	return (arena_choose_impl(tsd, arena, true));
 }

 JEMALLOC_INLINE arena_tdata_t *
-arena_tdata_get(tsd_t *tsd, unsigned ind, bool refresh_if_missing)
-{
+arena_tdata_get(tsd_t *tsd, unsigned ind, bool refresh_if_missing) {
 	arena_tdata_t *tdata;
 	arena_tdata_t *arenas_tdata = tsd_arenas_tdata_get(tsd);

@@ -869,14 +862,14 @@ arena_tdata_get(tsd_t *tsd, unsigned ind, bool refresh_if_missing)
 	}

 	tdata = &arenas_tdata[ind];
-	if (likely(tdata != NULL) || !refresh_if_missing)
+	if (likely(tdata != NULL) || !refresh_if_missing) {
 		return (tdata);
+	}
 	return (arena_tdata_get_hard(tsd, ind));
 }

 JEMALLOC_INLINE arena_t *
-arena_get(tsdn_t *tsdn, unsigned ind, bool init_if_missing)
-{
+arena_get(tsdn_t *tsdn, unsigned ind, bool init_if_missing) {
 	arena_t *ret;

 	assert(ind <= MALLOCX_ARENA_MAX);
@@ -893,13 +886,13 @@ arena_get(tsdn_t *tsdn, unsigned ind, bool init_if_missing)
 }

 JEMALLOC_INLINE ticker_t *
-decay_ticker_get(tsd_t *tsd, unsigned ind)
-{
+decay_ticker_get(tsd_t *tsd, unsigned ind) {
 	arena_tdata_t *tdata;

 	tdata = arena_tdata_get(tsd, ind, true);
-	if (unlikely(tdata == NULL))
+	if (unlikely(tdata == NULL)) {
 		return (NULL);
+	}
 	return (&tdata->decay_ticker);
 }
 #endif
@@ -917,8 +910,7 @@ extent_t *iealloc(tsdn_t *tsdn, const void *ptr);

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
 JEMALLOC_ALWAYS_INLINE extent_t *
-iealloc(tsdn_t *tsdn, const void *ptr)
-{
+iealloc(tsdn_t *tsdn, const void *ptr) {
 	return (extent_lookup(tsdn, ptr, true));
 }
 #endif
@@ -958,8 +950,7 @@ bool ixalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
 JEMALLOC_ALWAYS_INLINE arena_t *
-iaalloc(tsdn_t *tsdn, const void *ptr)
-{
+iaalloc(tsdn_t *tsdn, const void *ptr) {
 	assert(ptr != NULL);

 	return (arena_aalloc(tsdn, ptr));
@@ -973,8 +964,7 @@ iaalloc(tsdn_t *tsdn, const void *ptr)
  * size_t sz = isalloc(tsdn, extent, ptr);
  */
 JEMALLOC_ALWAYS_INLINE size_t
-isalloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr)
-{
+isalloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr) {
 	assert(ptr != NULL);

 	return (arena_salloc(tsdn, extent, ptr));
@@ -982,8 +972,7 @@ isalloc(tsdn_t *tsdn, const extent_t *extent, const void *ptr)

 JEMALLOC_ALWAYS_INLINE void *
 iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache,
-    bool is_internal, arena_t *arena, bool slow_path)
-{
+    bool is_internal, arena_t *arena, bool slow_path) {
 	void *ret;

 	assert(size != 0);
@@ -1000,16 +989,14 @@ iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache,
 }

 JEMALLOC_ALWAYS_INLINE void *
-ialloc(tsd_t *tsd, size_t size, szind_t ind, bool zero, bool slow_path)
-{
+ialloc(tsd_t *tsd, size_t size, szind_t ind, bool zero, bool slow_path) {
 	return (iallocztm(tsd_tsdn(tsd), size, ind, zero, tcache_get(tsd, true),
 	    false, NULL, slow_path));
 }

 JEMALLOC_ALWAYS_INLINE void *
 ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
-    tcache_t *tcache, bool is_internal, arena_t *arena)
-{
+    tcache_t *tcache, bool is_internal, arena_t *arena) {
 	void *ret;

 	assert(usize != 0);
@@ -1029,21 +1016,18 @@ ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,

 JEMALLOC_ALWAYS_INLINE void *
 ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
-    tcache_t *tcache, arena_t *arena)
-{
+    tcache_t *tcache, arena_t *arena) {
 	return (ipallocztm(tsdn, usize, alignment, zero, tcache, false, arena));
 }

 JEMALLOC_ALWAYS_INLINE void *
-ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero)
-{
+ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero) {
 	return (ipallocztm(tsd_tsdn(tsd), usize, alignment, zero,
 	    tcache_get(tsd, true), false, NULL));
 }

 JEMALLOC_ALWAYS_INLINE size_t
-ivsalloc(tsdn_t *tsdn, const void *ptr)
-{
+ivsalloc(tsdn_t *tsdn, const void *ptr) {
 	extent_t *extent;

 	/*
@@ -1055,8 +1039,9 @@ ivsalloc(tsdn_t *tsdn, const void *ptr)
 	 * failure.
 	 * */
 	extent = extent_lookup(tsdn, ptr, false);
-	if (extent == NULL)
+	if (extent == NULL) {
 		return (0);
+	}
 	assert(extent_active_get(extent));
 	/* Only slab members should be looked up via interior pointers. */
 	assert(extent_addr_get(extent) == ptr || extent_slab_get(extent));
@@ -1066,8 +1051,7 @@ ivsalloc(tsdn_t *tsdn, const void *ptr)

 JEMALLOC_ALWAYS_INLINE void
 idalloctm(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
-    bool is_internal, bool slow_path)
-{
+    bool is_internal, bool slow_path) {
 	assert(ptr != NULL);
 	assert(!is_internal || tcache == NULL);
 	assert(!is_internal || arena_ind_get(iaalloc(tsdn, ptr)) <
@@ -1081,42 +1065,43 @@ idalloctm(tsdn_t *tsdn, extent_t *extent, void *ptr, tcache_t *tcache,
 }

 JEMALLOC_ALWAYS_INLINE void
-idalloc(tsd_t *tsd, extent_t *extent, void *ptr)
-{
+idalloc(tsd_t *tsd, extent_t *extent, void *ptr) {
 	idalloctm(tsd_tsdn(tsd), extent, ptr, tcache_get(tsd, false), false,
 	    true);
 }

 JEMALLOC_ALWAYS_INLINE void
 isdalloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t size,
-    tcache_t *tcache, bool slow_path)
-{
+    tcache_t *tcache, bool slow_path) {
 	arena_sdalloc(tsdn, extent, ptr, size, tcache, slow_path);
 }

 JEMALLOC_ALWAYS_INLINE void *
 iralloct_realign(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,
     size_t size, size_t extra, size_t alignment, bool zero, tcache_t *tcache,
-    arena_t *arena)
-{
+    arena_t *arena) {
 	void *p;
 	size_t usize, copysize;

 	usize = sa2u(size + extra, alignment);
-	if (unlikely(usize == 0 || usize > LARGE_MAXCLASS))
+	if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {
 		return (NULL);
+	}
 	p = ipalloct(tsdn, usize, alignment, zero, tcache, arena);
 	if (p == NULL) {
-		if (extra == 0)
+		if (extra == 0) {
 			return (NULL);
+		}
 		/* Try again, without extra this time. */
 		usize = sa2u(size, alignment);
-		if (unlikely(usize == 0 || usize > LARGE_MAXCLASS))
+		if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {
 			return (NULL);
+		}
 		p = ipalloct(tsdn, usize, alignment, zero, tcache, arena);
-		if (p == NULL)
+		if (p == NULL) {
 			return (NULL);
+		}
 	}
 	/*
 	 * Copy at most size bytes (not size+extra), since the caller has no
 	 * expectation that the extra bytes will be reliably preserved.
@@ -1129,8 +1114,7 @@ iralloct_realign(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize,

 JEMALLOC_ALWAYS_INLINE void *
 iralloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize, size_t size,
-    size_t alignment, bool zero, tcache_t *tcache, arena_t *arena)
-{
+    size_t alignment, bool zero, tcache_t *tcache, arena_t *arena) {
 	assert(ptr != NULL);
 	assert(size != 0);

@@ -1150,16 +1134,14 @@ iralloct(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize, size_t size,

 JEMALLOC_ALWAYS_INLINE void *
 iralloc(tsd_t *tsd, extent_t *extent, void *ptr, size_t oldsize, size_t size,
-    size_t alignment, bool zero)
-{
+    size_t alignment, bool zero) {
 	return (iralloct(tsd_tsdn(tsd), extent, ptr, oldsize, size, alignment,
 	    zero, tcache_get(tsd, true), NULL));
 }

 JEMALLOC_ALWAYS_INLINE bool
 ixalloc(tsdn_t *tsdn, extent_t *extent, void *ptr, size_t oldsize, size_t size,
-    size_t extra, size_t alignment, bool zero)
-{
+    size_t extra, size_t alignment, bool zero) {
 	assert(ptr != NULL);
 	assert(size != 0);

@@ -61,8 +61,7 @@ typedef intptr_t ssize_t;
 # pragma warning(disable: 4996)
 #if _MSC_VER < 1800
 static int
-isblank(int c)
-{
+isblank(int c) {
 	return (c == '\t' || c == ' ');
 }
 #endif
|
@ -10,8 +10,7 @@ void malloc_mutex_assert_not_owner(tsdn_t *tsdn, malloc_mutex_t *mutex);
|
|||||||
|
|
||||||
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_MUTEX_C_))
|
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_MUTEX_C_))
|
||||||
JEMALLOC_INLINE void
|
JEMALLOC_INLINE void
|
||||||
malloc_mutex_lock(tsdn_t *tsdn, malloc_mutex_t *mutex)
|
malloc_mutex_lock(tsdn_t *tsdn, malloc_mutex_t *mutex) {
|
||||||
{
|
|
||||||
if (isthreaded) {
|
if (isthreaded) {
|
||||||
witness_assert_not_owner(tsdn, &mutex->witness);
|
witness_assert_not_owner(tsdn, &mutex->witness);
|
||||||
#ifdef _WIN32
|
#ifdef _WIN32
|
||||||
@ -32,8 +31,7 @@ malloc_mutex_lock(tsdn_t *tsdn, malloc_mutex_t *mutex)
|
|||||||
}
|
}
|
||||||
|
|
||||||
JEMALLOC_INLINE void
|
JEMALLOC_INLINE void
|
||||||
malloc_mutex_unlock(tsdn_t *tsdn, malloc_mutex_t *mutex)
|
malloc_mutex_unlock(tsdn_t *tsdn, malloc_mutex_t *mutex) {
|
||||||
{
|
|
||||||
if (isthreaded) {
|
if (isthreaded) {
|
||||||
witness_unlock(tsdn, &mutex->witness);
|
witness_unlock(tsdn, &mutex->witness);
|
||||||
#ifdef _WIN32
|
#ifdef _WIN32
|
||||||
@ -53,18 +51,18 @@ malloc_mutex_unlock(tsdn_t *tsdn, malloc_mutex_t *mutex)
|
|||||||
}
|
}
|
||||||
|
|
||||||
JEMALLOC_INLINE void
|
JEMALLOC_INLINE void
|
||||||
malloc_mutex_assert_owner(tsdn_t *tsdn, malloc_mutex_t *mutex)
|
malloc_mutex_assert_owner(tsdn_t *tsdn, malloc_mutex_t *mutex) {
|
||||||
{
|
if (isthreaded) {
|
||||||
if (isthreaded)
|
|
||||||
witness_assert_owner(tsdn, &mutex->witness);
|
witness_assert_owner(tsdn, &mutex->witness);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
JEMALLOC_INLINE void
|
JEMALLOC_INLINE void
|
||||||
malloc_mutex_assert_not_owner(tsdn_t *tsdn, malloc_mutex_t *mutex)
|
malloc_mutex_assert_not_owner(tsdn_t *tsdn, malloc_mutex_t *mutex) {
|
||||||
{
|
if (isthreaded) {
|
||||||
if (isthreaded)
|
|
||||||
witness_assert_not_owner(tsdn, &mutex->witness);
|
witness_assert_not_owner(tsdn, &mutex->witness);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#endif /* JEMALLOC_INTERNAL_MUTEX_INLINES_H */
|
#endif /* JEMALLOC_INTERNAL_MUTEX_INLINES_H */
|
||||||
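The malloc_mutex_* wrappers above pair locking with witness ownership assertions. As a rough, portable approximation of that behavior, the sketch below wraps an error-checking pthread mutex so that misuse fails fast instead of silently deadlocking; demo_mutex_t and the demo_* functions are hypothetical names, not jemalloc API.

#include <assert.h>
#include <pthread.h>

typedef struct {
	pthread_mutex_t lock;
} demo_mutex_t;

static void
demo_mutex_init(demo_mutex_t *mu) {
	pthread_mutexattr_t attr;
	pthread_mutexattr_init(&attr);
	/* ERRORCHECK makes relocking by the owner an error rather than a hang. */
	pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_ERRORCHECK);
	pthread_mutex_init(&mu->lock, &attr);
	pthread_mutexattr_destroy(&attr);
}

static void
demo_mutex_lock(demo_mutex_t *mu) {
	int err = pthread_mutex_lock(&mu->lock);
	assert(err == 0);	/* EDEADLK here would mean "already the owner". */
	(void)err;
}

static void
demo_mutex_unlock(demo_mutex_t *mu) {
	int err = pthread_mutex_unlock(&mu->lock);
	assert(err == 0);	/* EPERM here would mean "not the owner". */
	(void)err;
}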
|
@ -58,17 +58,18 @@ struct { \
|
|||||||
phn_prev_set(a_type, a_field, a_phn1, a_phn0); \
|
phn_prev_set(a_type, a_field, a_phn1, a_phn0); \
|
||||||
phn0child = phn_lchild_get(a_type, a_field, a_phn0); \
|
phn0child = phn_lchild_get(a_type, a_field, a_phn0); \
|
||||||
phn_next_set(a_type, a_field, a_phn1, phn0child); \
|
phn_next_set(a_type, a_field, a_phn1, phn0child); \
|
||||||
if (phn0child != NULL) \
|
if (phn0child != NULL) { \
|
||||||
phn_prev_set(a_type, a_field, phn0child, a_phn1); \
|
phn_prev_set(a_type, a_field, phn0child, a_phn1); \
|
||||||
|
} \
|
||||||
phn_lchild_set(a_type, a_field, a_phn0, a_phn1); \
|
phn_lchild_set(a_type, a_field, a_phn0, a_phn1); \
|
||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
#define phn_merge(a_type, a_field, a_phn0, a_phn1, a_cmp, r_phn) do { \
|
#define phn_merge(a_type, a_field, a_phn0, a_phn1, a_cmp, r_phn) do { \
|
||||||
if (a_phn0 == NULL) \
|
if (a_phn0 == NULL) { \
|
||||||
r_phn = a_phn1; \
|
r_phn = a_phn1; \
|
||||||
else if (a_phn1 == NULL) \
|
} else if (a_phn1 == NULL) { \
|
||||||
r_phn = a_phn0; \
|
r_phn = a_phn0; \
|
||||||
else if (a_cmp(a_phn0, a_phn1) < 0) { \
|
} else if (a_cmp(a_phn0, a_phn1) < 0) { \
|
||||||
phn_merge_ordered(a_type, a_field, a_phn0, a_phn1, \
|
phn_merge_ordered(a_type, a_field, a_phn0, a_phn1, \
|
||||||
a_cmp); \
|
a_cmp); \
|
||||||
r_phn = a_phn0; \
|
r_phn = a_phn0; \
|
||||||
@ -95,8 +96,9 @@ struct { \
|
|||||||
*/ \
|
*/ \
|
||||||
if (phn1 != NULL) { \
|
if (phn1 != NULL) { \
|
||||||
a_type *phnrest = phn_next_get(a_type, a_field, phn1); \
|
a_type *phnrest = phn_next_get(a_type, a_field, phn1); \
|
||||||
if (phnrest != NULL) \
|
if (phnrest != NULL) { \
|
||||||
phn_prev_set(a_type, a_field, phnrest, NULL); \
|
phn_prev_set(a_type, a_field, phnrest, NULL); \
|
||||||
|
} \
|
||||||
phn_prev_set(a_type, a_field, phn0, NULL); \
|
phn_prev_set(a_type, a_field, phn0, NULL); \
|
||||||
phn_next_set(a_type, a_field, phn0, NULL); \
|
phn_next_set(a_type, a_field, phn0, NULL); \
|
||||||
phn_prev_set(a_type, a_field, phn1, NULL); \
|
phn_prev_set(a_type, a_field, phn1, NULL); \
|
||||||
@ -150,8 +152,9 @@ struct { \
|
|||||||
NULL); \
|
NULL); \
|
||||||
phn_merge(a_type, a_field, phn0, phn1, \
|
phn_merge(a_type, a_field, phn0, phn1, \
|
||||||
a_cmp, phn0); \
|
a_cmp, phn0); \
|
||||||
if (head == NULL) \
|
if (head == NULL) { \
|
||||||
break; \
|
break; \
|
||||||
|
} \
|
||||||
phn_next_set(a_type, a_field, tail, \
|
phn_next_set(a_type, a_field, tail, \
|
||||||
phn0); \
|
phn0); \
|
||||||
tail = phn0; \
|
tail = phn0; \
|
||||||
@ -179,9 +182,9 @@ struct { \
|
|||||||
|
|
||||||
#define ph_merge_children(a_type, a_field, a_phn, a_cmp, r_phn) do { \
|
#define ph_merge_children(a_type, a_field, a_phn, a_cmp, r_phn) do { \
|
||||||
a_type *lchild = phn_lchild_get(a_type, a_field, a_phn); \
|
a_type *lchild = phn_lchild_get(a_type, a_field, a_phn); \
|
||||||
if (lchild == NULL) \
|
if (lchild == NULL) { \
|
||||||
r_phn = NULL; \
|
r_phn = NULL; \
|
||||||
else { \
|
} else { \
|
||||||
ph_merge_siblings(a_type, a_field, lchild, a_cmp, \
|
ph_merge_siblings(a_type, a_field, lchild, a_cmp, \
|
||||||
r_phn); \
|
r_phn); \
|
||||||
} \
|
} \
|
||||||
@ -205,26 +208,23 @@ a_attr void a_prefix##remove(a_ph_type *ph, a_type *phn);
|
|||||||
*/
|
*/
|
||||||
#define ph_gen(a_attr, a_prefix, a_ph_type, a_type, a_field, a_cmp) \
|
#define ph_gen(a_attr, a_prefix, a_ph_type, a_type, a_field, a_cmp) \
|
||||||
a_attr void \
|
a_attr void \
|
||||||
a_prefix##new(a_ph_type *ph) \
|
a_prefix##new(a_ph_type *ph) { \
|
||||||
{ \
|
|
||||||
memset(ph, 0, sizeof(ph(a_type))); \
|
memset(ph, 0, sizeof(ph(a_type))); \
|
||||||
} \
|
} \
|
||||||
a_attr bool \
|
a_attr bool \
|
||||||
a_prefix##empty(a_ph_type *ph) \
|
a_prefix##empty(a_ph_type *ph) { \
|
||||||
{ \
|
|
||||||
return (ph->ph_root == NULL); \
|
return (ph->ph_root == NULL); \
|
||||||
} \
|
} \
|
||||||
a_attr a_type * \
|
a_attr a_type * \
|
||||||
a_prefix##first(a_ph_type *ph) \
|
a_prefix##first(a_ph_type *ph) { \
|
||||||
{ \
|
if (ph->ph_root == NULL) { \
|
||||||
if (ph->ph_root == NULL) \
|
|
||||||
return (NULL); \
|
return (NULL); \
|
||||||
|
} \
|
||||||
ph_merge_aux(a_type, a_field, ph, a_cmp); \
|
ph_merge_aux(a_type, a_field, ph, a_cmp); \
|
||||||
return (ph->ph_root); \
|
return (ph->ph_root); \
|
||||||
} \
|
} \
|
||||||
a_attr void \
|
a_attr void \
|
||||||
a_prefix##insert(a_ph_type *ph, a_type *phn) \
|
a_prefix##insert(a_ph_type *ph, a_type *phn) { \
|
||||||
{ \
|
|
||||||
memset(&phn->a_field, 0, sizeof(phn(a_type))); \
|
memset(&phn->a_field, 0, sizeof(phn(a_type))); \
|
||||||
\
|
\
|
||||||
/* \
|
/* \
|
||||||
@ -235,9 +235,9 @@ a_prefix##insert(a_ph_type *ph, a_type *phn) \
|
|||||||
* constant-time, whereas eager merging would make insert \
|
* constant-time, whereas eager merging would make insert \
|
||||||
* O(log n). \
|
* O(log n). \
|
||||||
*/ \
|
*/ \
|
||||||
if (ph->ph_root == NULL) \
|
if (ph->ph_root == NULL) { \
|
||||||
ph->ph_root = phn; \
|
ph->ph_root = phn; \
|
||||||
else { \
|
} else { \
|
||||||
phn_next_set(a_type, a_field, phn, phn_next_get(a_type, \
|
phn_next_set(a_type, a_field, phn, phn_next_get(a_type, \
|
||||||
a_field, ph->ph_root)); \
|
a_field, ph->ph_root)); \
|
||||||
if (phn_next_get(a_type, a_field, ph->ph_root) != \
|
if (phn_next_get(a_type, a_field, ph->ph_root) != \
|
||||||
@ -251,12 +251,12 @@ a_prefix##insert(a_ph_type *ph, a_type *phn) \
|
|||||||
} \
|
} \
|
||||||
} \
|
} \
|
||||||
a_attr a_type * \
|
a_attr a_type * \
|
||||||
a_prefix##remove_first(a_ph_type *ph) \
|
a_prefix##remove_first(a_ph_type *ph) { \
|
||||||
{ \
|
|
||||||
a_type *ret; \
|
a_type *ret; \
|
||||||
\
|
\
|
||||||
if (ph->ph_root == NULL) \
|
if (ph->ph_root == NULL) { \
|
||||||
return (NULL); \
|
return (NULL); \
|
||||||
|
} \
|
||||||
ph_merge_aux(a_type, a_field, ph, a_cmp); \
|
ph_merge_aux(a_type, a_field, ph, a_cmp); \
|
||||||
\
|
\
|
||||||
ret = ph->ph_root; \
|
ret = ph->ph_root; \
|
||||||
@ -267,8 +267,7 @@ a_prefix##remove_first(a_ph_type *ph) \
|
|||||||
return (ret); \
|
return (ret); \
|
||||||
} \
|
} \
|
||||||
a_attr void \
|
a_attr void \
|
||||||
a_prefix##remove(a_ph_type *ph, a_type *phn) \
|
a_prefix##remove(a_ph_type *ph, a_type *phn) { \
|
||||||
{ \
|
|
||||||
a_type *replace, *parent; \
|
a_type *replace, *parent; \
|
||||||
\
|
\
|
||||||
/* \
|
/* \
|
||||||
@ -286,9 +285,10 @@ a_prefix##remove(a_ph_type *ph, a_type *phn) \
|
|||||||
\
|
\
|
||||||
/* Get parent (if phn is leftmost child) before mutating. */ \
|
/* Get parent (if phn is leftmost child) before mutating. */ \
|
||||||
if ((parent = phn_prev_get(a_type, a_field, phn)) != NULL) { \
|
if ((parent = phn_prev_get(a_type, a_field, phn)) != NULL) { \
|
||||||
if (phn_lchild_get(a_type, a_field, parent) != phn) \
|
if (phn_lchild_get(a_type, a_field, parent) != phn) { \
|
||||||
parent = NULL; \
|
parent = NULL; \
|
||||||
} \
|
} \
|
||||||
|
} \
|
||||||
/* Find a possible replacement node, and link to parent. */ \
|
/* Find a possible replacement node, and link to parent. */ \
|
||||||
ph_merge_children(a_type, a_field, phn, a_cmp, replace); \
|
ph_merge_children(a_type, a_field, phn, a_cmp, replace); \
|
||||||
/* Set next/prev for sibling linked list. */ \
|
/* Set next/prev for sibling linked list. */ \
|
||||||
|
@ -18,20 +18,17 @@ size_t prng_range_zu(size_t *state, size_t range, bool atomic);
|
|||||||
|
|
||||||
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_PRNG_C_))
|
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_PRNG_C_))
|
||||||
JEMALLOC_ALWAYS_INLINE uint32_t
|
JEMALLOC_ALWAYS_INLINE uint32_t
|
||||||
prng_state_next_u32(uint32_t state)
|
prng_state_next_u32(uint32_t state) {
|
||||||
{
|
|
||||||
return ((state * PRNG_A_32) + PRNG_C_32);
|
return ((state * PRNG_A_32) + PRNG_C_32);
|
||||||
}
|
}
|
||||||
|
|
||||||
JEMALLOC_ALWAYS_INLINE uint64_t
|
JEMALLOC_ALWAYS_INLINE uint64_t
|
||||||
prng_state_next_u64(uint64_t state)
|
prng_state_next_u64(uint64_t state) {
|
||||||
{
|
|
||||||
return ((state * PRNG_A_64) + PRNG_C_64);
|
return ((state * PRNG_A_64) + PRNG_C_64);
|
||||||
}
|
}
|
||||||
|
|
||||||
JEMALLOC_ALWAYS_INLINE size_t
|
JEMALLOC_ALWAYS_INLINE size_t
|
||||||
prng_state_next_zu(size_t state)
|
prng_state_next_zu(size_t state) {
|
||||||
{
|
|
||||||
#if LG_SIZEOF_PTR == 2
|
#if LG_SIZEOF_PTR == 2
|
||||||
return ((state * PRNG_A_32) + PRNG_C_32);
|
return ((state * PRNG_A_32) + PRNG_C_32);
|
||||||
#elif LG_SIZEOF_PTR == 3
|
#elif LG_SIZEOF_PTR == 3
|
||||||
@ -42,8 +39,7 @@ prng_state_next_zu(size_t state)
|
|||||||
}
|
}
|
||||||
|
|
||||||
JEMALLOC_ALWAYS_INLINE uint32_t
|
JEMALLOC_ALWAYS_INLINE uint32_t
|
||||||
prng_lg_range_u32(uint32_t *state, unsigned lg_range, bool atomic)
|
prng_lg_range_u32(uint32_t *state, unsigned lg_range, bool atomic) {
|
||||||
{
|
|
||||||
uint32_t ret, state1;
|
uint32_t ret, state1;
|
||||||
|
|
||||||
assert(lg_range > 0);
|
assert(lg_range > 0);
|
||||||
@ -67,8 +63,7 @@ prng_lg_range_u32(uint32_t *state, unsigned lg_range, bool atomic)
|
|||||||
|
|
||||||
/* 64-bit atomic operations cannot be supported on all relevant platforms. */
|
/* 64-bit atomic operations cannot be supported on all relevant platforms. */
|
||||||
JEMALLOC_ALWAYS_INLINE uint64_t
|
JEMALLOC_ALWAYS_INLINE uint64_t
|
||||||
prng_lg_range_u64(uint64_t *state, unsigned lg_range)
|
prng_lg_range_u64(uint64_t *state, unsigned lg_range) {
|
||||||
{
|
|
||||||
uint64_t ret, state1;
|
uint64_t ret, state1;
|
||||||
|
|
||||||
assert(lg_range > 0);
|
assert(lg_range > 0);
|
||||||
@ -82,8 +77,7 @@ prng_lg_range_u64(uint64_t *state, unsigned lg_range)
|
|||||||
}
|
}
|
||||||
|
|
||||||
JEMALLOC_ALWAYS_INLINE size_t
|
JEMALLOC_ALWAYS_INLINE size_t
|
||||||
prng_lg_range_zu(size_t *state, unsigned lg_range, bool atomic)
|
prng_lg_range_zu(size_t *state, unsigned lg_range, bool atomic) {
|
||||||
{
|
|
||||||
size_t ret, state1;
|
size_t ret, state1;
|
||||||
|
|
||||||
assert(lg_range > 0);
|
assert(lg_range > 0);
|
||||||
@ -106,8 +100,7 @@ prng_lg_range_zu(size_t *state, unsigned lg_range, bool atomic)
|
|||||||
}
|
}
|
||||||
|
|
||||||
JEMALLOC_ALWAYS_INLINE uint32_t
|
JEMALLOC_ALWAYS_INLINE uint32_t
|
||||||
prng_range_u32(uint32_t *state, uint32_t range, bool atomic)
|
prng_range_u32(uint32_t *state, uint32_t range, bool atomic) {
|
||||||
{
|
|
||||||
uint32_t ret;
|
uint32_t ret;
|
||||||
unsigned lg_range;
|
unsigned lg_range;
|
||||||
|
|
||||||
@ -125,8 +118,7 @@ prng_range_u32(uint32_t *state, uint32_t range, bool atomic)
|
|||||||
}
|
}
|
||||||
|
|
||||||
JEMALLOC_ALWAYS_INLINE uint64_t
|
JEMALLOC_ALWAYS_INLINE uint64_t
|
||||||
prng_range_u64(uint64_t *state, uint64_t range)
|
prng_range_u64(uint64_t *state, uint64_t range) {
|
||||||
{
|
|
||||||
uint64_t ret;
|
uint64_t ret;
|
||||||
unsigned lg_range;
|
unsigned lg_range;
|
||||||
|
|
||||||
@ -144,8 +136,7 @@ prng_range_u64(uint64_t *state, uint64_t range)
|
|||||||
}
|
}
|
||||||
|
|
||||||
JEMALLOC_ALWAYS_INLINE size_t
|
JEMALLOC_ALWAYS_INLINE size_t
|
||||||
prng_range_zu(size_t *state, size_t range, bool atomic)
|
prng_range_zu(size_t *state, size_t range, bool atomic) {
|
||||||
{
|
|
||||||
size_t ret;
|
size_t ret;
|
||||||
unsigned lg_range;
|
unsigned lg_range;
|
||||||
|
|
||||||
|
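The prng_* helpers above advance a linear congruential generator (state * A + C) and then draw random bits from the high end of the new state, since an LCG's low bits are weak. That pattern is shown below as a stand-alone sketch; the multiplier and increment are generic 64-bit LCG constants chosen for the demo, not jemalloc's PRNG_A_64/PRNG_C_64 values.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define DEMO_PRNG_A UINT64_C(6364136223846793005)
#define DEMO_PRNG_C UINT64_C(1442695040888963407)

static uint64_t
demo_prng_state_next(uint64_t state) {
	return (state * DEMO_PRNG_A) + DEMO_PRNG_C;
}

/* Return a value in [0, 2^lg_range), taking the high bits of the new state. */
static uint64_t
demo_prng_lg_range(uint64_t *state, unsigned lg_range) {
	assert(lg_range > 0 && lg_range <= 64);
	*state = demo_prng_state_next(*state);
	return *state >> (64 - lg_range);
}

int
main(void) {
	uint64_t state = 42;
	for (int i = 0; i < 4; i++) {
		printf("%llu\n",
		    (unsigned long long)demo_prng_lg_range(&state, 10));
	}
	return 0;
}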
@@ -27,8 +27,7 @@ void prof_free(tsd_t *tsd, const extent_t *extent, const void *ptr,

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_PROF_C_))
 JEMALLOC_ALWAYS_INLINE bool
-prof_active_get_unlocked(void)
-{
+prof_active_get_unlocked(void) {
 	/*
 	 * Even if opt_prof is true, sampling can be temporarily disabled by
 	 * setting prof_active to false. No locking is used when reading
@@ -39,8 +38,7 @@ prof_active_get_unlocked(void)
 }

 JEMALLOC_ALWAYS_INLINE bool
-prof_gdump_get_unlocked(void)
-{
+prof_gdump_get_unlocked(void) {
 	/*
 	 * No locking is used when reading prof_gdump_val in the fast path, so
 	 * there are no guarantees regarding how long it will take for all
@@ -50,8 +48,7 @@ prof_gdump_get_unlocked(void)
 }

 JEMALLOC_ALWAYS_INLINE prof_tdata_t *
-prof_tdata_get(tsd_t *tsd, bool create)
-{
+prof_tdata_get(tsd_t *tsd, bool create) {
 	prof_tdata_t *tdata;

 	cassert(config_prof);
@@ -74,8 +71,7 @@ prof_tdata_get(tsd_t *tsd, bool create)
 }

 JEMALLOC_ALWAYS_INLINE prof_tctx_t *
-prof_tctx_get(tsdn_t *tsdn, const extent_t *extent, const void *ptr)
-{
+prof_tctx_get(tsdn_t *tsdn, const extent_t *extent, const void *ptr) {
 	cassert(config_prof);
 	assert(ptr != NULL);

@@ -84,8 +80,7 @@ prof_tctx_get(tsdn_t *tsdn, const extent_t *extent, const void *ptr)

 JEMALLOC_ALWAYS_INLINE void
 prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize,
-    prof_tctx_t *tctx)
-{
+    prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);

@@ -94,8 +89,7 @@ prof_tctx_set(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize,

 JEMALLOC_ALWAYS_INLINE void
 prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr,
-    prof_tctx_t *tctx)
-{
+    prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);

@@ -104,37 +98,40 @@ prof_tctx_reset(tsdn_t *tsdn, extent_t *extent, const void *ptr,

 JEMALLOC_ALWAYS_INLINE bool
 prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
-    prof_tdata_t **tdata_out)
-{
+    prof_tdata_t **tdata_out) {
 	prof_tdata_t *tdata;

 	cassert(config_prof);

 	tdata = prof_tdata_get(tsd, true);
-	if (unlikely((uintptr_t)tdata <= (uintptr_t)PROF_TDATA_STATE_MAX))
+	if (unlikely((uintptr_t)tdata <= (uintptr_t)PROF_TDATA_STATE_MAX)) {
 		tdata = NULL;
+	}

-	if (tdata_out != NULL)
+	if (tdata_out != NULL) {
 		*tdata_out = tdata;
+	}

-	if (unlikely(tdata == NULL))
+	if (unlikely(tdata == NULL)) {
 		return (true);
+	}

 	if (likely(tdata->bytes_until_sample >= usize)) {
-		if (update)
+		if (update) {
 			tdata->bytes_until_sample -= usize;
+		}
 		return (true);
 	} else {
 		/* Compute new sample threshold. */
-		if (update)
+		if (update) {
 			prof_sample_threshold_update(tdata);
+		}
 		return (!tdata->active);
 	}
 }

 JEMALLOC_ALWAYS_INLINE prof_tctx_t *
-prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update)
-{
+prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update) {
 	prof_tctx_t *ret;
 	prof_tdata_t *tdata;
 	prof_bt_t bt;
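prof_sample_accum_update() above drives heap-profile sampling off a per-thread byte countdown: each allocation subtracts its size from bytes_until_sample, and a sample is taken only once the counter is exhausted. A toy version of that countdown is sketched below; demo_tdata_t and the fixed reset interval are simplifications, since the real code recomputes the threshold via prof_sample_threshold_update().

#include <stdbool.h>
#include <stddef.h>

typedef struct {
	size_t bytes_until_sample;	/* countdown toward the next sample */
	size_t sample_interval;		/* assumed fixed reset value */
} demo_tdata_t;

/* Returns true if this allocation of usize bytes should NOT be sampled. */
static bool
demo_sample_accum_update(demo_tdata_t *tdata, size_t usize) {
	if (tdata->bytes_until_sample >= usize) {
		tdata->bytes_until_sample -= usize;
		return true;	/* Not yet time to sample. */
	}
	/* Threshold crossed: reset the countdown and sample this allocation. */
	tdata->bytes_until_sample = tdata->sample_interval;
	return false;
}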
@@ -142,9 +139,9 @@ prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update)
 	assert(usize == s2u(usize));

 	if (!prof_active || likely(prof_sample_accum_update(tsd, usize, update,
-	    &tdata)))
+	    &tdata))) {
 		ret = (prof_tctx_t *)(uintptr_t)1U;
-	else {
+	} else {
 		bt_init(&bt, tdata->vec);
 		prof_backtrace(&bt);
 		ret = prof_lookup(tsd, &bt);
@@ -155,15 +152,14 @@ prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update)

 JEMALLOC_ALWAYS_INLINE void
 prof_malloc(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize,
-    prof_tctx_t *tctx)
-{
+    prof_tctx_t *tctx) {
 	cassert(config_prof);
 	assert(ptr != NULL);
 	assert(usize == isalloc(tsdn, extent, ptr));

-	if (unlikely((uintptr_t)tctx > (uintptr_t)1U))
+	if (unlikely((uintptr_t)tctx > (uintptr_t)1U)) {
 		prof_malloc_sample_object(tsdn, extent, ptr, usize, tctx);
-	else {
+	} else {
 		prof_tctx_set(tsdn, extent, ptr, usize,
 		    (prof_tctx_t *)(uintptr_t)1U);
 	}
@@ -172,8 +168,7 @@ prof_malloc(tsdn_t *tsdn, extent_t *extent, const void *ptr, size_t usize,
 JEMALLOC_ALWAYS_INLINE void
 prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr, size_t usize,
     prof_tctx_t *tctx, bool prof_active, bool updated, extent_t *old_extent,
-    const void *old_ptr, size_t old_usize, prof_tctx_t *old_tctx)
-{
+    const void *old_ptr, size_t old_usize, prof_tctx_t *old_tctx) {
 	bool sampled, old_sampled, moved;

 	cassert(config_prof);
@@ -230,16 +225,16 @@ prof_realloc(tsd_t *tsd, extent_t *extent, const void *ptr, size_t usize,
 }

 JEMALLOC_ALWAYS_INLINE void
-prof_free(tsd_t *tsd, const extent_t *extent, const void *ptr, size_t usize)
-{
+prof_free(tsd_t *tsd, const extent_t *extent, const void *ptr, size_t usize) {
 	prof_tctx_t *tctx = prof_tctx_get(tsd_tsdn(tsd), extent, ptr);

 	cassert(config_prof);
 	assert(usize == isalloc(tsd_tsdn(tsd), extent, ptr));

-	if (unlikely((uintptr_t)tctx > (uintptr_t)1U))
+	if (unlikely((uintptr_t)tctx > (uintptr_t)1U)) {
 		prof_free_sampled_object(tsd, usize, tctx);
+	}
 }
 #endif

 #endif /* JEMALLOC_INTERNAL_PROF_INLINES_H */
@@ -25,9 +25,7 @@ struct { \
 	(a_qrelm)->a_field.qre_prev = (a_qr); \
 } while (0)

-#define qr_after_insert(a_qrelm, a_qr, a_field) \
-    do \
-    { \
+#define qr_after_insert(a_qrelm, a_qr, a_field) do { \
 	(a_qr)->a_field.qre_next = (a_qrelm)->a_field.qre_next; \
 	(a_qr)->a_field.qre_prev = (a_qrelm); \
 	(a_qr)->a_field.qre_next->a_field.qre_prev = (a_qr); \
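qr_after_insert() above splices an element into a circular doubly-linked ring in O(1). The same pointer updates are shown below as plain functions on an invented demo_node_t, with a small main() that checks the two-element ring they produce.

#include <assert.h>
#include <stddef.h>

typedef struct demo_node_s demo_node_t;
struct demo_node_s {
	demo_node_t *next;
	demo_node_t *prev;
	int value;
};

/* Every node starts out as a ring of size one. */
static void
demo_ring_new(demo_node_t *n, int value) {
	n->next = n;
	n->prev = n;
	n->value = value;
}

/* Insert n into the ring immediately after at. */
static void
demo_ring_after_insert(demo_node_t *at, demo_node_t *n) {
	n->next = at->next;
	n->prev = at;
	n->next->prev = n;
	at->next = n;
}

int
main(void) {
	demo_node_t a, b;
	demo_ring_new(&a, 1);
	demo_ring_new(&b, 2);
	demo_ring_after_insert(&a, &b);
	assert(a.next == &b && b.next == &a && a.prev == &b && b.prev == &a);
	return 0;
}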
@@ -550,8 +550,7 @@ a_prefix##remove(a_rbt_type *rbtree, a_type *node) { \
 			/* Find node's successor, in preparation for swap. */ \
 			pathp->cmp = 1; \
 			nodep = pathp; \
-			for (pathp++; pathp->node != NULL; \
-			    pathp++) { \
+			for (pathp++; pathp->node != NULL; pathp++) { \
 				pathp->cmp = -1; \
 				pathp[1].node = rbtn_left_get(a_type, a_field, \
 				    pathp->node); \
@@ -37,12 +37,12 @@ void rtree_clear(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,

 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_RTREE_C_))
 JEMALLOC_ALWAYS_INLINE unsigned
-rtree_start_level(const rtree_t *rtree, uintptr_t key)
-{
+rtree_start_level(const rtree_t *rtree, uintptr_t key) {
 	unsigned start_level;

-	if (unlikely(key == 0))
+	if (unlikely(key == 0)) {
 		return (rtree->height - 1);
+	}

 	start_level = rtree->start_level[(lg_floor(key) + 1) >>
 	    LG_RTREE_BITS_PER_LEVEL];
@@ -52,8 +52,7 @@ rtree_start_level(const rtree_t *rtree, uintptr_t key)

 JEMALLOC_ALWAYS_INLINE unsigned
 rtree_ctx_start_level(const rtree_t *rtree, const rtree_ctx_t *rtree_ctx,
-    uintptr_t key)
-{
+    uintptr_t key) {
 	unsigned start_level;
 	uintptr_t key_diff;

@@ -72,48 +71,45 @@ rtree_ctx_start_level(const rtree_t *rtree, const rtree_ctx_t *rtree_ctx,
 }

 JEMALLOC_ALWAYS_INLINE uintptr_t
-rtree_subkey(rtree_t *rtree, uintptr_t key, unsigned level)
-{
+rtree_subkey(rtree_t *rtree, uintptr_t key, unsigned level) {
 	return ((key >> ((ZU(1) << (LG_SIZEOF_PTR+3)) -
 	    rtree->levels[level].cumbits)) & ((ZU(1) <<
 	    rtree->levels[level].bits) - 1));
 }

 JEMALLOC_ALWAYS_INLINE bool
-rtree_node_valid(rtree_elm_t *node)
-{
+rtree_node_valid(rtree_elm_t *node) {
 	return ((uintptr_t)node != (uintptr_t)0);
 }

 JEMALLOC_ALWAYS_INLINE rtree_elm_t *
-rtree_child_tryread(rtree_elm_t *elm, bool dependent)
-{
+rtree_child_tryread(rtree_elm_t *elm, bool dependent) {
 	rtree_elm_t *child;

 	/* Double-checked read (first read may be stale). */
 	child = elm->child;
-	if (!dependent && !rtree_node_valid(child))
+	if (!dependent && !rtree_node_valid(child)) {
 		child = (rtree_elm_t *)atomic_read_p(&elm->pun);
+	}
 	assert(!dependent || child != NULL);
 	return (child);
 }

 JEMALLOC_ALWAYS_INLINE rtree_elm_t *
 rtree_child_read(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *elm, unsigned level,
-    bool dependent)
-{
+    bool dependent) {
 	rtree_elm_t *child;

 	child = rtree_child_tryread(elm, dependent);
-	if (!dependent && unlikely(!rtree_node_valid(child)))
+	if (!dependent && unlikely(!rtree_node_valid(child))) {
 		child = rtree_child_read_hard(tsdn, rtree, elm, level);
+	}
 	assert(!dependent || child != NULL);
 	return (child);
 }

 JEMALLOC_ALWAYS_INLINE extent_t *
-rtree_elm_read(rtree_elm_t *elm, bool dependent)
-{
+rtree_elm_read(rtree_elm_t *elm, bool dependent) {
 	extent_t *extent;

 	if (dependent) {
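rtree_subkey() above slices off the most-significant remaining bits of a key for each radix-tree level. The sketch below does the same slicing for an assumed 48-bit key split into three 16-bit levels; the widths are demo choices, not jemalloc's configured rtree geometry.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define DEMO_LEVELS 3
#define DEMO_BITS_PER_LEVEL 16
#define DEMO_KEY_BITS (DEMO_LEVELS * DEMO_BITS_PER_LEVEL)

/* Extract the per-level index, most significant chunk first. */
static uint64_t
demo_subkey(uint64_t key, unsigned level) {
	unsigned shift = DEMO_KEY_BITS - (level + 1) * DEMO_BITS_PER_LEVEL;
	return (key >> shift) & ((UINT64_C(1) << DEMO_BITS_PER_LEVEL) - 1);
}

int
main(void) {
	uint64_t key = UINT64_C(0x123456789abc);
	assert(demo_subkey(key, 0) == 0x1234);
	assert(demo_subkey(key, 1) == 0x5678);
	assert(demo_subkey(key, 2) == 0x9abc);
	printf("ok\n");
	return 0;
}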
@@ -140,14 +136,12 @@ rtree_elm_read(rtree_elm_t *elm, bool dependent)
 }

 JEMALLOC_INLINE void
-rtree_elm_write(rtree_elm_t *elm, const extent_t *extent)
-{
+rtree_elm_write(rtree_elm_t *elm, const extent_t *extent) {
 	atomic_write_p(&elm->pun, extent);
 }

 JEMALLOC_ALWAYS_INLINE rtree_elm_t *
-rtree_subtree_tryread(rtree_t *rtree, unsigned level, bool dependent)
-{
+rtree_subtree_tryread(rtree_t *rtree, unsigned level, bool dependent) {
 	rtree_elm_t *subtree;

 	/* Double-checked read (first read may be stale). */
@@ -161,21 +155,21 @@ rtree_subtree_tryread(rtree_t *rtree, unsigned level, bool dependent)
 }

 JEMALLOC_ALWAYS_INLINE rtree_elm_t *
-rtree_subtree_read(tsdn_t *tsdn, rtree_t *rtree, unsigned level, bool dependent)
-{
+rtree_subtree_read(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
+    bool dependent) {
 	rtree_elm_t *subtree;

 	subtree = rtree_subtree_tryread(rtree, level, dependent);
-	if (!dependent && unlikely(!rtree_node_valid(subtree)))
+	if (!dependent && unlikely(!rtree_node_valid(subtree))) {
 		subtree = rtree_subtree_read_hard(tsdn, rtree, level);
+	}
 	assert(!dependent || subtree != NULL);
 	return (subtree);
 }

 JEMALLOC_ALWAYS_INLINE rtree_elm_t *
 rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
-    uintptr_t key, bool dependent, bool init_missing)
-{
+    uintptr_t key, bool dependent, bool init_missing) {
 	uintptr_t subkey;
 	unsigned start_level;
 	rtree_elm_t *node;
@@ -184,9 +178,9 @@ rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,

 	if (dependent || init_missing) {
 		if (likely(rtree_ctx->valid)) {
-			if (key == rtree_ctx->key)
+			if (key == rtree_ctx->key) {
 				return (rtree_ctx->elms[rtree->height]);
-			else {
+			} else {
 				unsigned no_ctx_start_level =
 				    rtree_start_level(rtree, key);
 				unsigned ctx_start_level;
@@ -237,8 +231,9 @@ rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
 	case level: \
 		assert(level < (RTREE_HEIGHT_MAX-1)); \
 		if (!dependent && unlikely(!rtree_node_valid(node))) { \
-			if (init_missing) \
+			if (init_missing) { \
 				rtree_ctx->valid = false; \
+			} \
 			return (NULL); \
 		} \
 		subkey = rtree_subkey(rtree, key, level - \
@@ -255,8 +250,9 @@ rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
 	case level: \
 		assert(level == (RTREE_HEIGHT_MAX-1)); \
 		if (!dependent && unlikely(!rtree_node_valid(node))) { \
-			if (init_missing) \
+			if (init_missing) { \
 				rtree_ctx->valid = false; \
+			} \
 			return (NULL); \
 		} \
 		subkey = rtree_subkey(rtree, key, level - \
@@ -330,16 +326,16 @@ rtree_elm_lookup(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,

 JEMALLOC_INLINE bool
 rtree_write(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx, uintptr_t key,
-    const extent_t *extent)
-{
+    const extent_t *extent) {
 	rtree_elm_t *elm;

 	assert(extent != NULL); /* Use rtree_clear() for this case. */
 	assert(((uintptr_t)extent & (uintptr_t)0x1) == (uintptr_t)0x0);

 	elm = rtree_elm_lookup(tsdn, rtree, rtree_ctx, key, false, true);
-	if (elm == NULL)
+	if (elm == NULL) {
 		return (true);
+	}
 	assert(rtree_elm_read(elm, false) == NULL);
 	rtree_elm_write(elm, extent);

@@ -348,27 +344,27 @@ rtree_write(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx, uintptr_t key,

 JEMALLOC_ALWAYS_INLINE extent_t *
 rtree_read(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx, uintptr_t key,
-    bool dependent)
-{
+    bool dependent) {
 	rtree_elm_t *elm;

 	elm = rtree_elm_lookup(tsdn, rtree, rtree_ctx, key, dependent, false);
-	if (elm == NULL)
+	if (elm == NULL) {
 		return (NULL);
+	}

 	return (rtree_elm_read(elm, dependent));
 }

 JEMALLOC_INLINE rtree_elm_t *
 rtree_elm_acquire(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
-    uintptr_t key, bool dependent, bool init_missing)
-{
+    uintptr_t key, bool dependent, bool init_missing) {
 	rtree_elm_t *elm;

 	elm = rtree_elm_lookup(tsdn, rtree, rtree_ctx, key, dependent,
 	    init_missing);
-	if (!dependent && elm == NULL)
+	if (!dependent && elm == NULL) {
 		return (NULL);
+	}
|
}
|
||||||
{
|
{
|
||||||
extent_t *extent;
|
extent_t *extent;
|
||||||
void *s;
|
void *s;
|
||||||
@ -380,52 +376,53 @@ rtree_elm_acquire(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
|
|||||||
} while (atomic_cas_p(&elm->pun, (void *)extent, s));
|
} while (atomic_cas_p(&elm->pun, (void *)extent, s));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (config_debug)
|
if (config_debug) {
|
||||||
rtree_elm_witness_acquire(tsdn, rtree, key, elm);
|
rtree_elm_witness_acquire(tsdn, rtree, key, elm);
|
||||||
|
}
|
||||||
|
|
||||||
return (elm);
|
return (elm);
|
||||||
}
|
}
|
||||||
|
|
||||||
JEMALLOC_INLINE extent_t *
|
JEMALLOC_INLINE extent_t *
|
||||||
rtree_elm_read_acquired(tsdn_t *tsdn, const rtree_t *rtree, rtree_elm_t *elm)
|
rtree_elm_read_acquired(tsdn_t *tsdn, const rtree_t *rtree, rtree_elm_t *elm) {
|
||||||
{
|
|
||||||
extent_t *extent;
|
extent_t *extent;
|
||||||
|
|
||||||
assert(((uintptr_t)elm->pun & (uintptr_t)0x1) == (uintptr_t)0x1);
|
assert(((uintptr_t)elm->pun & (uintptr_t)0x1) == (uintptr_t)0x1);
|
||||||
extent = (extent_t *)((uintptr_t)elm->pun & ~((uintptr_t)0x1));
|
extent = (extent_t *)((uintptr_t)elm->pun & ~((uintptr_t)0x1));
|
||||||
assert(((uintptr_t)extent & (uintptr_t)0x1) == (uintptr_t)0x0);
|
assert(((uintptr_t)extent & (uintptr_t)0x1) == (uintptr_t)0x0);
|
||||||
|
|
||||||
if (config_debug)
|
if (config_debug) {
|
||||||
rtree_elm_witness_access(tsdn, rtree, elm);
|
rtree_elm_witness_access(tsdn, rtree, elm);
|
||||||
|
}
|
||||||
|
|
||||||
return (extent);
|
return (extent);
|
||||||
}
|
}
|
||||||
|
|
||||||
JEMALLOC_INLINE void
|
JEMALLOC_INLINE void
|
||||||
rtree_elm_write_acquired(tsdn_t *tsdn, const rtree_t *rtree, rtree_elm_t *elm,
|
rtree_elm_write_acquired(tsdn_t *tsdn, const rtree_t *rtree, rtree_elm_t *elm,
|
||||||
const extent_t *extent)
|
const extent_t *extent) {
|
||||||
{
|
|
||||||
assert(((uintptr_t)extent & (uintptr_t)0x1) == (uintptr_t)0x0);
|
assert(((uintptr_t)extent & (uintptr_t)0x1) == (uintptr_t)0x0);
|
||||||
assert(((uintptr_t)elm->pun & (uintptr_t)0x1) == (uintptr_t)0x1);
|
assert(((uintptr_t)elm->pun & (uintptr_t)0x1) == (uintptr_t)0x1);
|
||||||
|
|
||||||
if (config_debug)
|
if (config_debug) {
|
||||||
rtree_elm_witness_access(tsdn, rtree, elm);
|
rtree_elm_witness_access(tsdn, rtree, elm);
|
||||||
|
}
|
||||||
|
|
||||||
elm->pun = (void *)((uintptr_t)extent | (uintptr_t)0x1);
|
elm->pun = (void *)((uintptr_t)extent | (uintptr_t)0x1);
|
||||||
assert(rtree_elm_read_acquired(tsdn, rtree, elm) == extent);
|
assert(rtree_elm_read_acquired(tsdn, rtree, elm) == extent);
|
||||||
}
|
}
|
||||||
|
|
||||||
JEMALLOC_INLINE void
|
JEMALLOC_INLINE void
|
||||||
rtree_elm_release(tsdn_t *tsdn, const rtree_t *rtree, rtree_elm_t *elm)
|
rtree_elm_release(tsdn_t *tsdn, const rtree_t *rtree, rtree_elm_t *elm) {
|
||||||
{
|
|
||||||
rtree_elm_write(elm, rtree_elm_read_acquired(tsdn, rtree, elm));
|
rtree_elm_write(elm, rtree_elm_read_acquired(tsdn, rtree, elm));
|
||||||
if (config_debug)
|
if (config_debug) {
|
||||||
rtree_elm_witness_release(tsdn, rtree, elm);
|
rtree_elm_witness_release(tsdn, rtree, elm);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
JEMALLOC_INLINE void
|
JEMALLOC_INLINE void
|
||||||
rtree_clear(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx, uintptr_t key)
|
rtree_clear(tsdn_t *tsdn, rtree_t *rtree, rtree_ctx_t *rtree_ctx,
|
||||||
{
|
uintptr_t key) {
|
||||||
rtree_elm_t *elm;
|
rtree_elm_t *elm;
|
||||||
|
|
||||||
elm = rtree_elm_acquire(tsdn, rtree, rtree_ctx, key, true, false);
|
elm = rtree_elm_acquire(tsdn, rtree, rtree_ctx, key, true, false);
|
||||||
|
@ -8,22 +8,22 @@ void spin_adaptive(spin_t *spin);

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_SPIN_C_))
JEMALLOC_INLINE void
spin_init(spin_t *spin)
spin_init(spin_t *spin) {
{
	spin->iteration = 0;
}

JEMALLOC_INLINE void
spin_adaptive(spin_t *spin)
spin_adaptive(spin_t *spin) {
{
	volatile uint64_t i;

	for (i = 0; i < (KQU(1) << spin->iteration); i++)
	for (i = 0; i < (KQU(1) << spin->iteration); i++) {
		CPU_SPINWAIT;
	}

	if (spin->iteration < 63)
	if (spin->iteration < 63) {
		spin->iteration++;
	}
}

#endif
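Every hunk in this commit applies the same mechanical rule: the opening brace stays on the function's signature line, and single-statement `if`/`for`/`else` bodies get explicit braces. A minimal, self-contained sketch of the rule follows; the `clamp_*` functions are illustrative only and do not come from jemalloc.

```c
#include <stdio.h>

/* Old style: brace on its own line, unbraced single-statement if. */
static int
clamp_old(int x, int limit)
{
	if (x > limit)
		return limit;
	return x;
}

/* New style after this commit: brace on the signature line, braced body. */
static int
clamp_new(int x, int limit) {
	if (x > limit) {
		return limit;
	}
	return x;
}

int
main(void) {
	printf("%d %d\n", clamp_old(7, 5), clamp_new(7, 5));
	return 0;
}
```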
@ -21,8 +21,7 @@ tcache_t *tcaches_get(tsd_t *tsd, unsigned ind);

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_TCACHE_C_))
JEMALLOC_INLINE void
tcache_flush(void)
tcache_flush(void) {
{
	tsd_t *tsd;

	cassert(config_tcache);
@ -32,8 +31,7 @@ tcache_flush(void)
}

JEMALLOC_INLINE bool
tcache_enabled_get(void)
tcache_enabled_get(void) {
{
	tsd_t *tsd;
	tcache_enabled_t tcache_enabled;
@ -50,8 +48,7 @@ tcache_enabled_get(void)
}

JEMALLOC_INLINE void
tcache_enabled_set(bool enabled)
tcache_enabled_set(bool enabled) {
{
	tsd_t *tsd;
	tcache_enabled_t tcache_enabled;
@ -62,21 +59,23 @@ tcache_enabled_set(bool enabled)
	tcache_enabled = (tcache_enabled_t)enabled;
	tsd_tcache_enabled_set(tsd, tcache_enabled);

	if (!enabled)
	if (!enabled) {
		tcache_cleanup(tsd);
	}
}

JEMALLOC_ALWAYS_INLINE tcache_t *
tcache_get(tsd_t *tsd, bool create)
tcache_get(tsd_t *tsd, bool create) {
{
	tcache_t *tcache;

	if (!config_tcache)
	if (!config_tcache) {
		return (NULL);
	}

	tcache = tsd_tcache_get(tsd);
	if (!create)
	if (!create) {
		return (tcache);
	}
	if (unlikely(tcache == NULL) && tsd_nominal(tsd)) {
		tcache = tcache_get_hard(tsd);
		tsd_tcache_set(tsd, tcache);
@ -86,18 +85,18 @@ tcache_get(tsd_t *tsd, bool create)
}

JEMALLOC_ALWAYS_INLINE void
tcache_event(tsd_t *tsd, tcache_t *tcache)
tcache_event(tsd_t *tsd, tcache_t *tcache) {
{
	if (TCACHE_GC_INCR == 0)
	if (TCACHE_GC_INCR == 0) {
		return;
	}

	if (unlikely(ticker_tick(&tcache->gc_ticker)))
	if (unlikely(ticker_tick(&tcache->gc_ticker))) {
		tcache_event_hard(tsd, tcache);
	}
}

JEMALLOC_ALWAYS_INLINE void *
tcache_alloc_easy(tcache_bin_t *tbin, bool *tcache_success)
tcache_alloc_easy(tcache_bin_t *tbin, bool *tcache_success) {
{
	void *ret;

	if (unlikely(tbin->ncached == 0)) {
@ -116,16 +115,16 @@ tcache_alloc_easy(tcache_bin_t *tbin, bool *tcache_success)
	ret = *(tbin->avail - tbin->ncached);
	tbin->ncached--;

	if (unlikely((int)tbin->ncached < tbin->low_water))
	if (unlikely((int)tbin->ncached < tbin->low_water)) {
		tbin->low_water = tbin->ncached;
	}

	return (ret);
}

JEMALLOC_ALWAYS_INLINE void *
tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
    szind_t binind, bool zero, bool slow_path)
    szind_t binind, bool zero, bool slow_path) {
{
	void *ret;
	tcache_bin_t *tbin;
	bool tcache_success;
@ -138,14 +137,16 @@ tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
	if (unlikely(!tcache_success)) {
		bool tcache_hard_success;
		arena = arena_choose(tsd, arena);
		if (unlikely(arena == NULL))
		if (unlikely(arena == NULL)) {
			return (NULL);
		}

		ret = tcache_alloc_small_hard(tsd_tsdn(tsd), arena, tcache,
		    tbin, binind, &tcache_hard_success);
		if (tcache_hard_success == false)
		if (tcache_hard_success == false) {
			return (NULL);
		}
	}

	assert(ret);
	/*
@ -162,9 +163,10 @@ tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
			if (unlikely(opt_junk_alloc)) {
				arena_alloc_junk_small(ret,
				    &arena_bin_info[binind], false);
			} else if (unlikely(opt_zero))
			} else if (unlikely(opt_zero)) {
				memset(ret, 0, usize);
			}
		}
	} else {
		if (slow_path && config_fill && unlikely(opt_junk_alloc)) {
			arena_alloc_junk_small(ret, &arena_bin_info[binind],
@ -173,18 +175,19 @@ tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
			memset(ret, 0, usize);
	}

	if (config_stats)
	if (config_stats) {
		tbin->tstats.nrequests++;
	}
	if (config_prof)
	if (config_prof) {
		tcache->prof_accumbytes += usize;
	}
	tcache_event(tsd, tcache);
	return (ret);
}

JEMALLOC_ALWAYS_INLINE void *
tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
    szind_t binind, bool zero, bool slow_path)
    szind_t binind, bool zero, bool slow_path) {
{
	void *ret;
	tcache_bin_t *tbin;
	bool tcache_success;
@ -199,12 +202,14 @@ tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
		 * expensive to create one and not use it.
		 */
		arena = arena_choose(tsd, arena);
		if (unlikely(arena == NULL))
		if (unlikely(arena == NULL)) {
			return (NULL);
		}

		ret = large_malloc(tsd_tsdn(tsd), arena, s2u(size), zero);
		if (ret == NULL)
		if (ret == NULL) {
			return (NULL);
		}
	} else {
		size_t usize JEMALLOC_CC_SILENCE_INIT(0);

@ -220,17 +225,21 @@ tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,
				if (unlikely(opt_junk_alloc)) {
					memset(ret, JEMALLOC_ALLOC_JUNK,
					    usize);
				} else if (unlikely(opt_zero))
				} else if (unlikely(opt_zero)) {
					memset(ret, 0, usize);
				}
			} else
			} else {
				memset(ret, 0, usize);
			}
		}

		if (config_stats)
		if (config_stats) {
			tbin->tstats.nrequests++;
		}
		if (config_prof)
		if (config_prof) {
			tcache->prof_accumbytes += usize;
		}
	}

	tcache_event(tsd, tcache);
	return (ret);
@ -238,15 +247,15 @@ tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,

JEMALLOC_ALWAYS_INLINE void
tcache_dalloc_small(tsd_t *tsd, tcache_t *tcache, void *ptr, szind_t binind,
    bool slow_path)
    bool slow_path) {
{
	tcache_bin_t *tbin;
	tcache_bin_info_t *tbin_info;

	assert(tcache_salloc(tsd_tsdn(tsd), ptr) <= SMALL_MAXCLASS);

	if (slow_path && config_fill && unlikely(opt_junk_free))
	if (slow_path && config_fill && unlikely(opt_junk_free)) {
		arena_dalloc_junk_small(ptr, &arena_bin_info[binind]);
	}

	tbin = &tcache->tbins[binind];
	tbin_info = &tcache_bin_info[binind];
@ -263,8 +272,7 @@ tcache_dalloc_small(tsd_t *tsd, tcache_t *tcache, void *ptr, szind_t binind,

JEMALLOC_ALWAYS_INLINE void
tcache_dalloc_large(tsd_t *tsd, tcache_t *tcache, void *ptr, size_t size,
    bool slow_path)
    bool slow_path) {
{
	szind_t binind;
	tcache_bin_t *tbin;
	tcache_bin_info_t *tbin_info;
@ -274,8 +282,9 @@ tcache_dalloc_large(tsd_t *tsd, tcache_t *tcache, void *ptr, size_t size,

	binind = size2index(size);

	if (slow_path && config_fill && unlikely(opt_junk_free))
	if (slow_path && config_fill && unlikely(opt_junk_free)) {
		large_dalloc_junk(ptr, size);
	}

	tbin = &tcache->tbins[binind];
	tbin_info = &tcache_bin_info[binind];
@ -291,8 +300,7 @@ tcache_dalloc_large(tsd_t *tsd, tcache_t *tcache, void *ptr, size_t size,
}

JEMALLOC_ALWAYS_INLINE tcache_t *
tcaches_get(tsd_t *tsd, unsigned ind)
tcaches_get(tsd_t *tsd, unsigned ind) {
{
	tcaches_t *elm = &tcaches[ind];
	if (unlikely(elm->tcache == NULL)) {
		elm->tcache = tcache_create(tsd_tsdn(tsd), arena_choose(tsd,
@ -11,27 +11,23 @@ bool ticker_tick(ticker_t *ticker);

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_TICKER_C_))
JEMALLOC_INLINE void
ticker_init(ticker_t *ticker, int32_t nticks)
ticker_init(ticker_t *ticker, int32_t nticks) {
{
	ticker->tick = nticks;
	ticker->nticks = nticks;
}

JEMALLOC_INLINE void
ticker_copy(ticker_t *ticker, const ticker_t *other)
ticker_copy(ticker_t *ticker, const ticker_t *other) {
{
	*ticker = *other;
}

JEMALLOC_INLINE int32_t
ticker_read(const ticker_t *ticker)
ticker_read(const ticker_t *ticker) {
{
	return (ticker->tick);
}

JEMALLOC_INLINE bool
ticker_ticks(ticker_t *ticker, int32_t nticks)
ticker_ticks(ticker_t *ticker, int32_t nticks) {
{
	if (unlikely(ticker->tick < nticks)) {
		ticker->tick = ticker->nticks;
		return (true);
@ -41,8 +37,7 @@ ticker_ticks(ticker_t *ticker, int32_t nticks)
}

JEMALLOC_INLINE bool
ticker_tick(ticker_t *ticker)
ticker_tick(ticker_t *ticker) {
{
	return (ticker_ticks(ticker, 1));
}
#endif
@ -25,12 +25,12 @@ malloc_tsd_externs(, tsd_t)
malloc_tsd_funcs(JEMALLOC_ALWAYS_INLINE, , tsd_t, tsd_initializer, tsd_cleanup)

JEMALLOC_ALWAYS_INLINE tsd_t *
tsd_fetch_impl(bool init)
tsd_fetch_impl(bool init) {
{
	tsd_t *tsd = tsd_get(init);

	if (!init && tsd_get_allocates() && tsd == NULL)
	if (!init && tsd_get_allocates() && tsd == NULL) {
		return (NULL);
	}
	assert(tsd != NULL);

	if (unlikely(tsd->state != tsd_state_nominal)) {
@ -41,47 +41,42 @@ tsd_fetch_impl(bool init)
		} else if (tsd->state == tsd_state_purgatory) {
			tsd->state = tsd_state_reincarnated;
			tsd_set(tsd);
		} else
		} else {
			assert(tsd->state == tsd_state_reincarnated);
		}
	}

	return (tsd);
}

JEMALLOC_ALWAYS_INLINE tsd_t *
tsd_fetch(void)
tsd_fetch(void) {
{
	return (tsd_fetch_impl(true));
}

JEMALLOC_ALWAYS_INLINE tsdn_t *
tsd_tsdn(tsd_t *tsd)
tsd_tsdn(tsd_t *tsd) {
{
	return ((tsdn_t *)tsd);
}

JEMALLOC_INLINE bool
tsd_nominal(tsd_t *tsd)
tsd_nominal(tsd_t *tsd) {
{
	return (tsd->state == tsd_state_nominal);
}

#define O(n, t, c) \
JEMALLOC_ALWAYS_INLINE t * \
tsd_##n##p_get(tsd_t *tsd) \
tsd_##n##p_get(tsd_t *tsd) { \
{ \
	return (&tsd->n); \
} \
 \
JEMALLOC_ALWAYS_INLINE t \
tsd_##n##_get(tsd_t *tsd) \
tsd_##n##_get(tsd_t *tsd) { \
{ \
	return (*tsd_##n##p_get(tsd)); \
} \
 \
JEMALLOC_ALWAYS_INLINE void \
tsd_##n##_set(tsd_t *tsd, t n) \
tsd_##n##_set(tsd_t *tsd, t n) { \
{ \
	assert(tsd->state == tsd_state_nominal); \
	tsd->n = n; \
}
@ -89,31 +84,28 @@ MALLOC_TSD
#undef O

JEMALLOC_ALWAYS_INLINE tsdn_t *
tsdn_fetch(void)
tsdn_fetch(void) {
{
	if (!tsd_booted_get())
	if (!tsd_booted_get()) {
		return (NULL);
	}

	return (tsd_tsdn(tsd_fetch_impl(false)));
}

JEMALLOC_ALWAYS_INLINE bool
tsdn_null(const tsdn_t *tsdn)
tsdn_null(const tsdn_t *tsdn) {
{
	return (tsdn == NULL);
}

JEMALLOC_ALWAYS_INLINE tsd_t *
tsdn_tsd(tsdn_t *tsdn)
tsdn_tsd(tsdn_t *tsdn) {
{
	assert(!tsdn_null(tsdn));

	return (&tsdn->tsd);
}

JEMALLOC_ALWAYS_INLINE rtree_ctx_t *
tsdn_rtree_ctx(tsdn_t *tsdn, rtree_ctx_t *fallback)
tsdn_rtree_ctx(tsdn_t *tsdn, rtree_ctx_t *fallback) {
{
	/*
	 * If tsd cannot be accessed, initialize the fallback rtree_ctx and
	 * return a pointer to it.
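The O()/MALLOC_TSD block above is an X-macro: MALLOC_TSD expands to a list of O(name, type, ...) entries, and each redefinition of O stamps out one accessor per field. A minimal, self-contained sketch of the same pattern follows; the struct, field names, and MY_FIELDS list here are made up for illustration and are not jemalloc's.

```c
#include <stdio.h>

/* The field list: each entry is O(name, type, initializer). */
#define MY_FIELDS \
	O(counter, int, 0) \
	O(ratio, double, 1.0)

struct state {
#define O(n, t, c)	t n;
	MY_FIELDS
#undef O
};

/* Stamp out a getter and a setter per field, brace style as in the patch. */
#define O(n, t, c) \
static t \
state_##n##_get(const struct state *s) { \
	return s->n; \
} \
static void \
state_##n##_set(struct state *s, t v) { \
	s->n = v; \
}
MY_FIELDS
#undef O

int
main(void) {
	struct state s = {0, 1.0};
	state_counter_set(&s, 42);
	printf("%d %f\n", state_counter_get(&s), state_ratio_get(&s));
	return 0;
}
```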
@ -175,8 +175,7 @@ a_attr bool a_name##tsd_booted = false;
    a_cleanup) \
/* Initialization/cleanup. */ \
a_attr bool \
a_name##tsd_cleanup_wrapper(void) \
a_name##tsd_cleanup_wrapper(void) { \
{ \
	if (a_name##tsd_initialized) { \
		a_name##tsd_initialized = false; \
		a_cleanup(&a_name##tsd_tls); \
@ -184,8 +183,7 @@ a_name##tsd_cleanup_wrapper(void) \
	return (a_name##tsd_initialized); \
} \
a_attr bool \
a_name##tsd_boot0(void) \
a_name##tsd_boot0(void) { \
{ \
	if (a_cleanup != malloc_tsd_no_cleanup) { \
		malloc_tsd_cleanup_register( \
		    &a_name##tsd_cleanup_wrapper); \
@ -194,113 +192,105 @@ a_name##tsd_boot0(void) \
	return (false); \
} \
a_attr void \
a_name##tsd_boot1(void) \
a_name##tsd_boot1(void) { \
	/* Do nothing. */ \
} \
a_attr bool \
a_name##tsd_boot(void) \
a_name##tsd_boot(void) { \
	return (a_name##tsd_boot0()); \
} \
a_attr bool \
a_name##tsd_booted_get(void) \
a_name##tsd_booted_get(void) { \
	return (a_name##tsd_booted); \
} \
a_attr bool \
a_name##tsd_get_allocates(void) \
a_name##tsd_get_allocates(void) { \
	return (false); \
} \
/* Get/set. */ \
a_attr a_type * \
a_name##tsd_get(bool init) \
a_name##tsd_get(bool init) { \
	assert(a_name##tsd_booted); \
	return (&a_name##tsd_tls); \
} \
a_attr void \
a_name##tsd_set(a_type *val) \
a_name##tsd_set(a_type *val) { \
	assert(a_name##tsd_booted); \
	if (likely(&a_name##tsd_tls != val)) \
	if (likely(&a_name##tsd_tls != val)) { \
		a_name##tsd_tls = (*val); \
	} \
	if (a_cleanup != malloc_tsd_no_cleanup) \
	if (a_cleanup != malloc_tsd_no_cleanup) { \
		a_name##tsd_initialized = true; \
	} \
}
#elif (defined(JEMALLOC_TLS))
#define malloc_tsd_funcs(a_attr, a_name, a_type, a_initializer, \
    a_cleanup) \
/* Initialization/cleanup. */ \
a_attr bool \
a_name##tsd_boot0(void) \
a_name##tsd_boot0(void) { \
	if (a_cleanup != malloc_tsd_no_cleanup) { \
		if (pthread_key_create(&a_name##tsd_tsd, a_cleanup) != \
		    0) \
		    0) { \
			return (true); \
		} \
	} \
	a_name##tsd_booted = true; \
	return (false); \
} \
a_attr void \
a_name##tsd_boot1(void) \
a_name##tsd_boot1(void) { \
	/* Do nothing. */ \
} \
a_attr bool \
a_name##tsd_boot(void) \
a_name##tsd_boot(void) { \
	return (a_name##tsd_boot0()); \
} \
a_attr bool \
a_name##tsd_booted_get(void) \
a_name##tsd_booted_get(void) { \
	return (a_name##tsd_booted); \
} \
a_attr bool \
a_name##tsd_get_allocates(void) \
a_name##tsd_get_allocates(void) { \
	return (false); \
} \
/* Get/set. */ \
a_attr a_type * \
a_name##tsd_get(bool init) \
a_name##tsd_get(bool init) { \
	assert(a_name##tsd_booted); \
	return (&a_name##tsd_tls); \
} \
a_attr void \
a_name##tsd_set(a_type *val) \
a_name##tsd_set(a_type *val) { \
	assert(a_name##tsd_booted); \
	if (likely(&a_name##tsd_tls != val)) \
	if (likely(&a_name##tsd_tls != val)) { \
		a_name##tsd_tls = (*val); \
	} \
	if (a_cleanup != malloc_tsd_no_cleanup) { \
		if (pthread_setspecific(a_name##tsd_tsd, \
		    (void *)(&a_name##tsd_tls))) { \
			malloc_write("<jemalloc>: Error" \
			    " setting TSD for "#a_name"\n"); \
			if (opt_abort) \
			if (opt_abort) { \
				abort(); \
			} \
		} \
	} \
}
#elif (defined(_WIN32))
#define malloc_tsd_funcs(a_attr, a_name, a_type, a_initializer, \
    a_cleanup) \
/* Initialization/cleanup. */ \
a_attr bool \
a_name##tsd_cleanup_wrapper(void) \
a_name##tsd_cleanup_wrapper(void) { \
	DWORD error = GetLastError(); \
	a_name##tsd_wrapper_t *wrapper = (a_name##tsd_wrapper_t *) \
	    TlsGetValue(a_name##tsd_tsd); \
	SetLastError(error); \
 \
	if (wrapper == NULL) \
	if (wrapper == NULL) { \
		return (false); \
	} \
	if (a_cleanup != malloc_tsd_no_cleanup && \
	    wrapper->initialized) { \
		wrapper->initialized = false; \
@ -314,8 +304,7 @@ a_name##tsd_cleanup_wrapper(void) \
	return (false); \
} \
a_attr void \
a_name##tsd_wrapper_set(a_name##tsd_wrapper_t *wrapper) \
a_name##tsd_wrapper_set(a_name##tsd_wrapper_t *wrapper) { \
	if (!TlsSetValue(a_name##tsd_tsd, (void *)wrapper)) { \
		malloc_write("<jemalloc>: Error setting" \
		    " TSD for "#a_name"\n"); \
@ -323,8 +312,7 @@ a_name##tsd_wrapper_set(a_name##tsd_wrapper_t *wrapper) \
	} \
} \
a_attr a_name##tsd_wrapper_t * \
a_name##tsd_wrapper_get(bool init) \
a_name##tsd_wrapper_get(bool init) { \
	DWORD error = GetLastError(); \
	a_name##tsd_wrapper_t *wrapper = (a_name##tsd_wrapper_t *) \
	    TlsGetValue(a_name##tsd_tsd); \
@ -346,11 +334,11 @@ a_name##tsd_wrapper_get(bool init) \
	return (wrapper); \
} \
a_attr bool \
a_name##tsd_boot0(void) \
a_name##tsd_boot0(void) { \
	a_name##tsd_tsd = TlsAlloc(); \
	if (a_name##tsd_tsd == TLS_OUT_OF_INDEXES) \
	if (a_name##tsd_tsd == TLS_OUT_OF_INDEXES) { \
		return (true); \
	} \
	if (a_cleanup != malloc_tsd_no_cleanup) { \
		malloc_tsd_cleanup_register( \
		    &a_name##tsd_cleanup_wrapper); \
@ -360,8 +348,7 @@ a_name##tsd_boot0(void) \
	return (false); \
} \
a_attr void \
a_name##tsd_boot1(void) \
a_name##tsd_boot1(void) { \
	a_name##tsd_wrapper_t *wrapper; \
	wrapper = (a_name##tsd_wrapper_t *) \
	    malloc_tsd_malloc(sizeof(a_name##tsd_wrapper_t)); \
@ -375,54 +362,52 @@ a_name##tsd_boot1(void) \
	a_name##tsd_wrapper_set(wrapper); \
} \
a_attr bool \
a_name##tsd_boot(void) \
a_name##tsd_boot(void) { \
	if (a_name##tsd_boot0()) \
	if (a_name##tsd_boot0()) { \
		return (true); \
	} \
	a_name##tsd_boot1(); \
	return (false); \
} \
a_attr bool \
a_name##tsd_booted_get(void) \
a_name##tsd_booted_get(void) { \
	return (a_name##tsd_booted); \
} \
a_attr bool \
a_name##tsd_get_allocates(void) \
a_name##tsd_get_allocates(void) { \
	return (true); \
} \
/* Get/set. */ \
a_attr a_type * \
a_name##tsd_get(bool init) \
a_name##tsd_get(bool init) { \
	a_name##tsd_wrapper_t *wrapper; \
 \
	assert(a_name##tsd_booted); \
	wrapper = a_name##tsd_wrapper_get(init); \
	if (a_name##tsd_get_allocates() && !init && wrapper == NULL) \
	if (a_name##tsd_get_allocates() && !init && wrapper == NULL) { \
		return (NULL); \
	} \
	return (&wrapper->val); \
} \
a_attr void \
a_name##tsd_set(a_type *val) \
a_name##tsd_set(a_type *val) { \
	a_name##tsd_wrapper_t *wrapper; \
 \
	assert(a_name##tsd_booted); \
	wrapper = a_name##tsd_wrapper_get(true); \
	if (likely(&wrapper->val != val)) \
	if (likely(&wrapper->val != val)) { \
		wrapper->val = *(val); \
	} \
	if (a_cleanup != malloc_tsd_no_cleanup) \
	if (a_cleanup != malloc_tsd_no_cleanup) { \
		wrapper->initialized = true; \
	} \
}
#else
#define malloc_tsd_funcs(a_attr, a_name, a_type, a_initializer, \
    a_cleanup) \
/* Initialization/cleanup. */ \
a_attr void \
a_name##tsd_cleanup_wrapper(void *arg) \
a_name##tsd_cleanup_wrapper(void *arg) { \
	a_name##tsd_wrapper_t *wrapper = (a_name##tsd_wrapper_t *)arg; \
 \
	if (a_cleanup != malloc_tsd_no_cleanup && \
@ -435,17 +420,17 @@ a_name##tsd_cleanup_wrapper(void *arg) \
		    (void *)wrapper)) { \
			malloc_write("<jemalloc>: Error" \
			    " setting TSD for "#a_name"\n"); \
			if (opt_abort) \
			if (opt_abort) { \
				abort(); \
			} \
			return; \
		} \
	} \
	malloc_tsd_dalloc(wrapper); \
} \
a_attr void \
a_name##tsd_wrapper_set(a_name##tsd_wrapper_t *wrapper) \
a_name##tsd_wrapper_set(a_name##tsd_wrapper_t *wrapper) { \
	if (pthread_setspecific(a_name##tsd_tsd, \
	    (void *)wrapper)) { \
		malloc_write("<jemalloc>: Error setting" \
@ -454,8 +439,7 @@ a_name##tsd_wrapper_set(a_name##tsd_wrapper_t *wrapper) \
	} \
} \
a_attr a_name##tsd_wrapper_t * \
a_name##tsd_wrapper_get(bool init) \
a_name##tsd_wrapper_get(bool init) { \
	a_name##tsd_wrapper_t *wrapper = (a_name##tsd_wrapper_t *) \
	    pthread_getspecific(a_name##tsd_tsd); \
 \
@ -464,8 +448,9 @@ a_name##tsd_wrapper_get(bool init) \
		wrapper = (a_name##tsd_wrapper_t *) \
		    tsd_init_check_recursion(&a_name##tsd_init_head, \
		    &block); \
		if (wrapper) \
		if (wrapper) { \
			return (wrapper); \
		} \
		wrapper = (a_name##tsd_wrapper_t *) \
		    malloc_tsd_malloc(sizeof(a_name##tsd_wrapper_t)); \
		block.data = (void *)wrapper; \
@ -483,18 +468,17 @@ a_name##tsd_wrapper_get(bool init) \
	return (wrapper); \
} \
a_attr bool \
a_name##tsd_boot0(void) \
a_name##tsd_boot0(void) { \
	if (pthread_key_create(&a_name##tsd_tsd, \
	    a_name##tsd_cleanup_wrapper) != 0) \
	    a_name##tsd_cleanup_wrapper) != 0) { \
		return (true); \
	} \
	a_name##tsd_wrapper_set(&a_name##tsd_boot_wrapper); \
	a_name##tsd_booted = true; \
	return (false); \
} \
a_attr void \
a_name##tsd_boot1(void) \
a_name##tsd_boot1(void) { \
	a_name##tsd_wrapper_t *wrapper; \
	wrapper = (a_name##tsd_wrapper_t *) \
	    malloc_tsd_malloc(sizeof(a_name##tsd_wrapper_t)); \
@ -508,46 +492,45 @@ a_name##tsd_boot1(void) \
	a_name##tsd_wrapper_set(wrapper); \
} \
a_attr bool \
a_name##tsd_boot(void) \
a_name##tsd_boot(void) { \
	if (a_name##tsd_boot0()) \
	if (a_name##tsd_boot0()) { \
		return (true); \
	} \
	a_name##tsd_boot1(); \
	return (false); \
} \
a_attr bool \
a_name##tsd_booted_get(void) \
a_name##tsd_booted_get(void) { \
	return (a_name##tsd_booted); \
} \
a_attr bool \
a_name##tsd_get_allocates(void) \
a_name##tsd_get_allocates(void) { \
	return (true); \
} \
/* Get/set. */ \
a_attr a_type * \
a_name##tsd_get(bool init) \
a_name##tsd_get(bool init) { \
	assert(a_name##tsd_booted); \
	wrapper = a_name##tsd_wrapper_get(init); \
	if (a_name##tsd_get_allocates() && !init && wrapper == NULL) \
	if (a_name##tsd_get_allocates() && !init && wrapper == NULL) { \
		return (NULL); \
	} \
	return (&wrapper->val); \
} \
a_attr void \
a_name##tsd_set(a_type *val) \
a_name##tsd_set(a_type *val) { \
	assert(a_name##tsd_booted); \
	wrapper = a_name##tsd_wrapper_get(true); \
	if (likely(&wrapper->val != val)) \
	if (likely(&wrapper->val != val)) { \
		wrapper->val = *(val); \
	} \
	if (a_cleanup != malloc_tsd_no_cleanup) \
	if (a_cleanup != malloc_tsd_no_cleanup) { \
		wrapper->initialized = true; \
	} \
}
#endif
@ -25,26 +25,22 @@ int get_errno(void);
#endif

JEMALLOC_ALWAYS_INLINE unsigned
ffs_llu(unsigned long long bitmap)
ffs_llu(unsigned long long bitmap) {
{
	return (JEMALLOC_INTERNAL_FFSLL(bitmap));
}

JEMALLOC_ALWAYS_INLINE unsigned
ffs_lu(unsigned long bitmap)
ffs_lu(unsigned long bitmap) {
{
	return (JEMALLOC_INTERNAL_FFSL(bitmap));
}

JEMALLOC_ALWAYS_INLINE unsigned
ffs_u(unsigned bitmap)
ffs_u(unsigned bitmap) {
{
	return (JEMALLOC_INTERNAL_FFS(bitmap));
}

JEMALLOC_ALWAYS_INLINE unsigned
ffs_zu(size_t bitmap)
ffs_zu(size_t bitmap) {
{
#if LG_SIZEOF_PTR == LG_SIZEOF_INT
	return (ffs_u(bitmap));
#elif LG_SIZEOF_PTR == LG_SIZEOF_LONG
@ -57,8 +53,7 @@ ffs_zu(size_t bitmap)
}

JEMALLOC_ALWAYS_INLINE unsigned
ffs_u64(uint64_t bitmap)
ffs_u64(uint64_t bitmap) {
{
#if LG_SIZEOF_LONG == 3
	return (ffs_lu(bitmap));
#elif LG_SIZEOF_LONG_LONG == 3
@ -69,8 +64,7 @@ ffs_u64(uint64_t bitmap)
}

JEMALLOC_ALWAYS_INLINE unsigned
ffs_u32(uint32_t bitmap)
ffs_u32(uint32_t bitmap) {
{
#if LG_SIZEOF_INT == 2
	return (ffs_u(bitmap));
#else
@ -80,8 +74,7 @@ ffs_u32(uint32_t bitmap)
}

JEMALLOC_INLINE uint64_t
pow2_ceil_u64(uint64_t x)
pow2_ceil_u64(uint64_t x) {
{
	x--;
	x |= x >> 1;
	x |= x >> 2;
@ -94,8 +87,7 @@ pow2_ceil_u64(uint64_t x)
}

JEMALLOC_INLINE uint32_t
pow2_ceil_u32(uint32_t x)
pow2_ceil_u32(uint32_t x) {
{
	x--;
	x |= x >> 1;
	x |= x >> 2;
@ -108,8 +100,7 @@ pow2_ceil_u32(uint32_t x)

/* Compute the smallest power of 2 that is >= x. */
JEMALLOC_INLINE size_t
pow2_ceil_zu(size_t x)
pow2_ceil_zu(size_t x) {
{
#if (LG_SIZEOF_PTR == 3)
	return (pow2_ceil_u64(x));
#else
@ -119,8 +110,7 @@ pow2_ceil_zu(size_t x)

#if (defined(__i386__) || defined(__amd64__) || defined(__x86_64__))
JEMALLOC_INLINE unsigned
lg_floor(size_t x)
lg_floor(size_t x) {
{
	size_t ret;

	assert(x != 0);
@ -134,8 +124,7 @@ lg_floor(size_t x)
}
#elif (defined(_MSC_VER))
JEMALLOC_INLINE unsigned
lg_floor(size_t x)
lg_floor(size_t x) {
{
	unsigned long ret;

	assert(x != 0);
@ -152,8 +141,7 @@ lg_floor(size_t x)
}
#elif (defined(JEMALLOC_HAVE_BUILTIN_CLZ))
JEMALLOC_INLINE unsigned
lg_floor(size_t x)
lg_floor(size_t x) {
{
	assert(x != 0);

#if (LG_SIZEOF_PTR == LG_SIZEOF_INT)
@ -166,8 +154,7 @@ lg_floor(size_t x)
}
#else
JEMALLOC_INLINE unsigned
lg_floor(size_t x)
lg_floor(size_t x) {
{
	assert(x != 0);

	x |= (x >> 1);
@ -178,8 +165,9 @@ lg_floor(size_t x)
#if (LG_SIZEOF_PTR == 3)
	x |= (x >> 32);
#endif
	if (x == SIZE_T_MAX)
	if (x == SIZE_T_MAX) {
		return ((8 << LG_SIZEOF_PTR) - 1);
	}
	x++;
	return (ffs_zu(x) - 2);
}
@ -187,8 +175,7 @@ lg_floor(size_t x)

/* Set error code. */
JEMALLOC_INLINE void
set_errno(int errnum)
set_errno(int errnum) {
{
#ifdef _WIN32
	SetLastError(errnum);
#else
@ -198,8 +185,7 @@ set_errno(int errnum)

/* Get last error code. */
JEMALLOC_INLINE int
get_errno(void)
get_errno(void) {
{
#ifdef _WIN32
	return (GetLastError());
#else
@ -87,8 +87,9 @@

/* Use to assert a particular configuration, e.g., cassert(config_debug). */
#define cassert(c) do { \
	if (unlikely(!(c))) \
	if (unlikely(!(c))) { \
		not_reached(); \
	} \
} while (0)

#endif /* JEMALLOC_INTERNAL_UTIL_TYPES_H */
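The cassert hunk above adds braces inside a do { ... } while (0) wrapper, where every line, including the ones that only carry the new braces, must end in a backslash so the macro stays a single statement. A minimal, self-contained sketch of that shape; the CHECK_POSITIVE macro is illustrative only, not from jemalloc.

```c
#include <stdio.h>
#include <stdlib.h>

/*
 * A do { ... } while (0) wrapper keeps the macro usable as one statement
 * (e.g. inside an unbraced if/else); the braces added around the body of
 * the inner if need their own continuation backslashes.
 */
#define CHECK_POSITIVE(x) do { \
	if ((x) <= 0) { \
		fprintf(stderr, "non-positive value: %d\n", (x)); \
		abort(); \
	} \
} while (0)

int
main(void) {
	int v = 3;
	CHECK_POSITIVE(v);
	printf("ok: %d\n", v);
	return 0;
}
```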
@ -13,8 +13,7 @@ void witness_unlock(tsdn_t *tsdn, witness_t *witness);
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_MUTEX_C_))
/* Helper, not intended for direct use. */
JEMALLOC_INLINE bool
witness_owner(tsd_t *tsd, const witness_t *witness)
witness_owner(tsd_t *tsd, const witness_t *witness) {
{
	witness_list_t *witnesses;
	witness_t *w;

@ -22,90 +21,101 @@ witness_owner(tsd_t *tsd, const witness_t *witness)

	witnesses = tsd_witnessesp_get(tsd);
	ql_foreach(w, witnesses, link) {
		if (w == witness)
		if (w == witness) {
			return (true);
		}
	}

	return (false);
}

JEMALLOC_INLINE void
witness_assert_owner(tsdn_t *tsdn, const witness_t *witness)
witness_assert_owner(tsdn_t *tsdn, const witness_t *witness) {
{
	tsd_t *tsd;

	if (!config_debug)
	if (!config_debug) {
		return;
	}

	if (tsdn_null(tsdn))
	if (tsdn_null(tsdn)) {
		return;
	}
	tsd = tsdn_tsd(tsdn);
	if (witness->rank == WITNESS_RANK_OMIT)
	if (witness->rank == WITNESS_RANK_OMIT) {
		return;
	}

	if (witness_owner(tsd, witness))
	if (witness_owner(tsd, witness)) {
		return;
	}
	witness_owner_error(witness);
}

JEMALLOC_INLINE void
witness_assert_not_owner(tsdn_t *tsdn, const witness_t *witness)
witness_assert_not_owner(tsdn_t *tsdn, const witness_t *witness) {
{
	tsd_t *tsd;
	witness_list_t *witnesses;
	witness_t *w;

	if (!config_debug)
	if (!config_debug) {
		return;
	}

	if (tsdn_null(tsdn))
	if (tsdn_null(tsdn)) {
		return;
	}
	tsd = tsdn_tsd(tsdn);
	if (witness->rank == WITNESS_RANK_OMIT)
	if (witness->rank == WITNESS_RANK_OMIT) {
		return;
	}

	witnesses = tsd_witnessesp_get(tsd);
	ql_foreach(w, witnesses, link) {
		if (w == witness)
		if (w == witness) {
			witness_not_owner_error(witness);
		}
	}
}

JEMALLOC_INLINE void
witness_assert_lockless(tsdn_t *tsdn)
witness_assert_lockless(tsdn_t *tsdn) {
{
	tsd_t *tsd;
	witness_list_t *witnesses;
	witness_t *w;

	if (!config_debug)
	if (!config_debug) {
		return;
	}

	if (tsdn_null(tsdn))
	if (tsdn_null(tsdn)) {
		return;
	}
	tsd = tsdn_tsd(tsdn);

	witnesses = tsd_witnessesp_get(tsd);
	w = ql_last(witnesses, link);
	if (w != NULL)
	if (w != NULL) {
		witness_lockless_error(witnesses);
	}
}

JEMALLOC_INLINE void
witness_lock(tsdn_t *tsdn, witness_t *witness)
witness_lock(tsdn_t *tsdn, witness_t *witness) {
{
	tsd_t *tsd;
	witness_list_t *witnesses;
	witness_t *w;

	if (!config_debug)
	if (!config_debug) {
		return;
	}

	if (tsdn_null(tsdn))
	if (tsdn_null(tsdn)) {
		return;
	}
	tsd = tsdn_tsd(tsdn);
	if (witness->rank == WITNESS_RANK_OMIT)
	if (witness->rank == WITNESS_RANK_OMIT) {
		return;
	}

	witness_assert_not_owner(tsdn, witness);

@ -133,19 +143,21 @@ witness_lock(tsdn_t *tsdn, witness_t *witness)
}

JEMALLOC_INLINE void
witness_unlock(tsdn_t *tsdn, witness_t *witness)
witness_unlock(tsdn_t *tsdn, witness_t *witness) {
{
	tsd_t *tsd;
	witness_list_t *witnesses;

	if (!config_debug)
	if (!config_debug) {
		return;
	}

	if (tsdn_null(tsdn))
	if (tsdn_null(tsdn)) {
		return;
	}
	tsd = tsdn_tsd(tsdn);
	if (witness->rank == WITNESS_RANK_OMIT)
	if (witness->rank == WITNESS_RANK_OMIT) {
		return;
	}

	/*
	 * Check whether owner before removal, rather than relying on
@ -155,9 +167,10 @@ witness_unlock(tsdn_t *tsdn, witness_t *witness)
	if (witness_owner(tsd, witness)) {
		witnesses = tsd_witnessesp_get(tsd);
		ql_remove(witnesses, witness, link);
	} else
	} else {
		witness_assert_owner(tsdn, witness);
	}
}
#endif

#endif /* JEMALLOC_INTERNAL_WITNESS_INLINES_H */
@@ -6,17 +6,16 @@
 #ifdef _MSC_VER
 # include <intrin.h>
 # pragma intrinsic(_BitScanForward)
-static __forceinline int ffsl(long x)
-{
+static __forceinline int ffsl(long x) {
 	unsigned long i;
 
-	if (_BitScanForward(&i, x))
+	if (_BitScanForward(&i, x)) {
 		return (i + 1);
+	}
 	return (0);
 }
 
-static __forceinline int ffs(int x)
-{
+static __forceinline int ffs(int x) {
 	return (ffsl(x));
 }
 
@@ -24,12 +23,12 @@ static __forceinline int ffs(int x)
 # pragma intrinsic(_BitScanForward64)
 # endif
 
-static __forceinline int ffsll(unsigned __int64 x)
-{
+static __forceinline int ffsll(unsigned __int64 x) {
 	unsigned long i;
 #ifdef _M_X64
-	if (_BitScanForward64(&i, x))
+	if (_BitScanForward64(&i, x)) {
 		return (i + 1);
+	}
 	return (0);
 #else
 	// Fallback for 32-bit build where 64-bit version not available
@@ -41,10 +40,11 @@ static __forceinline int ffsll(unsigned __int64 x)
 
 	s.ll = x;
 
-	if (_BitScanForward(&i, s.l[0]))
+	if (_BitScanForward(&i, s.l[0])) {
 		return (i + 1);
-	else if(_BitScanForward(&i, s.l[1]))
+	} else if(_BitScanForward(&i, s.l[1])) {
 		return (i + 33);
+	}
 	return (0);
 #endif
 }
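The 32-bit fallback above scans the low and high halves of the 64-bit value separately; _BitScanForward returns a 0-based bit index, so the low word yields i + 1 and the high word yields i + 33. A minimal, portable sketch of the same idea, using hypothetical helper names rather than the MSVC intrinsics, for readers without an MSVC toolchain:

#include <stdint.h>

/* 1-based index of the lowest set bit of a 32-bit word, 0 if none. */
static int ffs32_sketch(uint32_t x) {
	for (int i = 0; i < 32; i++) {
		if (x & (1u << i)) {
			return (i + 1);
		}
	}
	return (0);
}

/* Combine two 32-bit scans, mirroring the fallback branch in the diff. */
static int ffs64_sketch(uint64_t x) {
	int r = ffs32_sketch((uint32_t)x);		/* low word: bits 1..32 */
	if (r != 0) {
		return (r);
	}
	r = ffs32_sketch((uint32_t)(x >> 32));		/* high word: bits 33..64 */
	return (r != 0 ? r + 32 : 0);
}

Here ffs32_sketch already returns a 1-based index, so the high word adds 32 rather than 33; the result is the same as the intrinsic-based code.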
@@ -16,8 +16,7 @@ using std::thread;
 using std::uniform_int_distribution;
 using std::minstd_rand;
 
-int test_threads()
-{
+int test_threads() {
 	je_malloc_conf = "narenas:3";
 	int narenas = 0;
 	size_t sz = sizeof(narenas);
@@ -5,8 +5,7 @@
 
 using namespace std::chrono_literals;
 
-int main(int argc, char** argv)
-{
+int main(int argc, char** argv) {
 	int rc = test_threads();
 	return rc;
 }
516 src/arena.c
File diff suppressed because it is too large.
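The suppressed src/arena.c diff applies the same mechanical transformation shown in the hunks elsewhere in this commit: braces are added around single-line blocks and the function-opening brace moves onto the signature line. A minimal before/after sketch (the function and type are hypothetical, not taken from arena.c):

typedef struct { int count; } widget_t;	/* hypothetical type for illustration */

/* Before: brace on its own line, single-line block without braces. */
static int
widget_count_before(const widget_t *w)
{
	if (w == NULL)
		return (0);
	return (w->count);
}

/* After: opening brace on the signature line, braces around the single-line block. */
static int
widget_count_after(const widget_t *w) {
	if (w == NULL) {
		return (0);
	}
	return (w->count);
}

The behavior is unchanged; only the brace placement differs.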
99 src/base.c
@@ -9,17 +9,16 @@ static base_t *b0;
 /******************************************************************************/
 
 static void *
-base_map(extent_hooks_t *extent_hooks, unsigned ind, size_t size)
-{
+base_map(extent_hooks_t *extent_hooks, unsigned ind, size_t size) {
 	void *addr;
 	bool zero = true;
 	bool commit = true;
 
 	assert(size == HUGEPAGE_CEILING(size));
 
-	if (extent_hooks == &extent_hooks_default)
+	if (extent_hooks == &extent_hooks_default) {
 		addr = extent_alloc_mmap(NULL, size, PAGE, &zero, &commit);
-	else {
+	} else {
 		addr = extent_hooks->alloc(extent_hooks, NULL, size, PAGE,
 		    &zero, &commit, ind);
 	}
@@ -28,8 +27,8 @@ base_map(extent_hooks_t *extent_hooks, unsigned ind, size_t size)
 }
 
 static void
-base_unmap(extent_hooks_t *extent_hooks, unsigned ind, void *addr, size_t size)
-{
+base_unmap(extent_hooks_t *extent_hooks, unsigned ind, void *addr,
+    size_t size) {
 	/*
 	 * Cascade through dalloc, decommit, purge_lazy, and purge_forced,
 	 * stopping at first success. This cascade is performed for consistency
@@ -41,40 +40,48 @@ base_unmap(extent_hooks_t *extent_hooks, unsigned ind, void *addr, size_t size)
 	 * some consistent-but-allocated state.
 	 */
 	if (extent_hooks == &extent_hooks_default) {
-		if (!extent_dalloc_mmap(addr, size))
+		if (!extent_dalloc_mmap(addr, size)) {
 			return;
-		if (!pages_decommit(addr, size))
+		}
+		if (!pages_decommit(addr, size)) {
 			return;
-		if (!pages_purge_lazy(addr, size))
+		}
+		if (!pages_purge_lazy(addr, size)) {
 			return;
-		if (!pages_purge_forced(addr, size))
+		}
+		if (!pages_purge_forced(addr, size)) {
 			return;
+		}
 		/* Nothing worked. This should never happen. */
 		not_reached();
 	} else {
 		if (extent_hooks->dalloc != NULL &&
-		    !extent_hooks->dalloc(extent_hooks, addr, size, true, ind))
+		    !extent_hooks->dalloc(extent_hooks, addr, size, true,
+		    ind)) {
 			return;
+		}
 		if (extent_hooks->decommit != NULL &&
 		    !extent_hooks->decommit(extent_hooks, addr, size, 0, size,
-		    ind))
+		    ind)) {
 			return;
+		}
 		if (extent_hooks->purge_lazy != NULL &&
 		    !extent_hooks->purge_lazy(extent_hooks, addr, size, 0, size,
-		    ind))
+		    ind)) {
 			return;
+		}
 		if (extent_hooks->purge_forced != NULL &&
 		    !extent_hooks->purge_forced(extent_hooks, addr, size, 0,
-		    size, ind))
+		    size, ind)) {
 			return;
+		}
 		/* Nothing worked. That's the application's problem. */
 	}
 }
 
 static void
 base_extent_init(size_t *extent_sn_next, extent_t *extent, void *addr,
-    size_t size)
-{
+    size_t size) {
 	size_t sn;
 
 	sn = *extent_sn_next;
@@ -85,8 +92,7 @@ base_extent_init(size_t *extent_sn_next, extent_t *extent, void *addr,
 
 static void *
 base_extent_bump_alloc_helper(extent_t *extent, size_t *gap_size, size_t size,
-    size_t alignment)
-{
+    size_t alignment) {
 	void *ret;
 
 	assert(alignment == ALIGNMENT_CEILING(alignment, QUANTUM));
@@ -104,8 +110,7 @@ base_extent_bump_alloc_helper(extent_t *extent, size_t *gap_size, size_t size,
 
 static void
 base_extent_bump_alloc_post(tsdn_t *tsdn, base_t *base, extent_t *extent,
-    size_t gap_size, void *addr, size_t size)
-{
+    size_t gap_size, void *addr, size_t size) {
 	if (extent_size_get(extent) > 0) {
 		/*
 		 * Compute the index for the largest size class that does not
@@ -131,8 +136,7 @@ base_extent_bump_alloc_post(tsdn_t *tsdn, base_t *base, extent_t *extent,
 
 static void *
 base_extent_bump_alloc(tsdn_t *tsdn, base_t *base, extent_t *extent,
-    size_t size, size_t alignment)
-{
+    size_t size, size_t alignment) {
 	void *ret;
 	size_t gap_size;
 
@@ -148,8 +152,7 @@ base_extent_bump_alloc(tsdn_t *tsdn, base_t *base, extent_t *extent,
  */
 static base_block_t *
 base_block_alloc(extent_hooks_t *extent_hooks, unsigned ind,
-    size_t *extent_sn_next, size_t size, size_t alignment)
-{
+    size_t *extent_sn_next, size_t size, size_t alignment) {
 	base_block_t *block;
 	size_t usize, header_size, gap_size, block_size;
 
@@ -159,8 +162,9 @@ base_block_alloc(extent_hooks_t *extent_hooks, unsigned ind,
 	gap_size = ALIGNMENT_CEILING(header_size, alignment) - header_size;
 	block_size = HUGEPAGE_CEILING(header_size + gap_size + usize);
 	block = (base_block_t *)base_map(extent_hooks, ind, block_size);
-	if (block == NULL)
+	if (block == NULL) {
 		return (NULL);
+	}
 	block->size = block_size;
 	block->next = NULL;
 	assert(block_size >= header_size);
@@ -174,8 +178,7 @@ base_block_alloc(extent_hooks_t *extent_hooks, unsigned ind,
  * specified alignment.
  */
 static extent_t *
-base_extent_alloc(tsdn_t *tsdn, base_t *base, size_t size, size_t alignment)
-{
+base_extent_alloc(tsdn_t *tsdn, base_t *base, size_t size, size_t alignment) {
 	extent_hooks_t *extent_hooks = base_extent_hooks_get(base);
 	base_block_t *block;
 
@@ -183,8 +186,9 @@ base_extent_alloc(tsdn_t *tsdn, base_t *base, size_t size, size_t alignment)
 
 	block = base_block_alloc(extent_hooks, base_ind_get(base),
 	    &base->extent_sn_next, size, alignment);
-	if (block == NULL)
+	if (block == NULL) {
 		return (NULL);
+	}
 	block->next = base->blocks;
 	base->blocks = block;
 	if (config_stats) {
@@ -198,14 +202,12 @@ base_extent_alloc(tsdn_t *tsdn, base_t *base, size_t size, size_t alignment)
 }
 
 base_t *
-b0get(void)
-{
+b0get(void) {
 	return (b0);
 }
 
 base_t *
-base_new(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks)
-{
+base_new(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks) {
 	base_t *base;
 	size_t extent_sn_next, base_alignment, base_size, gap_size;
 	base_block_t *block;
@@ -214,8 +216,9 @@ base_new(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks)
 	extent_sn_next = 0;
 	block = base_block_alloc(extent_hooks, ind, &extent_sn_next,
 	    sizeof(base_t), QUANTUM);
-	if (block == NULL)
+	if (block == NULL) {
 		return (NULL);
+	}
 
 	base_alignment = CACHELINE;
 	base_size = ALIGNMENT_CEILING(sizeof(base_t), base_alignment);
@@ -229,8 +232,9 @@ base_new(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks)
 	}
 	base->extent_sn_next = extent_sn_next;
 	base->blocks = block;
-	for (i = 0; i < NSIZES; i++)
+	for (i = 0; i < NSIZES; i++) {
 		extent_heap_new(&base->avail[i]);
+	}
 	if (config_stats) {
 		base->allocated = sizeof(base_block_t);
 		base->resident = PAGE_CEILING(sizeof(base_block_t));
@@ -245,8 +249,7 @@ base_new(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks)
 }
 
 void
-base_delete(base_t *base)
-{
+base_delete(base_t *base) {
 	extent_hooks_t *extent_hooks = base_extent_hooks_get(base);
 	base_block_t *next = base->blocks;
 	do {
@@ -258,14 +261,12 @@ base_delete(base_t *base)
 }
 
 extent_hooks_t *
-base_extent_hooks_get(base_t *base)
-{
+base_extent_hooks_get(base_t *base) {
 	return ((extent_hooks_t *)atomic_read_p(&base->extent_hooks_pun));
 }
 
 extent_hooks_t *
-base_extent_hooks_set(base_t *base, extent_hooks_t *extent_hooks)
-{
+base_extent_hooks_set(base_t *base, extent_hooks_t *extent_hooks) {
 	extent_hooks_t *old_extent_hooks = base_extent_hooks_get(base);
 	union {
 		extent_hooks_t **h;
@@ -287,8 +288,7 @@ base_extent_hooks_set(base_t *base, extent_hooks_t *extent_hooks)
  * sharing.
  */
 void *
-base_alloc(tsdn_t *tsdn, base_t *base, size_t size, size_t alignment)
-{
+base_alloc(tsdn_t *tsdn, base_t *base, size_t size, size_t alignment) {
 	void *ret;
 	size_t usize, asize;
 	szind_t i;
@@ -324,8 +324,7 @@ label_return:
 
 void
 base_stats_get(tsdn_t *tsdn, base_t *base, size_t *allocated, size_t *resident,
-    size_t *mapped)
-{
+    size_t *mapped) {
 	cassert(config_stats);
 
 	malloc_mutex_lock(tsdn, &base->mtx);
@@ -338,26 +337,22 @@ base_stats_get(tsdn_t *tsdn, base_t *base, size_t *allocated, size_t *resident,
 }
 
 void
-base_prefork(tsdn_t *tsdn, base_t *base)
-{
+base_prefork(tsdn_t *tsdn, base_t *base) {
 	malloc_mutex_prefork(tsdn, &base->mtx);
 }
 
 void
-base_postfork_parent(tsdn_t *tsdn, base_t *base)
-{
+base_postfork_parent(tsdn_t *tsdn, base_t *base) {
 	malloc_mutex_postfork_parent(tsdn, &base->mtx);
 }
 
 void
-base_postfork_child(tsdn_t *tsdn, base_t *base)
-{
+base_postfork_child(tsdn_t *tsdn, base_t *base) {
 	malloc_mutex_postfork_child(tsdn, &base->mtx);
 }
 
 bool
-base_boot(tsdn_t *tsdn)
-{
+base_boot(tsdn_t *tsdn) {
 	b0 = base_new(tsdn, 0, (extent_hooks_t *)&extent_hooks_default);
 	return (b0 == NULL);
 }
30 src/bitmap.c
@@ -6,8 +6,7 @@
 #ifdef BITMAP_USE_TREE
 
 void
-bitmap_info_init(bitmap_info_t *binfo, size_t nbits)
-{
+bitmap_info_init(bitmap_info_t *binfo, size_t nbits) {
 	unsigned i;
 	size_t group_count;
 
@@ -35,14 +34,12 @@ bitmap_info_init(bitmap_info_t *binfo, size_t nbits)
 }
 
 static size_t
-bitmap_info_ngroups(const bitmap_info_t *binfo)
-{
+bitmap_info_ngroups(const bitmap_info_t *binfo) {
 	return (binfo->levels[binfo->nlevels].group_offset);
 }
 
 void
-bitmap_init(bitmap_t *bitmap, const bitmap_info_t *binfo)
-{
+bitmap_init(bitmap_t *bitmap, const bitmap_info_t *binfo) {
 	size_t extra;
 	unsigned i;
 
@@ -56,23 +53,24 @@ bitmap_init(bitmap_t *bitmap, const bitmap_info_t *binfo)
 	memset(bitmap, 0xffU, bitmap_size(binfo));
 	extra = (BITMAP_GROUP_NBITS - (binfo->nbits & BITMAP_GROUP_NBITS_MASK))
 	    & BITMAP_GROUP_NBITS_MASK;
-	if (extra != 0)
+	if (extra != 0) {
 		bitmap[binfo->levels[1].group_offset - 1] >>= extra;
+	}
 	for (i = 1; i < binfo->nlevels; i++) {
 		size_t group_count = binfo->levels[i].group_offset -
 		    binfo->levels[i-1].group_offset;
 		extra = (BITMAP_GROUP_NBITS - (group_count &
 		    BITMAP_GROUP_NBITS_MASK)) & BITMAP_GROUP_NBITS_MASK;
-		if (extra != 0)
+		if (extra != 0) {
 			bitmap[binfo->levels[i+1].group_offset - 1] >>= extra;
+		}
 	}
 }
 
 #else /* BITMAP_USE_TREE */
 
 void
-bitmap_info_init(bitmap_info_t *binfo, size_t nbits)
-{
+bitmap_info_init(bitmap_info_t *binfo, size_t nbits) {
 	assert(nbits > 0);
 	assert(nbits <= (ZU(1) << LG_BITMAP_MAXBITS));
 
@@ -81,27 +79,25 @@ bitmap_info_init(bitmap_info_t *binfo, size_t nbits)
 }
 
 static size_t
-bitmap_info_ngroups(const bitmap_info_t *binfo)
-{
+bitmap_info_ngroups(const bitmap_info_t *binfo) {
 	return (binfo->ngroups);
 }
 
 void
-bitmap_init(bitmap_t *bitmap, const bitmap_info_t *binfo)
-{
+bitmap_init(bitmap_t *bitmap, const bitmap_info_t *binfo) {
 	size_t extra;
 
 	memset(bitmap, 0xffU, bitmap_size(binfo));
 	extra = (BITMAP_GROUP_NBITS - (binfo->nbits & BITMAP_GROUP_NBITS_MASK))
 	    & BITMAP_GROUP_NBITS_MASK;
-	if (extra != 0)
+	if (extra != 0) {
 		bitmap[binfo->ngroups - 1] >>= extra;
+	}
 }
 
 #endif /* BITMAP_USE_TREE */
 
 size_t
-bitmap_size(const bitmap_info_t *binfo)
-{
+bitmap_size(const bitmap_info_t *binfo) {
 	return (bitmap_info_ngroups(binfo) << LG_SIZEOF_BITMAP);
 }
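In bitmap_init above, extra is the number of unused bit positions in the last group, and shifting the all-ones group right by extra clears exactly those positions. A small worked example under the assumption of 64-bit groups (the constants and names here are stand-ins for illustration, not the jemalloc macros):

#include <stdint.h>
#include <stdio.h>

#define GROUP_NBITS      64
#define GROUP_NBITS_MASK (GROUP_NBITS - 1)

int main(void) {
    size_t nbits = 100;	/* a 100-bit bitmap needs two 64-bit groups */
    size_t extra = (GROUP_NBITS - (nbits & GROUP_NBITS_MASK)) & GROUP_NBITS_MASK;
    /* Same effect as bitmap[ngroups - 1] >>= extra on an all-ones group. */
    uint64_t last_group = UINT64_MAX >> extra;

    /* extra == 28, so only the 36 usable bits of the last group stay set. */
    printf("extra = %zu, last group = 0x%016llx\n",
        extra, (unsigned long long)last_group);
    return 0;
}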
101 src/ckh.c
@@ -50,16 +50,16 @@ static void ckh_shrink(tsd_t *tsd, ckh_t *ckh);
  * otherwise.
  */
 JEMALLOC_INLINE_C size_t
-ckh_bucket_search(ckh_t *ckh, size_t bucket, const void *key)
-{
+ckh_bucket_search(ckh_t *ckh, size_t bucket, const void *key) {
 	ckhc_t *cell;
 	unsigned i;
 
 	for (i = 0; i < (ZU(1) << LG_CKH_BUCKET_CELLS); i++) {
 		cell = &ckh->tab[(bucket << LG_CKH_BUCKET_CELLS) + i];
-		if (cell->key != NULL && ckh->keycomp(key, cell->key))
+		if (cell->key != NULL && ckh->keycomp(key, cell->key)) {
 			return ((bucket << LG_CKH_BUCKET_CELLS) + i);
+		}
 	}
 
 	return (SIZE_T_MAX);
 }
@@ -68,8 +68,7 @@ ckh_bucket_search(ckh_t *ckh, size_t bucket, const void *key)
  * Search table for key and return cell number if found; SIZE_T_MAX otherwise.
  */
 JEMALLOC_INLINE_C size_t
-ckh_isearch(ckh_t *ckh, const void *key)
-{
+ckh_isearch(ckh_t *ckh, const void *key) {
 	size_t hashes[2], bucket, cell;
 
 	assert(ckh != NULL);
@@ -79,8 +78,9 @@ ckh_isearch(ckh_t *ckh, const void *key)
 	/* Search primary bucket. */
 	bucket = hashes[0] & ((ZU(1) << ckh->lg_curbuckets) - 1);
 	cell = ckh_bucket_search(ckh, bucket, key);
-	if (cell != SIZE_T_MAX)
+	if (cell != SIZE_T_MAX) {
 		return (cell);
+	}
 
 	/* Search secondary bucket. */
 	bucket = hashes[1] & ((ZU(1) << ckh->lg_curbuckets) - 1);
@@ -90,8 +90,7 @@ ckh_isearch(ckh_t *ckh, const void *key)
 
 JEMALLOC_INLINE_C bool
 ckh_try_bucket_insert(ckh_t *ckh, size_t bucket, const void *key,
-    const void *data)
-{
+    const void *data) {
 	ckhc_t *cell;
 	unsigned offset, i;
 
@@ -123,8 +122,7 @@ ckh_try_bucket_insert(ckh_t *ckh, size_t bucket, const void *key,
  */
 JEMALLOC_INLINE_C bool
 ckh_evict_reloc_insert(ckh_t *ckh, size_t argbucket, void const **argkey,
-    void const **argdata)
-{
+    void const **argdata) {
 	const void *key, *data, *tkey, *tdata;
 	ckhc_t *cell;
 	size_t hashes[2], bucket, tbucket;
@@ -187,14 +185,14 @@ ckh_evict_reloc_insert(ckh_t *ckh, size_t argbucket, void const **argkey,
 		}
 
 		bucket = tbucket;
-		if (!ckh_try_bucket_insert(ckh, bucket, key, data))
+		if (!ckh_try_bucket_insert(ckh, bucket, key, data)) {
 			return (false);
+		}
 	}
 }
 
 JEMALLOC_INLINE_C bool
-ckh_try_insert(ckh_t *ckh, void const**argkey, void const**argdata)
-{
+ckh_try_insert(ckh_t *ckh, void const**argkey, void const**argdata) {
 	size_t hashes[2], bucket;
 	const void *key = *argkey;
 	const void *data = *argdata;
@@ -203,13 +201,15 @@ ckh_try_insert(ckh_t *ckh, void const**argkey, void const**argdata)
 
 	/* Try to insert in primary bucket. */
 	bucket = hashes[0] & ((ZU(1) << ckh->lg_curbuckets) - 1);
-	if (!ckh_try_bucket_insert(ckh, bucket, key, data))
+	if (!ckh_try_bucket_insert(ckh, bucket, key, data)) {
 		return (false);
+	}
 
 	/* Try to insert in secondary bucket. */
 	bucket = hashes[1] & ((ZU(1) << ckh->lg_curbuckets) - 1);
-	if (!ckh_try_bucket_insert(ckh, bucket, key, data))
+	if (!ckh_try_bucket_insert(ckh, bucket, key, data)) {
 		return (false);
+	}
 
 	/*
 	 * Try to find a place for this item via iterative eviction/relocation.
@@ -222,8 +222,7 @@ ckh_try_insert(ckh_t *ckh, void const**argkey, void const**argdata)
  * old table into the new.
 */
 JEMALLOC_INLINE_C bool
-ckh_rebuild(ckh_t *ckh, ckhc_t *aTab)
-{
+ckh_rebuild(ckh_t *ckh, ckhc_t *aTab) {
 	size_t count, i, nins;
 	const void *key, *data;
 
@@ -245,8 +244,7 @@ ckh_rebuild(ckh_t *ckh, ckhc_t *aTab)
 }
 
 static bool
-ckh_grow(tsd_t *tsd, ckh_t *ckh)
-{
+ckh_grow(tsd_t *tsd, ckh_t *ckh) {
 	bool ret;
 	ckhc_t *tab, *ttab;
 	unsigned lg_prevbuckets, lg_curcells;
@@ -302,8 +300,7 @@ label_return:
 }
 
 static void
-ckh_shrink(tsd_t *tsd, ckh_t *ckh)
-{
+ckh_shrink(tsd_t *tsd, ckh_t *ckh) {
 	ckhc_t *tab, *ttab;
 	size_t usize;
 	unsigned lg_prevbuckets, lg_curcells;
@@ -315,8 +312,9 @@ ckh_shrink(tsd_t *tsd, ckh_t *ckh)
 	lg_prevbuckets = ckh->lg_curbuckets;
 	lg_curcells = ckh->lg_curbuckets + LG_CKH_BUCKET_CELLS - 1;
 	usize = sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);
-	if (unlikely(usize == 0 || usize > LARGE_MAXCLASS))
+	if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {
 		return;
+	}
 	tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, true, NULL,
 	    true, arena_ichoose(tsd, NULL));
 	if (tab == NULL) {
@@ -353,8 +351,7 @@ ckh_shrink(tsd_t *tsd, ckh_t *ckh)
 
 bool
 ckh_new(tsd_t *tsd, ckh_t *ckh, size_t minitems, ckh_hash_t *hash,
-    ckh_keycomp_t *keycomp)
-{
+    ckh_keycomp_t *keycomp) {
 	bool ret;
 	size_t mincells, usize;
 	unsigned lg_mincells;
@@ -384,8 +381,9 @@ ckh_new(tsd_t *tsd, ckh_t *ckh, size_t minitems, ckh_hash_t *hash,
 	mincells = ((minitems + (3 - (minitems % 3))) / 3) << 2;
 	for (lg_mincells = LG_CKH_BUCKET_CELLS;
 	    (ZU(1) << lg_mincells) < mincells;
-	    lg_mincells++)
-		; /* Do nothing. */
+	    lg_mincells++) {
+		/* Do nothing. */
+	}
 	ckh->lg_minbuckets = lg_mincells - LG_CKH_BUCKET_CELLS;
 	ckh->lg_curbuckets = lg_mincells - LG_CKH_BUCKET_CELLS;
 	ckh->hash = hash;
@@ -409,8 +407,7 @@ label_return:
 }
 
 void
-ckh_delete(tsd_t *tsd, ckh_t *ckh)
-{
+ckh_delete(tsd_t *tsd, ckh_t *ckh) {
 	assert(ckh != NULL);
 
 #ifdef CKH_VERBOSE
@@ -427,30 +424,31 @@ ckh_delete(tsd_t *tsd, ckh_t *ckh)
 
 	idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), ckh->tab), ckh->tab,
 	    NULL, true, true);
-	if (config_debug)
+	if (config_debug) {
 		memset(ckh, JEMALLOC_FREE_JUNK, sizeof(ckh_t));
+	}
 }
 
 size_t
-ckh_count(ckh_t *ckh)
-{
+ckh_count(ckh_t *ckh) {
 	assert(ckh != NULL);
 
 	return (ckh->count);
 }
 
 bool
-ckh_iter(ckh_t *ckh, size_t *tabind, void **key, void **data)
-{
+ckh_iter(ckh_t *ckh, size_t *tabind, void **key, void **data) {
 	size_t i, ncells;
 
 	for (i = *tabind, ncells = (ZU(1) << (ckh->lg_curbuckets +
 	    LG_CKH_BUCKET_CELLS)); i < ncells; i++) {
 		if (ckh->tab[i].key != NULL) {
-			if (key != NULL)
+			if (key != NULL) {
 				*key = (void *)ckh->tab[i].key;
-			if (data != NULL)
+			}
+			if (data != NULL) {
 				*data = (void *)ckh->tab[i].data;
+			}
 			*tabind = i + 1;
 			return (false);
 		}
@@ -460,8 +458,7 @@ ckh_iter(ckh_t *ckh, size_t *tabind, void **key, void **data)
 }
 
 bool
-ckh_insert(tsd_t *tsd, ckh_t *ckh, const void *key, const void *data)
-{
+ckh_insert(tsd_t *tsd, ckh_t *ckh, const void *key, const void *data) {
 	bool ret;
 
 	assert(ckh != NULL);
@@ -485,18 +482,19 @@ label_return:
 
 bool
 ckh_remove(tsd_t *tsd, ckh_t *ckh, const void *searchkey, void **key,
-    void **data)
-{
+    void **data) {
 	size_t cell;
 
 	assert(ckh != NULL);
 
 	cell = ckh_isearch(ckh, searchkey);
 	if (cell != SIZE_T_MAX) {
-		if (key != NULL)
+		if (key != NULL) {
 			*key = (void *)ckh->tab[cell].key;
-		if (data != NULL)
+		}
+		if (data != NULL) {
 			*data = (void *)ckh->tab[cell].data;
+		}
 		ckh->tab[cell].key = NULL;
 		ckh->tab[cell].data = NULL; /* Not necessary. */
 
@@ -516,18 +514,19 @@ ckh_remove(tsd_t *tsd, ckh_t *ckh, const void *searchkey, void **key,
 }
 
 bool
-ckh_search(ckh_t *ckh, const void *searchkey, void **key, void **data)
-{
+ckh_search(ckh_t *ckh, const void *searchkey, void **key, void **data) {
 	size_t cell;
 
 	assert(ckh != NULL);
 
 	cell = ckh_isearch(ckh, searchkey);
 	if (cell != SIZE_T_MAX) {
-		if (key != NULL)
+		if (key != NULL) {
 			*key = (void *)ckh->tab[cell].key;
-		if (data != NULL)
+		}
+		if (data != NULL) {
 			*data = (void *)ckh->tab[cell].data;
+		}
 		return (false);
 	}
 
@@ -535,14 +534,12 @@ ckh_search(ckh_t *ckh, const void *searchkey, void **key, void **data)
 }
 
 void
-ckh_string_hash(const void *key, size_t r_hash[2])
-{
+ckh_string_hash(const void *key, size_t r_hash[2]) {
 	hash(key, strlen((const char *)key), 0x94122f33U, r_hash);
 }
 
 bool
-ckh_string_keycomp(const void *k1, const void *k2)
-{
+ckh_string_keycomp(const void *k1, const void *k2) {
 	assert(k1 != NULL);
 	assert(k2 != NULL);
 
@@ -550,8 +547,7 @@ ckh_string_keycomp(const void *k1, const void *k2)
 }
 
 void
-ckh_pointer_hash(const void *key, size_t r_hash[2])
-{
+ckh_pointer_hash(const void *key, size_t r_hash[2]) {
 	union {
 		const void *v;
 		size_t i;
@@ -563,7 +559,6 @@ ckh_pointer_hash(const void *key, size_t r_hash[2])
 }
 
 bool
-ckh_pointer_keycomp(const void *k1, const void *k2)
-{
+ckh_pointer_keycomp(const void *k1, const void *k2) {
 	return ((k1 == k2) ? true : false);
 }
344 src/ctl.c
@@ -17,22 +17,19 @@ static ctl_arenas_t *ctl_arenas;
 /* Helpers for named and indexed nodes. */
 
 JEMALLOC_INLINE_C const ctl_named_node_t *
-ctl_named_node(const ctl_node_t *node)
-{
+ctl_named_node(const ctl_node_t *node) {
 	return ((node->named) ? (const ctl_named_node_t *)node : NULL);
 }
 
 JEMALLOC_INLINE_C const ctl_named_node_t *
-ctl_named_children(const ctl_named_node_t *node, size_t index)
-{
+ctl_named_children(const ctl_named_node_t *node, size_t index) {
 	const ctl_named_node_t *children = ctl_named_node(node->children);
 
 	return (children ? &children[index] : NULL);
 }
 
 JEMALLOC_INLINE_C const ctl_indexed_node_t *
-ctl_indexed_node(const ctl_node_t *node)
-{
+ctl_indexed_node(const ctl_node_t *node) {
 	return (!node->named ? (const ctl_indexed_node_t *)node : NULL);
 }
 
@@ -433,8 +430,7 @@ static const ctl_named_node_t super_root_node[] = {
 /******************************************************************************/
 
 static unsigned
-arenas_i2a_impl(size_t i, bool compat, bool validate)
-{
+arenas_i2a_impl(size_t i, bool compat, bool validate) {
 	unsigned a;
 
 	switch (i) {
@@ -453,9 +449,9 @@ arenas_i2a_impl(size_t i, bool compat, bool validate)
 		 * removal in 6.0.0.
 		 */
 		a = 0;
-	} else if (validate && i >= ctl_arenas->narenas)
+	} else if (validate && i >= ctl_arenas->narenas) {
 		a = UINT_MAX;
-	else {
+	} else {
 		/*
 		 * This function should never be called for an index
 		 * more than one past the range of indices that have
@@ -472,14 +468,12 @@ arenas_i2a_impl(size_t i, bool compat, bool validate)
 }
 
 static unsigned
-arenas_i2a(size_t i)
-{
+arenas_i2a(size_t i) {
 	return (arenas_i2a_impl(i, true, false));
 }
 
 static ctl_arena_t *
-arenas_i_impl(tsdn_t *tsdn, size_t i, bool compat, bool init)
-{
+arenas_i_impl(tsdn_t *tsdn, size_t i, bool compat, bool init) {
 	ctl_arena_t *ret;
 
 	assert(!compat || !init);
@@ -515,16 +509,14 @@ arenas_i_impl(tsdn_t *tsdn, size_t i, bool compat, bool init)
 }
 
 static ctl_arena_t *
-arenas_i(size_t i)
-{
+arenas_i(size_t i) {
 	ctl_arena_t *ret = arenas_i_impl(TSDN_NULL, i, true, false);
 	assert(ret != NULL);
 	return (ret);
 }
 
 static void
-ctl_arena_clear(ctl_arena_t *ctl_arena)
-{
+ctl_arena_clear(ctl_arena_t *ctl_arena) {
 	ctl_arena->nthreads = 0;
 	ctl_arena->dss = dss_prec_names[dss_prec_limit];
 	ctl_arena->decay_time = -1;
@@ -544,8 +536,7 @@ ctl_arena_clear(ctl_arena_t *ctl_arena)
 }
 
 static void
-ctl_arena_stats_amerge(tsdn_t *tsdn, ctl_arena_t *ctl_arena, arena_t *arena)
-{
+ctl_arena_stats_amerge(tsdn_t *tsdn, ctl_arena_t *ctl_arena, arena_t *arena) {
 	unsigned i;
 
 	if (config_stats) {
@@ -575,8 +566,7 @@ ctl_arena_stats_amerge(tsdn_t *tsdn, ctl_arena_t *ctl_arena, arena_t *arena)
 
 static void
 ctl_arena_stats_sdmerge(ctl_arena_t *ctl_sdarena, ctl_arena_t *ctl_arena,
-    bool destroyed)
-{
+    bool destroyed) {
 	unsigned i;
 
 	if (!destroyed) {
@@ -605,13 +595,15 @@ ctl_arena_stats_sdmerge(ctl_arena_t *ctl_sdarena, ctl_arena_t *ctl_arena,
 		sdstats->astats.base += astats->astats.base;
 		sdstats->astats.internal += astats->astats.internal;
 		sdstats->astats.resident += astats->astats.resident;
-	} else
+	} else {
 		assert(astats->astats.internal == 0);
+	}
 
-	if (!destroyed)
+	if (!destroyed) {
 		sdstats->allocated_small += astats->allocated_small;
-	else
+	} else {
 		assert(astats->allocated_small == 0);
+	}
 	sdstats->nmalloc_small += astats->nmalloc_small;
 	sdstats->ndalloc_small += astats->ndalloc_small;
 	sdstats->nrequests_small += astats->nrequests_small;
@@ -619,8 +611,9 @@ ctl_arena_stats_sdmerge(ctl_arena_t *ctl_sdarena, ctl_arena_t *ctl_arena,
 	if (!destroyed) {
 		sdstats->astats.allocated_large +=
 		    astats->astats.allocated_large;
-	} else
+	} else {
 		assert(astats->astats.allocated_large == 0);
+	}
 	sdstats->astats.nmalloc_large += astats->astats.nmalloc_large;
 	sdstats->astats.ndalloc_large += astats->astats.ndalloc_large;
 	sdstats->astats.nrequests_large +=
@@ -639,8 +632,9 @@ ctl_arena_stats_sdmerge(ctl_arena_t *ctl_sdarena, ctl_arena_t *ctl_arena,
 		if (!destroyed) {
 			sdstats->bstats[i].curregs +=
 			    astats->bstats[i].curregs;
-		} else
+		} else {
 			assert(astats->bstats[i].curregs == 0);
+		}
 		if (config_tcache) {
 			sdstats->bstats[i].nfills +=
 			    astats->bstats[i].nfills;
@@ -652,9 +646,10 @@ ctl_arena_stats_sdmerge(ctl_arena_t *ctl_sdarena, ctl_arena_t *ctl_arena,
 		if (!destroyed) {
 			sdstats->bstats[i].curslabs +=
 			    astats->bstats[i].curslabs;
-		} else
+		} else {
 			assert(astats->bstats[i].curslabs == 0);
+		}
 	}
 
 	for (i = 0; i < NSIZES - NBINS; i++) {
 		sdstats->lstats[i].nmalloc += astats->lstats[i].nmalloc;
@@ -664,16 +659,16 @@ ctl_arena_stats_sdmerge(ctl_arena_t *ctl_sdarena, ctl_arena_t *ctl_arena,
 		if (!destroyed) {
 			sdstats->lstats[i].curlextents +=
 			    astats->lstats[i].curlextents;
-		} else
+		} else {
 			assert(astats->lstats[i].curlextents == 0);
+		}
 		}
 	}
 }
 
 static void
 ctl_arena_refresh(tsdn_t *tsdn, arena_t *arena, ctl_arena_t *ctl_sdarena,
-    unsigned i, bool destroyed)
-{
+    unsigned i, bool destroyed) {
 	ctl_arena_t *ctl_arena = arenas_i(i);
 
 	ctl_arena_clear(ctl_arena);
@@ -683,8 +678,7 @@ ctl_arena_refresh(tsdn_t *tsdn, arena_t *arena, ctl_arena_t *ctl_sdarena,
 }
 
 static unsigned
-ctl_arena_init(tsdn_t *tsdn, extent_hooks_t *extent_hooks)
-{
+ctl_arena_init(tsdn_t *tsdn, extent_hooks_t *extent_hooks) {
 	unsigned arena_ind;
 	ctl_arena_t *ctl_arena;
 
@@ -692,26 +686,29 @@ ctl_arena_init(tsdn_t *tsdn, extent_hooks_t *extent_hooks)
 	    NULL) {
 		ql_remove(&ctl_arenas->destroyed, ctl_arena, destroyed_link);
 		arena_ind = ctl_arena->arena_ind;
-	} else
+	} else {
 		arena_ind = ctl_arenas->narenas;
+	}
 
 	/* Trigger stats allocation. */
-	if (arenas_i_impl(tsdn, arena_ind, false, true) == NULL)
+	if (arenas_i_impl(tsdn, arena_ind, false, true) == NULL) {
 		return (UINT_MAX);
+	}
 
 	/* Initialize new arena. */
-	if (arena_init(tsdn, arena_ind, extent_hooks) == NULL)
+	if (arena_init(tsdn, arena_ind, extent_hooks) == NULL) {
 		return (UINT_MAX);
+	}
 
-	if (arena_ind == ctl_arenas->narenas)
+	if (arena_ind == ctl_arenas->narenas) {
 		ctl_arenas->narenas++;
+	}
 
 	return (arena_ind);
 }
 
 static void
-ctl_refresh(tsdn_t *tsdn)
-{
+ctl_refresh(tsdn_t *tsdn) {
 	unsigned i;
 	ctl_arena_t *ctl_sarena = arenas_i(MALLCTL_ARENAS_ALL);
 	VARIABLE_ARRAY(arena_t *, tarenas, ctl_arenas->narenas);
@@ -751,8 +748,7 @@ ctl_refresh(tsdn_t *tsdn)
 }
 
 static bool
-ctl_init(tsdn_t *tsdn)
-{
+ctl_init(tsdn_t *tsdn) {
 	bool ret;
 
 	malloc_mutex_lock(tsdn, &ctl_mtx);
@@ -828,8 +824,7 @@ label_return:
 
 static int
 ctl_lookup(tsdn_t *tsdn, const char *name, ctl_node_t const **nodesp,
-    size_t *mibp, size_t *depthp)
-{
+    size_t *mibp, size_t *depthp) {
 	int ret;
 	const char *elm, *tdot, *dot;
 	size_t elen, i, j;
@@ -857,9 +852,10 @@ ctl_lookup(tsdn_t *tsdn, const char *name, ctl_node_t const **nodesp,
 				if (strlen(child->name) == elen &&
 				    strncmp(elm, child->name, elen) == 0) {
 					node = child;
-					if (nodesp != NULL)
+					if (nodesp != NULL) {
 						nodesp[i] =
 						    (const ctl_node_t *)node;
+					}
 					mibp[i] = j;
 					break;
 				}
@@ -886,8 +882,9 @@ ctl_lookup(tsdn_t *tsdn, const char *name, ctl_node_t const **nodesp,
 				goto label_return;
 			}
 
-			if (nodesp != NULL)
+			if (nodesp != NULL) {
 				nodesp[i] = (const ctl_node_t *)node;
+			}
 			mibp[i] = (size_t)index;
 		}
 
@@ -925,8 +922,7 @@ label_return:
 
 int
 ctl_byname(tsd_t *tsd, const char *name, void *oldp, size_t *oldlenp,
-    void *newp, size_t newlen)
-{
+    void *newp, size_t newlen) {
 	int ret;
 	size_t depth;
 	ctl_node_t const *nodes[CTL_MAX_DEPTH];
@@ -940,12 +936,14 @@ ctl_byname(tsd_t *tsd, const char *name, void *oldp, size_t *oldlenp,
 
 	depth = CTL_MAX_DEPTH;
 	ret = ctl_lookup(tsd_tsdn(tsd), name, nodes, mib, &depth);
-	if (ret != 0)
+	if (ret != 0) {
 		goto label_return;
+	}
 
 	node = ctl_named_node(nodes[depth-1]);
-	if (node != NULL && node->ctl)
+	if (node != NULL && node->ctl) {
 		ret = node->ctl(tsd, mib, depth, oldp, oldlenp, newp, newlen);
+	}
 	else {
 		/* The name refers to a partial path through the ctl tree. */
 		ret = ENOENT;
@@ -956,8 +954,7 @@ label_return:
 }
 
 int
-ctl_nametomib(tsdn_t *tsdn, const char *name, size_t *mibp, size_t *miblenp)
-{
+ctl_nametomib(tsdn_t *tsdn, const char *name, size_t *mibp, size_t *miblenp) {
 	int ret;
 
 	if (!ctl_initialized && ctl_init(tsdn)) {
@@ -972,8 +969,7 @@ label_return:
 
 int
 ctl_bymib(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
-    size_t *oldlenp, void *newp, size_t newlen)
-{
+    size_t *oldlenp, void *newp, size_t newlen) {
 	int ret;
 	const ctl_named_node_t *node;
 	size_t i;
@@ -1009,9 +1005,9 @@ ctl_bymib(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
 	}
 
 	/* Call the ctl function. */
-	if (node && node->ctl)
+	if (node && node->ctl) {
 		ret = node->ctl(tsd, mib, miblen, oldp, oldlenp, newp, newlen);
-	else {
+	} else {
 		/* Partial MIB. */
 		ret = ENOENT;
 	}
@@ -1021,10 +1017,10 @@ label_return:
 }
 
 bool
-ctl_boot(void)
-{
-	if (malloc_mutex_init(&ctl_mtx, "ctl", WITNESS_RANK_CTL))
+ctl_boot(void) {
+	if (malloc_mutex_init(&ctl_mtx, "ctl", WITNESS_RANK_CTL)) {
 		return (true);
+	}
 
 	ctl_initialized = false;
 
@@ -1032,20 +1028,17 @@ ctl_boot(void)
 }
 
 void
-ctl_prefork(tsdn_t *tsdn)
-{
+ctl_prefork(tsdn_t *tsdn) {
 	malloc_mutex_prefork(tsdn, &ctl_mtx);
 }
 
 void
-ctl_postfork_parent(tsdn_t *tsdn)
-{
+ctl_postfork_parent(tsdn_t *tsdn) {
 	malloc_mutex_postfork_parent(tsdn, &ctl_mtx);
 }
 
 void
-ctl_postfork_child(tsdn_t *tsdn)
-{
+ctl_postfork_child(tsdn_t *tsdn) {
 	malloc_mutex_postfork_child(tsdn, &ctl_mtx);
 }
 
@@ -1112,36 +1105,38 @@ ctl_postfork_child(tsdn_t *tsdn)
 #define CTL_RO_CLGEN(c, l, n, v, t) \
 static int \
 n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \
-    size_t *oldlenp, void *newp, size_t newlen) \
-{ \
+    size_t *oldlenp, void *newp, size_t newlen) { \
 	int ret; \
 	t oldval; \
 \
-	if (!(c)) \
+	if (!(c)) { \
 		return (ENOENT); \
-	if (l) \
+	} \
+	if (l) { \
 		malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); \
+	} \
 	READONLY(); \
 	oldval = (v); \
 	READ(oldval, t); \
\
 	ret = 0; \
 label_return: \
-	if (l) \
+	if (l) { \
 		malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); \
+	} \
 	return (ret); \
 }
 
 #define CTL_RO_CGEN(c, n, v, t) \
 static int \
 n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \
-    size_t *oldlenp, void *newp, size_t newlen) \
-{ \
+    size_t *oldlenp, void *newp, size_t newlen) { \
 	int ret; \
 	t oldval; \
\
-	if (!(c)) \
+	if (!(c)) { \
 		return (ENOENT); \
+	} \
 	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); \
 	READONLY(); \
 	oldval = (v); \
@@ -1156,8 +1151,7 @@ label_return: \
 #define CTL_RO_GEN(n, v, t) \
 static int \
 n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \
-    size_t *oldlenp, void *newp, size_t newlen) \
-{ \
+    size_t *oldlenp, void *newp, size_t newlen) { \
 	int ret; \
 	t oldval; \
\
@@ -1179,13 +1173,13 @@ label_return: \
 #define CTL_RO_NL_CGEN(c, n, v, t) \
 static int \
 n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \
-    size_t *oldlenp, void *newp, size_t newlen) \
-{ \
+    size_t *oldlenp, void *newp, size_t newlen) { \
 	int ret; \
 	t oldval; \
\
-	if (!(c)) \
+	if (!(c)) { \
 		return (ENOENT); \
+	} \
 	READONLY(); \
 	oldval = (v); \
 	READ(oldval, t); \
@@ -1198,8 +1192,7 @@ label_return: \
 #define CTL_RO_NL_GEN(n, v, t) \
 static int \
 n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \
-    size_t *oldlenp, void *newp, size_t newlen) \
-{ \
+    size_t *oldlenp, void *newp, size_t newlen) { \
 	int ret; \
 	t oldval; \
\
@@ -1215,13 +1208,13 @@ label_return: \
 #define CTL_TSD_RO_NL_CGEN(c, n, m, t) \
 static int \
 n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \
-    size_t *oldlenp, void *newp, size_t newlen) \
-{ \
+    size_t *oldlenp, void *newp, size_t newlen) { \
 	int ret; \
 	t oldval; \
\
-	if (!(c)) \
+	if (!(c)) { \
 		return (ENOENT); \
+	} \
 	READONLY(); \
 	oldval = (m(tsd)); \
 	READ(oldval, t); \
@@ -1234,8 +1227,7 @@ label_return: \
 #define CTL_RO_CONFIG_GEN(n, t) \
 static int \
 n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \
-    size_t *oldlenp, void *newp, size_t newlen) \
-{ \
+    size_t *oldlenp, void *newp, size_t newlen) { \
 	int ret; \
 	t oldval; \
\
@@ -1254,15 +1246,15 @@ CTL_RO_NL_GEN(version, JEMALLOC_VERSION, const char *)
 
 static int
 epoch_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
-    size_t *oldlenp, void *newp, size_t newlen)
-{
+    size_t *oldlenp, void *newp, size_t newlen) {
 	int ret;
 	UNUSED uint64_t newval;
 
 	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
 	WRITE(newval, uint64_t);
-	if (newp != NULL)
+	if (newp != NULL) {
 		ctl_refresh(tsd_tsdn(tsd));
+	}
 	READ(ctl_arenas->epoch, uint64_t);
 
 	ret = 0;
@@ -1317,15 +1309,15 @@ CTL_RO_NL_CGEN(config_prof, opt_prof_leak, opt_prof_leak, bool)
 
 static int
 thread_arena_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
-    size_t *oldlenp, void *newp, size_t newlen)
-{
+    size_t *oldlenp, void *newp, size_t newlen) {
 	int ret;
 	arena_t *oldarena;
 	unsigned newind, oldind;
 
 	oldarena = arena_choose(tsd, NULL);
-	if (oldarena == NULL)
+	if (oldarena == NULL) {
 		return (EAGAIN);
+	}
 
 	newind = oldind = arena_ind_get(oldarena);
 	WRITE(newind, unsigned);
@@ -1372,13 +1364,13 @@ CTL_TSD_RO_NL_CGEN(config_stats, thread_deallocatedp,
 
 static int
 thread_tcache_enabled_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
-    void *oldp, size_t *oldlenp, void *newp, size_t newlen)
-{
+    void *oldp, size_t *oldlenp, void *newp, size_t newlen) {
 	int ret;
 	bool oldval;
 
-	if (!config_tcache)
+	if (!config_tcache) {
 		return (ENOENT);
+	}
 
 	oldval = tcache_enabled_get();
 	if (newp != NULL) {
@@ -1397,12 +1389,12 @@ label_return:
 
 static int
 thread_tcache_flush_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
-    void *oldp, size_t *oldlenp, void *newp, size_t newlen)
|
void *oldp, size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
|
|
||||||
if (!config_tcache)
|
if (!config_tcache) {
|
||||||
return (ENOENT);
|
return (ENOENT);
|
||||||
|
}
|
||||||
|
|
||||||
READONLY();
|
READONLY();
|
||||||
WRITEONLY();
|
WRITEONLY();
|
||||||
@ -1416,12 +1408,12 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
thread_prof_name_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
thread_prof_name_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
|
|
||||||
if (!config_prof)
|
if (!config_prof) {
|
||||||
return (ENOENT);
|
return (ENOENT);
|
||||||
|
}
|
||||||
|
|
||||||
READ_XOR_WRITE();
|
READ_XOR_WRITE();
|
||||||
|
|
||||||
@ -1432,8 +1424,9 @@ thread_prof_name_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
|||||||
}
|
}
|
||||||
|
|
||||||
if ((ret = prof_thread_name_set(tsd, *(const char **)newp)) !=
|
if ((ret = prof_thread_name_set(tsd, *(const char **)newp)) !=
|
||||||
0)
|
0) {
|
||||||
goto label_return;
|
goto label_return;
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
const char *oldname = prof_thread_name_get(tsd);
|
const char *oldname = prof_thread_name_get(tsd);
|
||||||
READ(oldname, const char *);
|
READ(oldname, const char *);
|
||||||
@ -1446,13 +1439,13 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
thread_prof_active_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
thread_prof_active_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
bool oldval;
|
bool oldval;
|
||||||
|
|
||||||
if (!config_prof)
|
if (!config_prof) {
|
||||||
return (ENOENT);
|
return (ENOENT);
|
||||||
|
}
|
||||||
|
|
||||||
oldval = prof_thread_active_get(tsd);
|
oldval = prof_thread_active_get(tsd);
|
||||||
if (newp != NULL) {
|
if (newp != NULL) {
|
||||||
@ -1476,13 +1469,13 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
tcache_create_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
tcache_create_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
unsigned tcache_ind;
|
unsigned tcache_ind;
|
||||||
|
|
||||||
if (!config_tcache)
|
if (!config_tcache) {
|
||||||
return (ENOENT);
|
return (ENOENT);
|
||||||
|
}
|
||||||
|
|
||||||
malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
|
malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
|
||||||
READONLY();
|
READONLY();
|
||||||
@ -1500,13 +1493,13 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
tcache_flush_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
tcache_flush_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
unsigned tcache_ind;
|
unsigned tcache_ind;
|
||||||
|
|
||||||
if (!config_tcache)
|
if (!config_tcache) {
|
||||||
return (ENOENT);
|
return (ENOENT);
|
||||||
|
}
|
||||||
|
|
||||||
WRITEONLY();
|
WRITEONLY();
|
||||||
tcache_ind = UINT_MAX;
|
tcache_ind = UINT_MAX;
|
||||||
@ -1524,13 +1517,13 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
tcache_destroy_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
tcache_destroy_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
unsigned tcache_ind;
|
unsigned tcache_ind;
|
||||||
|
|
||||||
if (!config_tcache)
|
if (!config_tcache) {
|
||||||
return (ENOENT);
|
return (ENOENT);
|
||||||
|
}
|
||||||
|
|
||||||
WRITEONLY();
|
WRITEONLY();
|
||||||
tcache_ind = UINT_MAX;
|
tcache_ind = UINT_MAX;
|
||||||
@ -1550,8 +1543,7 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
arena_i_initialized_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
|
arena_i_initialized_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
|
||||||
void *oldp, size_t *oldlenp, void *newp, size_t newlen)
|
void *oldp, size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
tsdn_t *tsdn = tsd_tsdn(tsd);
|
tsdn_t *tsdn = tsd_tsdn(tsd);
|
||||||
unsigned arena_ind;
|
unsigned arena_ind;
|
||||||
@ -1572,8 +1564,7 @@ label_return:
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
arena_i_purge(tsdn_t *tsdn, unsigned arena_ind, bool all)
|
arena_i_purge(tsdn_t *tsdn, unsigned arena_ind, bool all) {
|
||||||
{
|
|
||||||
malloc_mutex_lock(tsdn, &ctl_mtx);
|
malloc_mutex_lock(tsdn, &ctl_mtx);
|
||||||
{
|
{
|
||||||
unsigned narenas = ctl_arenas->narenas;
|
unsigned narenas = ctl_arenas->narenas;
|
||||||
@ -1586,8 +1577,9 @@ arena_i_purge(tsdn_t *tsdn, unsigned arena_ind, bool all)
|
|||||||
unsigned i;
|
unsigned i;
|
||||||
VARIABLE_ARRAY(arena_t *, tarenas, narenas);
|
VARIABLE_ARRAY(arena_t *, tarenas, narenas);
|
||||||
|
|
||||||
for (i = 0; i < narenas; i++)
|
for (i = 0; i < narenas; i++) {
|
||||||
tarenas[i] = arena_get(tsdn, i, false);
|
tarenas[i] = arena_get(tsdn, i, false);
|
||||||
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* No further need to hold ctl_mtx, since narenas and
|
* No further need to hold ctl_mtx, since narenas and
|
||||||
@ -1596,9 +1588,10 @@ arena_i_purge(tsdn_t *tsdn, unsigned arena_ind, bool all)
|
|||||||
malloc_mutex_unlock(tsdn, &ctl_mtx);
|
malloc_mutex_unlock(tsdn, &ctl_mtx);
|
||||||
|
|
||||||
for (i = 0; i < narenas; i++) {
|
for (i = 0; i < narenas; i++) {
|
||||||
if (tarenas[i] != NULL)
|
if (tarenas[i] != NULL) {
|
||||||
arena_purge(tsdn, tarenas[i], all);
|
arena_purge(tsdn, tarenas[i], all);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
arena_t *tarena;
|
arena_t *tarena;
|
||||||
|
|
||||||
@ -1609,16 +1602,16 @@ arena_i_purge(tsdn_t *tsdn, unsigned arena_ind, bool all)
|
|||||||
/* No further need to hold ctl_mtx. */
|
/* No further need to hold ctl_mtx. */
|
||||||
malloc_mutex_unlock(tsdn, &ctl_mtx);
|
malloc_mutex_unlock(tsdn, &ctl_mtx);
|
||||||
|
|
||||||
if (tarena != NULL)
|
if (tarena != NULL) {
|
||||||
arena_purge(tsdn, tarena, all);
|
arena_purge(tsdn, tarena, all);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
static int
|
static int
|
||||||
arena_i_purge_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
arena_i_purge_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
unsigned arena_ind;
|
unsigned arena_ind;
|
||||||
|
|
||||||
@ -1634,8 +1627,7 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
arena_i_decay_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
arena_i_decay_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
unsigned arena_ind;
|
unsigned arena_ind;
|
||||||
|
|
||||||
@ -1652,8 +1644,7 @@ label_return:
|
|||||||
static int
|
static int
|
||||||
arena_i_reset_destroy_helper(tsd_t *tsd, const size_t *mib, size_t miblen,
|
arena_i_reset_destroy_helper(tsd_t *tsd, const size_t *mib, size_t miblen,
|
||||||
void *oldp, size_t *oldlenp, void *newp, size_t newlen, unsigned *arena_ind,
|
void *oldp, size_t *oldlenp, void *newp, size_t newlen, unsigned *arena_ind,
|
||||||
arena_t **arena)
|
arena_t **arena) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
|
|
||||||
READONLY();
|
READONLY();
|
||||||
@ -1678,16 +1669,16 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
arena_i_reset_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
arena_i_reset_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
unsigned arena_ind;
|
unsigned arena_ind;
|
||||||
arena_t *arena;
|
arena_t *arena;
|
||||||
|
|
||||||
ret = arena_i_reset_destroy_helper(tsd, mib, miblen, oldp, oldlenp,
|
ret = arena_i_reset_destroy_helper(tsd, mib, miblen, oldp, oldlenp,
|
||||||
newp, newlen, &arena_ind, &arena);
|
newp, newlen, &arena_ind, &arena);
|
||||||
if (ret != 0)
|
if (ret != 0) {
|
||||||
return (ret);
|
return (ret);
|
||||||
|
}
|
||||||
|
|
||||||
arena_reset(tsd, arena);
|
arena_reset(tsd, arena);
|
||||||
|
|
||||||
@ -1696,8 +1687,7 @@ arena_i_reset_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
arena_i_destroy_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
arena_i_destroy_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
unsigned arena_ind;
|
unsigned arena_ind;
|
||||||
arena_t *arena;
|
arena_t *arena;
|
||||||
@ -1705,8 +1695,9 @@ arena_i_destroy_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
|||||||
|
|
||||||
ret = arena_i_reset_destroy_helper(tsd, mib, miblen, oldp, oldlenp,
|
ret = arena_i_reset_destroy_helper(tsd, mib, miblen, oldp, oldlenp,
|
||||||
newp, newlen, &arena_ind, &arena);
|
newp, newlen, &arena_ind, &arena);
|
||||||
if (ret != 0)
|
if (ret != 0) {
|
||||||
goto label_return;
|
goto label_return;
|
||||||
|
}
|
||||||
|
|
||||||
if (arena_nthreads_get(arena, false) != 0 || arena_nthreads_get(arena,
|
if (arena_nthreads_get(arena, false) != 0 || arena_nthreads_get(arena,
|
||||||
true) != 0) {
|
true) != 0) {
|
||||||
@ -1735,8 +1726,7 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
arena_i_dss_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
arena_i_dss_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
const char *dss = NULL;
|
const char *dss = NULL;
|
||||||
unsigned arena_ind;
|
unsigned arena_ind;
|
||||||
@ -1797,8 +1787,7 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
arena_i_decay_time_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
arena_i_decay_time_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
unsigned arena_ind;
|
unsigned arena_ind;
|
||||||
arena_t *arena;
|
arena_t *arena;
|
||||||
@ -1833,8 +1822,7 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
arena_i_extent_hooks_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
|
arena_i_extent_hooks_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
|
||||||
void *oldp, size_t *oldlenp, void *newp, size_t newlen)
|
void *oldp, size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
unsigned arena_ind;
|
unsigned arena_ind;
|
||||||
arena_t *arena;
|
arena_t *arena;
|
||||||
@ -1867,8 +1855,7 @@ label_return:
|
|||||||
}
|
}
|
||||||
|
|
||||||
static const ctl_named_node_t *
|
static const ctl_named_node_t *
|
||||||
arena_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
|
arena_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i) {
|
||||||
{
|
|
||||||
const ctl_named_node_t *ret;
|
const ctl_named_node_t *ret;
|
||||||
|
|
||||||
malloc_mutex_lock(tsdn, &ctl_mtx);
|
malloc_mutex_lock(tsdn, &ctl_mtx);
|
||||||
@ -1894,8 +1881,7 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
arenas_narenas_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
arenas_narenas_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
unsigned narenas;
|
unsigned narenas;
|
||||||
|
|
||||||
@ -1916,8 +1902,7 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
arenas_decay_time_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
arenas_decay_time_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
|
|
||||||
if (oldp != NULL && oldlenp != NULL) {
|
if (oldp != NULL && oldlenp != NULL) {
|
||||||
@ -1949,27 +1934,27 @@ CTL_RO_NL_GEN(arenas_bin_i_size, arena_bin_info[mib[2]].reg_size, size_t)
|
|||||||
CTL_RO_NL_GEN(arenas_bin_i_nregs, arena_bin_info[mib[2]].nregs, uint32_t)
|
CTL_RO_NL_GEN(arenas_bin_i_nregs, arena_bin_info[mib[2]].nregs, uint32_t)
|
||||||
CTL_RO_NL_GEN(arenas_bin_i_slab_size, arena_bin_info[mib[2]].slab_size, size_t)
|
CTL_RO_NL_GEN(arenas_bin_i_slab_size, arena_bin_info[mib[2]].slab_size, size_t)
|
||||||
static const ctl_named_node_t *
|
static const ctl_named_node_t *
|
||||||
arenas_bin_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
|
arenas_bin_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i) {
|
||||||
{
|
if (i > NBINS) {
|
||||||
if (i > NBINS)
|
|
||||||
return (NULL);
|
return (NULL);
|
||||||
|
}
|
||||||
return (super_arenas_bin_i_node);
|
return (super_arenas_bin_i_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
CTL_RO_NL_GEN(arenas_nlextents, NSIZES - NBINS, unsigned)
|
CTL_RO_NL_GEN(arenas_nlextents, NSIZES - NBINS, unsigned)
|
||||||
CTL_RO_NL_GEN(arenas_lextent_i_size, index2size(NBINS+(szind_t)mib[2]), size_t)
|
CTL_RO_NL_GEN(arenas_lextent_i_size, index2size(NBINS+(szind_t)mib[2]), size_t)
|
||||||
static const ctl_named_node_t *
|
static const ctl_named_node_t *
|
||||||
arenas_lextent_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
|
arenas_lextent_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen,
|
||||||
{
|
size_t i) {
|
||||||
if (i > NSIZES - NBINS)
|
if (i > NSIZES - NBINS) {
|
||||||
return (NULL);
|
return (NULL);
|
||||||
|
}
|
||||||
return (super_arenas_lextent_i_node);
|
return (super_arenas_lextent_i_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
static int
|
static int
|
||||||
arenas_create_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
arenas_create_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
extent_hooks_t *extent_hooks;
|
extent_hooks_t *extent_hooks;
|
||||||
unsigned arena_ind;
|
unsigned arena_ind;
|
||||||
@ -1995,13 +1980,13 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
prof_thread_active_init_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
|
prof_thread_active_init_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
|
||||||
void *oldp, size_t *oldlenp, void *newp, size_t newlen)
|
void *oldp, size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
bool oldval;
|
bool oldval;
|
||||||
|
|
||||||
if (!config_prof)
|
if (!config_prof) {
|
||||||
return (ENOENT);
|
return (ENOENT);
|
||||||
|
}
|
||||||
|
|
||||||
if (newp != NULL) {
|
if (newp != NULL) {
|
||||||
if (newlen != sizeof(bool)) {
|
if (newlen != sizeof(bool)) {
|
||||||
@ -2010,8 +1995,9 @@ prof_thread_active_init_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
|
|||||||
}
|
}
|
||||||
oldval = prof_thread_active_init_set(tsd_tsdn(tsd),
|
oldval = prof_thread_active_init_set(tsd_tsdn(tsd),
|
||||||
*(bool *)newp);
|
*(bool *)newp);
|
||||||
} else
|
} else {
|
||||||
oldval = prof_thread_active_init_get(tsd_tsdn(tsd));
|
oldval = prof_thread_active_init_get(tsd_tsdn(tsd));
|
||||||
|
}
|
||||||
READ(oldval, bool);
|
READ(oldval, bool);
|
||||||
|
|
||||||
ret = 0;
|
ret = 0;
|
||||||
@ -2021,13 +2007,13 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
prof_active_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
prof_active_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
bool oldval;
|
bool oldval;
|
||||||
|
|
||||||
if (!config_prof)
|
if (!config_prof) {
|
||||||
return (ENOENT);
|
return (ENOENT);
|
||||||
|
}
|
||||||
|
|
||||||
if (newp != NULL) {
|
if (newp != NULL) {
|
||||||
if (newlen != sizeof(bool)) {
|
if (newlen != sizeof(bool)) {
|
||||||
@ -2035,8 +2021,9 @@ prof_active_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
|||||||
goto label_return;
|
goto label_return;
|
||||||
}
|
}
|
||||||
oldval = prof_active_set(tsd_tsdn(tsd), *(bool *)newp);
|
oldval = prof_active_set(tsd_tsdn(tsd), *(bool *)newp);
|
||||||
} else
|
} else {
|
||||||
oldval = prof_active_get(tsd_tsdn(tsd));
|
oldval = prof_active_get(tsd_tsdn(tsd));
|
||||||
|
}
|
||||||
READ(oldval, bool);
|
READ(oldval, bool);
|
||||||
|
|
||||||
ret = 0;
|
ret = 0;
|
||||||
@ -2046,13 +2033,13 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
prof_dump_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
prof_dump_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
const char *filename = NULL;
|
const char *filename = NULL;
|
||||||
|
|
||||||
if (!config_prof)
|
if (!config_prof) {
|
||||||
return (ENOENT);
|
return (ENOENT);
|
||||||
|
}
|
||||||
|
|
||||||
WRITEONLY();
|
WRITEONLY();
|
||||||
WRITE(filename, const char *);
|
WRITE(filename, const char *);
|
||||||
@ -2069,13 +2056,13 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
prof_gdump_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
prof_gdump_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
bool oldval;
|
bool oldval;
|
||||||
|
|
||||||
if (!config_prof)
|
if (!config_prof) {
|
||||||
return (ENOENT);
|
return (ENOENT);
|
||||||
|
}
|
||||||
|
|
||||||
if (newp != NULL) {
|
if (newp != NULL) {
|
||||||
if (newlen != sizeof(bool)) {
|
if (newlen != sizeof(bool)) {
|
||||||
@ -2083,8 +2070,9 @@ prof_gdump_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
|||||||
goto label_return;
|
goto label_return;
|
||||||
}
|
}
|
||||||
oldval = prof_gdump_set(tsd_tsdn(tsd), *(bool *)newp);
|
oldval = prof_gdump_set(tsd_tsdn(tsd), *(bool *)newp);
|
||||||
} else
|
} else {
|
||||||
oldval = prof_gdump_get(tsd_tsdn(tsd));
|
oldval = prof_gdump_get(tsd_tsdn(tsd));
|
||||||
|
}
|
||||||
READ(oldval, bool);
|
READ(oldval, bool);
|
||||||
|
|
||||||
ret = 0;
|
ret = 0;
|
||||||
@ -2094,18 +2082,19 @@ label_return:
|
|||||||
|
|
||||||
static int
|
static int
|
||||||
prof_reset_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
prof_reset_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
|
||||||
size_t *oldlenp, void *newp, size_t newlen)
|
size_t *oldlenp, void *newp, size_t newlen) {
|
||||||
{
|
|
||||||
int ret;
|
int ret;
|
||||||
size_t lg_sample = lg_prof_sample;
|
size_t lg_sample = lg_prof_sample;
|
||||||
|
|
||||||
if (!config_prof)
|
if (!config_prof) {
|
||||||
return (ENOENT);
|
return (ENOENT);
|
||||||
|
}
|
||||||
|
|
||||||
WRITEONLY();
|
WRITEONLY();
|
||||||
WRITE(lg_sample, size_t);
|
WRITE(lg_sample, size_t);
|
||||||
if (lg_sample >= (sizeof(uint64_t) << 3))
|
if (lg_sample >= (sizeof(uint64_t) << 3)) {
|
||||||
lg_sample = (sizeof(uint64_t) << 3) - 1;
|
lg_sample = (sizeof(uint64_t) << 3) - 1;
|
||||||
|
}
|
||||||
|
|
||||||
prof_reset(tsd, lg_sample);
|
prof_reset(tsd, lg_sample);
|
||||||
|
|
||||||
@ -2189,10 +2178,10 @@ CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curslabs,
|
|||||||
|
|
||||||
static const ctl_named_node_t *
|
static const ctl_named_node_t *
|
||||||
stats_arenas_i_bins_j_index(tsdn_t *tsdn, const size_t *mib, size_t miblen,
|
stats_arenas_i_bins_j_index(tsdn_t *tsdn, const size_t *mib, size_t miblen,
|
||||||
size_t j)
|
size_t j) {
|
||||||
{
|
if (j > NBINS) {
|
||||||
if (j > NBINS)
|
|
||||||
return (NULL);
|
return (NULL);
|
||||||
|
}
|
||||||
return (super_stats_arenas_i_bins_j_node);
|
return (super_stats_arenas_i_bins_j_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -2207,16 +2196,15 @@ CTL_RO_CGEN(config_stats, stats_arenas_i_lextents_j_curlextents,
|
|||||||
|
|
||||||
static const ctl_named_node_t *
|
static const ctl_named_node_t *
|
||||||
stats_arenas_i_lextents_j_index(tsdn_t *tsdn, const size_t *mib, size_t miblen,
|
stats_arenas_i_lextents_j_index(tsdn_t *tsdn, const size_t *mib, size_t miblen,
|
||||||
size_t j)
|
size_t j) {
|
||||||
{
|
if (j > NSIZES - NBINS) {
|
||||||
if (j > NSIZES - NBINS)
|
|
||||||
return (NULL);
|
return (NULL);
|
||||||
|
}
|
||||||
return (super_stats_arenas_i_lextents_j_node);
|
return (super_stats_arenas_i_lextents_j_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
static const ctl_named_node_t *
|
static const ctl_named_node_t *
|
||||||
stats_arenas_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
|
stats_arenas_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i) {
|
||||||
{
|
|
||||||
const ctl_named_node_t *ret;
|
const ctl_named_node_t *ret;
|
||||||
size_t a;
|
size_t a;
|
||||||
|
|
||||||
|
375
src/extent.c
375
src/extent.c
@ -75,8 +75,7 @@ static void extent_record(tsdn_t *tsdn, arena_t *arena,
|
|||||||
/******************************************************************************/
|
/******************************************************************************/
|
||||||
|
|
||||||
extent_t *
|
extent_t *
|
||||||
extent_alloc(tsdn_t *tsdn, arena_t *arena)
|
extent_alloc(tsdn_t *tsdn, arena_t *arena) {
|
||||||
{
|
|
||||||
extent_t *extent;
|
extent_t *extent;
|
||||||
|
|
||||||
malloc_mutex_lock(tsdn, &arena->extent_cache_mtx);
|
malloc_mutex_lock(tsdn, &arena->extent_cache_mtx);
|
||||||
@ -92,8 +91,7 @@ extent_alloc(tsdn_t *tsdn, arena_t *arena)
|
|||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
extent_dalloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent)
|
extent_dalloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent) {
|
||||||
{
|
|
||||||
malloc_mutex_lock(tsdn, &arena->extent_cache_mtx);
|
malloc_mutex_lock(tsdn, &arena->extent_cache_mtx);
|
||||||
ql_elm_new(extent, ql_link);
|
ql_elm_new(extent, ql_link);
|
||||||
ql_tail_insert(&arena->extent_cache, extent, ql_link);
|
ql_tail_insert(&arena->extent_cache, extent, ql_link);
|
||||||
@ -101,31 +99,29 @@ extent_dalloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent)
|
|||||||
}
|
}
|
||||||
|
|
||||||
extent_hooks_t *
|
extent_hooks_t *
|
||||||
extent_hooks_get(arena_t *arena)
|
extent_hooks_get(arena_t *arena) {
|
||||||
{
|
|
||||||
return (base_extent_hooks_get(arena->base));
|
return (base_extent_hooks_get(arena->base));
|
||||||
}
|
}
|
||||||
|
|
||||||
extent_hooks_t *
|
extent_hooks_t *
|
||||||
extent_hooks_set(arena_t *arena, extent_hooks_t *extent_hooks)
|
extent_hooks_set(arena_t *arena, extent_hooks_t *extent_hooks) {
|
||||||
{
|
|
||||||
return (base_extent_hooks_set(arena->base, extent_hooks));
|
return (base_extent_hooks_set(arena->base, extent_hooks));
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
extent_hooks_assure_initialized(arena_t *arena, extent_hooks_t **r_extent_hooks)
|
extent_hooks_assure_initialized(arena_t *arena,
|
||||||
{
|
extent_hooks_t **r_extent_hooks) {
|
||||||
if (*r_extent_hooks == EXTENT_HOOKS_INITIALIZER)
|
if (*r_extent_hooks == EXTENT_HOOKS_INITIALIZER) {
|
||||||
*r_extent_hooks = extent_hooks_get(arena);
|
*r_extent_hooks = extent_hooks_get(arena);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#ifdef JEMALLOC_JET
|
#ifdef JEMALLOC_JET
|
||||||
#undef extent_size_quantize_floor
|
#undef extent_size_quantize_floor
|
||||||
#define extent_size_quantize_floor JEMALLOC_N(n_extent_size_quantize_floor)
|
#define extent_size_quantize_floor JEMALLOC_N(n_extent_size_quantize_floor)
|
||||||
#endif
|
#endif
|
||||||
size_t
|
size_t
|
||||||
extent_size_quantize_floor(size_t size)
|
extent_size_quantize_floor(size_t size) {
|
||||||
{
|
|
||||||
size_t ret;
|
size_t ret;
|
||||||
pszind_t pind;
|
pszind_t pind;
|
||||||
|
|
||||||
@ -161,8 +157,7 @@ extent_size_quantize_t *extent_size_quantize_floor =
|
|||||||
#define extent_size_quantize_ceil JEMALLOC_N(n_extent_size_quantize_ceil)
|
#define extent_size_quantize_ceil JEMALLOC_N(n_extent_size_quantize_ceil)
|
||||||
#endif
|
#endif
|
||||||
size_t
|
size_t
|
||||||
extent_size_quantize_ceil(size_t size)
|
extent_size_quantize_ceil(size_t size) {
|
||||||
{
|
|
||||||
size_t ret;
|
size_t ret;
|
||||||
|
|
||||||
assert(size > 0);
|
assert(size > 0);
|
||||||
@ -195,8 +190,7 @@ ph_gen(, extent_heap_, extent_heap_t, extent_t, ph_link, extent_snad_comp)
|
|||||||
|
|
||||||
static void
|
static void
|
||||||
extent_heaps_insert(tsdn_t *tsdn, extent_heap_t extent_heaps[NPSIZES+1],
|
extent_heaps_insert(tsdn_t *tsdn, extent_heap_t extent_heaps[NPSIZES+1],
|
||||||
extent_t *extent)
|
extent_t *extent) {
|
||||||
{
|
|
||||||
size_t psz = extent_size_quantize_floor(extent_size_get(extent));
|
size_t psz = extent_size_quantize_floor(extent_size_get(extent));
|
||||||
pszind_t pind = psz2ind(psz);
|
pszind_t pind = psz2ind(psz);
|
||||||
|
|
||||||
@ -207,8 +201,7 @@ extent_heaps_insert(tsdn_t *tsdn, extent_heap_t extent_heaps[NPSIZES+1],
|
|||||||
|
|
||||||
static void
|
static void
|
||||||
extent_heaps_remove(tsdn_t *tsdn, extent_heap_t extent_heaps[NPSIZES+1],
|
extent_heaps_remove(tsdn_t *tsdn, extent_heap_t extent_heaps[NPSIZES+1],
|
||||||
extent_t *extent)
|
extent_t *extent) {
|
||||||
{
|
|
||||||
size_t psz = extent_size_quantize_floor(extent_size_get(extent));
|
size_t psz = extent_size_quantize_floor(extent_size_get(extent));
|
||||||
pszind_t pind = psz2ind(psz);
|
pszind_t pind = psz2ind(psz);
|
||||||
|
|
||||||
@ -220,12 +213,12 @@ extent_heaps_remove(tsdn_t *tsdn, extent_heap_t extent_heaps[NPSIZES+1],
|
|||||||
static bool
|
static bool
|
||||||
extent_rtree_acquire(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
|
extent_rtree_acquire(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
|
||||||
const extent_t *extent, bool dependent, bool init_missing,
|
const extent_t *extent, bool dependent, bool init_missing,
|
||||||
rtree_elm_t **r_elm_a, rtree_elm_t **r_elm_b)
|
rtree_elm_t **r_elm_a, rtree_elm_t **r_elm_b) {
|
||||||
{
|
|
||||||
*r_elm_a = rtree_elm_acquire(tsdn, &extents_rtree, rtree_ctx,
|
*r_elm_a = rtree_elm_acquire(tsdn, &extents_rtree, rtree_ctx,
|
||||||
(uintptr_t)extent_base_get(extent), dependent, init_missing);
|
(uintptr_t)extent_base_get(extent), dependent, init_missing);
|
||||||
if (!dependent && *r_elm_a == NULL)
|
if (!dependent && *r_elm_a == NULL) {
|
||||||
return (true);
|
return (true);
|
||||||
|
}
|
||||||
assert(*r_elm_a != NULL);
|
assert(*r_elm_a != NULL);
|
||||||
|
|
||||||
if (extent_size_get(extent) > PAGE) {
|
if (extent_size_get(extent) > PAGE) {
|
||||||
@ -237,33 +230,33 @@ extent_rtree_acquire(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
|
|||||||
return (true);
|
return (true);
|
||||||
}
|
}
|
||||||
assert(*r_elm_b != NULL);
|
assert(*r_elm_b != NULL);
|
||||||
} else
|
} else {
|
||||||
*r_elm_b = NULL;
|
*r_elm_b = NULL;
|
||||||
|
}
|
||||||
|
|
||||||
return (false);
|
return (false);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
extent_rtree_write_acquired(tsdn_t *tsdn, rtree_elm_t *elm_a,
|
extent_rtree_write_acquired(tsdn_t *tsdn, rtree_elm_t *elm_a,
|
||||||
rtree_elm_t *elm_b, const extent_t *extent)
|
rtree_elm_t *elm_b, const extent_t *extent) {
|
||||||
{
|
|
||||||
rtree_elm_write_acquired(tsdn, &extents_rtree, elm_a, extent);
|
rtree_elm_write_acquired(tsdn, &extents_rtree, elm_a, extent);
|
||||||
if (elm_b != NULL)
|
if (elm_b != NULL) {
|
||||||
rtree_elm_write_acquired(tsdn, &extents_rtree, elm_b, extent);
|
rtree_elm_write_acquired(tsdn, &extents_rtree, elm_b, extent);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
extent_rtree_release(tsdn_t *tsdn, rtree_elm_t *elm_a, rtree_elm_t *elm_b)
|
extent_rtree_release(tsdn_t *tsdn, rtree_elm_t *elm_a, rtree_elm_t *elm_b) {
|
||||||
{
|
|
||||||
rtree_elm_release(tsdn, &extents_rtree, elm_a);
|
rtree_elm_release(tsdn, &extents_rtree, elm_a);
|
||||||
if (elm_b != NULL)
|
if (elm_b != NULL) {
|
||||||
rtree_elm_release(tsdn, &extents_rtree, elm_b);
|
rtree_elm_release(tsdn, &extents_rtree, elm_b);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
extent_interior_register(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
|
extent_interior_register(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
|
||||||
const extent_t *extent)
|
const extent_t *extent) {
|
||||||
{
|
|
||||||
size_t i;
|
size_t i;
|
||||||
|
|
||||||
assert(extent_slab_get(extent));
|
assert(extent_slab_get(extent));
|
||||||
@ -276,8 +269,7 @@ extent_interior_register(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
extent_gprof_add(tsdn_t *tsdn, const extent_t *extent)
|
extent_gprof_add(tsdn_t *tsdn, const extent_t *extent) {
|
||||||
{
|
|
||||||
cassert(config_prof);
|
cassert(config_prof);
|
||||||
|
|
||||||
if (opt_prof && extent_active_get(extent)) {
|
if (opt_prof && extent_active_get(extent)) {
|
||||||
@ -291,14 +283,14 @@ extent_gprof_add(tsdn_t *tsdn, const extent_t *extent)
|
|||||||
*/
|
*/
|
||||||
high = atomic_read_zu(&highpages);
|
high = atomic_read_zu(&highpages);
|
||||||
}
|
}
|
||||||
if (cur > high && prof_gdump_get_unlocked())
|
if (cur > high && prof_gdump_get_unlocked()) {
|
||||||
prof_gdump(tsdn);
|
prof_gdump(tsdn);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
extent_gprof_sub(tsdn_t *tsdn, const extent_t *extent)
|
extent_gprof_sub(tsdn_t *tsdn, const extent_t *extent) {
|
||||||
{
|
|
||||||
cassert(config_prof);
|
cassert(config_prof);
|
||||||
|
|
||||||
if (opt_prof && extent_active_get(extent)) {
|
if (opt_prof && extent_active_get(extent)) {
|
||||||
@ -309,37 +301,37 @@ extent_gprof_sub(tsdn_t *tsdn, const extent_t *extent)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static bool
|
static bool
|
||||||
extent_register(tsdn_t *tsdn, const extent_t *extent)
|
extent_register(tsdn_t *tsdn, const extent_t *extent) {
|
||||||
{
|
|
||||||
rtree_ctx_t rtree_ctx_fallback;
|
rtree_ctx_t rtree_ctx_fallback;
|
||||||
rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
|
rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
|
||||||
rtree_elm_t *elm_a, *elm_b;
|
rtree_elm_t *elm_a, *elm_b;
|
||||||
|
|
||||||
if (extent_rtree_acquire(tsdn, rtree_ctx, extent, false, true, &elm_a,
|
if (extent_rtree_acquire(tsdn, rtree_ctx, extent, false, true, &elm_a,
|
||||||
&elm_b))
|
&elm_b)) {
|
||||||
return (true);
|
return (true);
|
||||||
|
}
|
||||||
extent_rtree_write_acquired(tsdn, elm_a, elm_b, extent);
|
extent_rtree_write_acquired(tsdn, elm_a, elm_b, extent);
|
||||||
if (extent_slab_get(extent))
|
if (extent_slab_get(extent)) {
|
||||||
extent_interior_register(tsdn, rtree_ctx, extent);
|
extent_interior_register(tsdn, rtree_ctx, extent);
|
||||||
|
}
|
||||||
extent_rtree_release(tsdn, elm_a, elm_b);
|
extent_rtree_release(tsdn, elm_a, elm_b);
|
||||||
|
|
||||||
if (config_prof)
|
if (config_prof) {
|
||||||
extent_gprof_add(tsdn, extent);
|
extent_gprof_add(tsdn, extent);
|
||||||
|
}
|
||||||
|
|
||||||
return (false);
|
return (false);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
extent_reregister(tsdn_t *tsdn, const extent_t *extent)
|
extent_reregister(tsdn_t *tsdn, const extent_t *extent) {
|
||||||
{
|
|
||||||
bool err = extent_register(tsdn, extent);
|
bool err = extent_register(tsdn, extent);
|
||||||
assert(!err);
|
assert(!err);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
extent_interior_deregister(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
|
extent_interior_deregister(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
|
||||||
const extent_t *extent)
|
const extent_t *extent) {
|
||||||
{
|
|
||||||
size_t i;
|
size_t i;
|
||||||
|
|
||||||
assert(extent_slab_get(extent));
|
assert(extent_slab_get(extent));
|
||||||
@ -352,8 +344,7 @@ extent_interior_deregister(tsdn_t *tsdn, rtree_ctx_t *rtree_ctx,
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
extent_deregister(tsdn_t *tsdn, extent_t *extent)
|
extent_deregister(tsdn_t *tsdn, extent_t *extent) {
|
||||||
{
|
|
||||||
rtree_ctx_t rtree_ctx_fallback;
|
rtree_ctx_t rtree_ctx_fallback;
|
||||||
rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
|
rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
|
||||||
rtree_elm_t *elm_a, *elm_b;
|
rtree_elm_t *elm_a, *elm_b;
|
||||||
@ -367,9 +358,10 @@ extent_deregister(tsdn_t *tsdn, extent_t *extent)
|
|||||||
}
|
}
|
||||||
extent_rtree_release(tsdn, elm_a, elm_b);
|
extent_rtree_release(tsdn, elm_a, elm_b);
|
||||||
|
|
||||||
if (config_prof)
|
if (config_prof) {
|
||||||
extent_gprof_sub(tsdn, extent);
|
extent_gprof_sub(tsdn, extent);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Do first-best-fit extent selection, i.e. select the oldest/lowest extent that
|
* Do first-best-fit extent selection, i.e. select the oldest/lowest extent that
|
||||||
@ -377,8 +369,7 @@ extent_deregister(tsdn_t *tsdn, extent_t *extent)
|
|||||||
*/
|
*/
|
||||||
static extent_t *
|
static extent_t *
|
||||||
extent_first_best_fit(tsdn_t *tsdn, arena_t *arena,
|
extent_first_best_fit(tsdn_t *tsdn, arena_t *arena,
|
||||||
extent_heap_t extent_heaps[NPSIZES+1], size_t size)
|
extent_heap_t extent_heaps[NPSIZES+1], size_t size) {
|
||||||
{
|
|
||||||
pszind_t pind, i;
|
pszind_t pind, i;
|
||||||
|
|
||||||
malloc_mutex_assert_owner(tsdn, &arena->extents_mtx);
|
malloc_mutex_assert_owner(tsdn, &arena->extents_mtx);
|
||||||
@ -386,17 +377,17 @@ extent_first_best_fit(tsdn_t *tsdn, arena_t *arena,
|
|||||||
pind = psz2ind(extent_size_quantize_ceil(size));
|
pind = psz2ind(extent_size_quantize_ceil(size));
|
||||||
for (i = pind; i < NPSIZES+1; i++) {
|
for (i = pind; i < NPSIZES+1; i++) {
|
||||||
extent_t *extent = extent_heap_first(&extent_heaps[i]);
|
extent_t *extent = extent_heap_first(&extent_heaps[i]);
|
||||||
if (extent != NULL)
|
if (extent != NULL) {
|
||||||
return (extent);
|
return (extent);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return (NULL);
|
return (NULL);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
extent_leak(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
extent_leak(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
||||||
bool cache, extent_t *extent)
|
bool cache, extent_t *extent) {
|
||||||
{
|
|
||||||
/*
|
/*
|
||||||
* Leak extent after making sure its pages have already been purged, so
|
* Leak extent after making sure its pages have already been purged, so
|
||||||
* that this is only a virtual memory leak.
|
* that this is only a virtual memory leak.
|
||||||
@ -415,15 +406,15 @@ static extent_t *
|
|||||||
extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
||||||
extent_heap_t extent_heaps[NPSIZES+1], bool locked, bool cache,
|
extent_heap_t extent_heaps[NPSIZES+1], bool locked, bool cache,
|
||||||
void *new_addr, size_t usize, size_t pad, size_t alignment, bool *zero,
|
void *new_addr, size_t usize, size_t pad, size_t alignment, bool *zero,
|
||||||
bool *commit, bool slab)
|
bool *commit, bool slab) {
|
||||||
{
|
|
||||||
extent_t *extent;
|
extent_t *extent;
|
||||||
rtree_ctx_t rtree_ctx_fallback;
|
rtree_ctx_t rtree_ctx_fallback;
|
||||||
rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
|
rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
|
||||||
size_t size, alloc_size, leadsize, trailsize;
|
size_t size, alloc_size, leadsize, trailsize;
|
||||||
|
|
||||||
if (locked)
|
if (locked) {
|
||||||
malloc_mutex_assert_owner(tsdn, &arena->extents_mtx);
|
malloc_mutex_assert_owner(tsdn, &arena->extents_mtx);
|
||||||
|
}
|
||||||
assert(new_addr == NULL || !slab);
|
assert(new_addr == NULL || !slab);
|
||||||
assert(pad == 0 || !slab);
|
assert(pad == 0 || !slab);
|
||||||
assert(alignment > 0);
|
assert(alignment > 0);
|
||||||
@ -452,10 +443,12 @@ extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
|||||||
size = usize + pad;
|
size = usize + pad;
|
||||||
alloc_size = size + PAGE_CEILING(alignment) - PAGE;
|
alloc_size = size + PAGE_CEILING(alignment) - PAGE;
|
||||||
/* Beware size_t wrap-around. */
|
/* Beware size_t wrap-around. */
|
||||||
if (alloc_size < usize)
|
if (alloc_size < usize) {
|
||||||
return (NULL);
|
return (NULL);
|
||||||
if (!locked)
|
}
|
||||||
|
if (!locked) {
|
||||||
malloc_mutex_lock(tsdn, &arena->extents_mtx);
|
malloc_mutex_lock(tsdn, &arena->extents_mtx);
|
||||||
|
}
|
||||||
extent_hooks_assure_initialized(arena, r_extent_hooks);
|
extent_hooks_assure_initialized(arena, r_extent_hooks);
|
||||||
if (new_addr != NULL) {
|
if (new_addr != NULL) {
|
||||||
rtree_elm_t *elm;
|
rtree_elm_t *elm;
|
||||||
@ -470,19 +463,22 @@ extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
|||||||
if (extent_arena_get(extent) != arena ||
|
if (extent_arena_get(extent) != arena ||
|
||||||
extent_size_get(extent) < size ||
|
extent_size_get(extent) < size ||
|
||||||
extent_active_get(extent) ||
|
extent_active_get(extent) ||
|
||||||
extent_retained_get(extent) == cache)
|
extent_retained_get(extent) == cache) {
|
||||||
extent = NULL;
|
extent = NULL;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
rtree_elm_release(tsdn, &extents_rtree, elm);
|
rtree_elm_release(tsdn, &extents_rtree, elm);
|
||||||
} else
|
} else {
|
||||||
extent = NULL;
|
extent = NULL;
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
extent = extent_first_best_fit(tsdn, arena, extent_heaps,
|
extent = extent_first_best_fit(tsdn, arena, extent_heaps,
|
||||||
alloc_size);
|
alloc_size);
|
||||||
}
|
}
|
||||||
if (extent == NULL) {
|
if (extent == NULL) {
|
||||||
if (!locked)
|
if (!locked) {
|
||||||
malloc_mutex_unlock(tsdn, &arena->extents_mtx);
|
malloc_mutex_unlock(tsdn, &arena->extents_mtx);
|
||||||
|
}
|
||||||
return (NULL);
|
return (NULL);
|
||||||
}
|
}
|
||||||
extent_heaps_remove(tsdn, extent_heaps, extent);
|
extent_heaps_remove(tsdn, extent_heaps, extent);
|
||||||
@ -493,10 +489,12 @@ extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
|||||||
assert(new_addr == NULL || leadsize == 0);
|
assert(new_addr == NULL || leadsize == 0);
|
||||||
assert(extent_size_get(extent) >= leadsize + size);
|
assert(extent_size_get(extent) >= leadsize + size);
|
||||||
trailsize = extent_size_get(extent) - leadsize - size;
|
trailsize = extent_size_get(extent) - leadsize - size;
|
||||||
if (extent_zeroed_get(extent))
|
if (extent_zeroed_get(extent)) {
|
||||||
*zero = true;
|
*zero = true;
|
||||||
if (extent_committed_get(extent))
|
}
|
||||||
|
if (extent_committed_get(extent)) {
|
||||||
*commit = true;
|
*commit = true;
|
||||||
|
}
|
||||||
|
|
||||||
/* Split the lead. */
|
/* Split the lead. */
|
||||||
if (leadsize != 0) {
|
if (leadsize != 0) {
|
||||||
@ -507,8 +505,9 @@ extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
|||||||
if (extent == NULL) {
|
if (extent == NULL) {
|
||||||
extent_deregister(tsdn, lead);
|
extent_deregister(tsdn, lead);
|
||||||
extent_leak(tsdn, arena, r_extent_hooks, cache, lead);
|
extent_leak(tsdn, arena, r_extent_hooks, cache, lead);
|
||||||
if (!locked)
|
if (!locked) {
|
||||||
malloc_mutex_unlock(tsdn, &arena->extents_mtx);
|
malloc_mutex_unlock(tsdn, &arena->extents_mtx);
|
||||||
|
}
|
||||||
return (NULL);
|
return (NULL);
|
||||||
}
|
}
|
||||||
extent_heaps_insert(tsdn, extent_heaps, lead);
|
extent_heaps_insert(tsdn, extent_heaps, lead);
|
||||||
@ -523,8 +522,9 @@ extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
|||||||
extent_deregister(tsdn, extent);
|
extent_deregister(tsdn, extent);
|
||||||
extent_leak(tsdn, arena, r_extent_hooks, cache,
|
extent_leak(tsdn, arena, r_extent_hooks, cache,
|
||||||
extent);
|
extent);
|
||||||
if (!locked)
|
if (!locked) {
|
||||||
malloc_mutex_unlock(tsdn, &arena->extents_mtx);
|
malloc_mutex_unlock(tsdn, &arena->extents_mtx);
|
||||||
|
}
|
||||||
return (NULL);
|
return (NULL);
|
||||||
}
|
}
|
||||||
extent_heaps_insert(tsdn, extent_heaps, trail);
|
extent_heaps_insert(tsdn, extent_heaps, trail);
|
||||||
@ -540,8 +540,9 @@ extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
|||||||
if (*commit && !extent_committed_get(extent)) {
|
if (*commit && !extent_committed_get(extent)) {
|
||||||
if (extent_commit_wrapper(tsdn, arena, r_extent_hooks, extent,
|
if (extent_commit_wrapper(tsdn, arena, r_extent_hooks, extent,
|
||||||
0, extent_size_get(extent))) {
|
0, extent_size_get(extent))) {
|
||||||
if (!locked)
|
if (!locked) {
|
||||||
malloc_mutex_unlock(tsdn, &arena->extents_mtx);
|
malloc_mutex_unlock(tsdn, &arena->extents_mtx);
|
||||||
|
}
|
||||||
extent_record(tsdn, arena, r_extent_hooks, extent_heaps,
|
extent_record(tsdn, arena, r_extent_hooks, extent_heaps,
|
||||||
cache, extent);
|
cache, extent);
|
||||||
return (NULL);
|
return (NULL);
|
||||||
@ -549,16 +550,18 @@ extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
|||||||
extent_zeroed_set(extent, true);
|
extent_zeroed_set(extent, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (pad != 0)
|
if (pad != 0) {
|
||||||
extent_addr_randomize(tsdn, extent, alignment);
|
extent_addr_randomize(tsdn, extent, alignment);
|
||||||
|
}
|
||||||
extent_active_set(extent, true);
|
extent_active_set(extent, true);
|
||||||
if (slab) {
|
if (slab) {
|
||||||
extent_slab_set(extent, slab);
|
extent_slab_set(extent, slab);
|
||||||
extent_interior_register(tsdn, rtree_ctx, extent);
|
extent_interior_register(tsdn, rtree_ctx, extent);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!locked)
|
if (!locked) {
|
||||||
malloc_mutex_unlock(tsdn, &arena->extents_mtx);
|
malloc_mutex_unlock(tsdn, &arena->extents_mtx);
|
||||||
|
}
|
||||||
|
|
||||||
if (*zero) {
|
if (*zero) {
|
||||||
if (!extent_zeroed_get(extent)) {
|
if (!extent_zeroed_get(extent)) {
|
||||||
@ -569,10 +572,11 @@ extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
|||||||
size_t *p = (size_t *)(uintptr_t)
|
size_t *p = (size_t *)(uintptr_t)
|
||||||
extent_addr_get(extent);
|
extent_addr_get(extent);
|
||||||
|
|
||||||
for (i = 0; i < usize / sizeof(size_t); i++)
|
for (i = 0; i < usize / sizeof(size_t); i++) {
|
||||||
assert(p[i] == 0);
|
assert(p[i] == 0);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
return (extent);
|
return (extent);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -584,8 +588,7 @@ extent_recycle(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
|||||||
*/
|
*/
|
||||||
static void *
|
static void *
|
||||||
extent_alloc_core(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size,
|
extent_alloc_core(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size,
|
||||||
size_t alignment, bool *zero, bool *commit, dss_prec_t dss_prec)
|
size_t alignment, bool *zero, bool *commit, dss_prec_t dss_prec) {
|
||||||
{
|
|
||||||
void *ret;
|
void *ret;
|
||||||
|
|
||||||
assert(size != 0);
|
assert(size != 0);
|
||||||
@ -594,17 +597,20 @@ extent_alloc_core(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size,
|
|||||||
/* "primary" dss. */
|
/* "primary" dss. */
|
||||||
if (have_dss && dss_prec == dss_prec_primary && (ret =
|
if (have_dss && dss_prec == dss_prec_primary && (ret =
|
||||||
extent_alloc_dss(tsdn, arena, new_addr, size, alignment, zero,
|
extent_alloc_dss(tsdn, arena, new_addr, size, alignment, zero,
|
||||||
commit)) != NULL)
|
commit)) != NULL) {
|
||||||
return (ret);
|
return (ret);
|
||||||
|
}
|
||||||
/* mmap. */
|
/* mmap. */
|
||||||
if ((ret = extent_alloc_mmap(new_addr, size, alignment, zero, commit))
|
if ((ret = extent_alloc_mmap(new_addr, size, alignment, zero, commit))
|
||||||
!= NULL)
|
!= NULL) {
|
||||||
return (ret);
|
return (ret);
|
||||||
|
}
|
||||||
/* "secondary" dss. */
|
/* "secondary" dss. */
|
||||||
if (have_dss && dss_prec == dss_prec_secondary && (ret =
|
if (have_dss && dss_prec == dss_prec_secondary && (ret =
|
||||||
extent_alloc_dss(tsdn, arena, new_addr, size, alignment, zero,
|
extent_alloc_dss(tsdn, arena, new_addr, size, alignment, zero,
|
||||||
commit)) != NULL)
|
commit)) != NULL) {
|
||||||
return (ret);
|
return (ret);
|
||||||
|
}
|
||||||
|
|
||||||
/* All strategies for allocation failed. */
|
/* All strategies for allocation failed. */
|
||||||
return (NULL);
|
return (NULL);
|
||||||
@ -613,8 +619,7 @@ extent_alloc_core(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size,
|
|||||||
static extent_t *
|
static extent_t *
|
||||||
extent_alloc_cache_impl(tsdn_t *tsdn, arena_t *arena,
|
extent_alloc_cache_impl(tsdn_t *tsdn, arena_t *arena,
|
||||||
extent_hooks_t **r_extent_hooks, bool locked, void *new_addr, size_t usize,
|
extent_hooks_t **r_extent_hooks, bool locked, void *new_addr, size_t usize,
|
||||||
size_t pad, size_t alignment, bool *zero, bool *commit, bool slab)
|
size_t pad, size_t alignment, bool *zero, bool *commit, bool slab) {
|
||||||
{
|
|
||||||
extent_t *extent;
|
extent_t *extent;
|
||||||
|
|
||||||
assert(usize + pad != 0);
|
assert(usize + pad != 0);
|
||||||
@ -629,8 +634,7 @@ extent_alloc_cache_impl(tsdn_t *tsdn, arena_t *arena,
|
|||||||
extent_t *
|
extent_t *
|
||||||
extent_alloc_cache_locked(tsdn_t *tsdn, arena_t *arena,
|
extent_alloc_cache_locked(tsdn_t *tsdn, arena_t *arena,
|
||||||
extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad,
|
extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad,
|
||||||
size_t alignment, bool *zero, bool *commit, bool slab)
|
size_t alignment, bool *zero, bool *commit, bool slab) {
|
||||||
{
|
|
||||||
malloc_mutex_assert_owner(tsdn, &arena->extents_mtx);
|
malloc_mutex_assert_owner(tsdn, &arena->extents_mtx);
|
||||||
|
|
||||||
return (extent_alloc_cache_impl(tsdn, arena, r_extent_hooks, true,
|
return (extent_alloc_cache_impl(tsdn, arena, r_extent_hooks, true,
|
||||||
@ -640,16 +644,14 @@ extent_alloc_cache_locked(tsdn_t *tsdn, arena_t *arena,
|
|||||||
extent_t *
|
extent_t *
|
||||||
extent_alloc_cache(tsdn_t *tsdn, arena_t *arena,
|
extent_alloc_cache(tsdn_t *tsdn, arena_t *arena,
|
||||||
extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad,
|
extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad,
|
||||||
size_t alignment, bool *zero, bool *commit, bool slab)
|
size_t alignment, bool *zero, bool *commit, bool slab) {
|
||||||
{
|
|
||||||
return (extent_alloc_cache_impl(tsdn, arena, r_extent_hooks, false,
|
return (extent_alloc_cache_impl(tsdn, arena, r_extent_hooks, false,
|
||||||
new_addr, usize, pad, alignment, zero, commit, slab));
|
new_addr, usize, pad, alignment, zero, commit, slab));
|
||||||
}
|
}
|
||||||
|
|
||||||
static void *
|
static void *
|
||||||
extent_alloc_default_impl(tsdn_t *tsdn, arena_t *arena, void *new_addr,
|
extent_alloc_default_impl(tsdn_t *tsdn, arena_t *arena, void *new_addr,
|
||||||
size_t size, size_t alignment, bool *zero, bool *commit)
|
size_t size, size_t alignment, bool *zero, bool *commit) {
|
||||||
{
|
|
||||||
void *ret;
|
void *ret;
|
||||||
|
|
||||||
ret = extent_alloc_core(tsdn, arena, new_addr, size, alignment, zero,
|
ret = extent_alloc_core(tsdn, arena, new_addr, size, alignment, zero,
|
||||||
@ -659,8 +661,7 @@ extent_alloc_default_impl(tsdn_t *tsdn, arena_t *arena, void *new_addr,
|
|||||||
|
|
||||||
static void *
|
static void *
|
||||||
extent_alloc_default(extent_hooks_t *extent_hooks, void *new_addr, size_t size,
|
extent_alloc_default(extent_hooks_t *extent_hooks, void *new_addr, size_t size,
|
||||||
size_t alignment, bool *zero, bool *commit, unsigned arena_ind)
|
size_t alignment, bool *zero, bool *commit, unsigned arena_ind) {
|
||||||
{
|
|
||||||
tsdn_t *tsdn;
|
tsdn_t *tsdn;
|
||||||
arena_t *arena;
|
arena_t *arena;
|
||||||
|
|
||||||
@ -680,10 +681,10 @@ extent_alloc_default(extent_hooks_t *extent_hooks, void *new_addr, size_t size,
|
|||||||
|
|
||||||
static void
|
static void
|
||||||
extent_retain(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
extent_retain(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
|
||||||
extent_t *extent)
|
extent_t *extent) {
|
||||||
{
|
if (config_stats) {
|
||||||
if (config_stats)
|
|
||||||
arena->stats.retained += extent_size_get(extent);
|
arena->stats.retained += extent_size_get(extent);
|
||||||
|
}
|
||||||
extent_record(tsdn, arena, r_extent_hooks, arena->extents_retained,
|
extent_record(tsdn, arena, r_extent_hooks, arena->extents_retained,
|
||||||
false, extent);
|
false, extent);
|
||||||
}
|
}
|
||||||
@ -696,8 +697,7 @@ extent_retain(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
static extent_t *
extent_grow_retained(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad,
size_t alignment, bool *zero, bool *commit, bool slab)
size_t alignment, bool *zero, bool *commit, bool slab) {
{
extent_t *extent;
void *ptr;
size_t size, alloc_size, alloc_size_min, leadsize, trailsize;
@ -713,13 +713,16 @@ extent_grow_retained(tsdn_t *tsdn, arena_t *arena,
alloc_size = pind2sz(arena->extent_grow_next);
alloc_size_min = size + PAGE_CEILING(alignment) - PAGE;
/* Beware size_t wrap-around. */
if (alloc_size_min < usize)
if (alloc_size_min < usize) {
return (NULL);
if (alloc_size < alloc_size_min)
}
if (alloc_size < alloc_size_min) {
return (NULL);
}
extent = extent_alloc(tsdn, arena);
if (extent == NULL)
if (extent == NULL) {
return (NULL);
}
zeroed = false;
committed = false;
ptr = extent_alloc_core(tsdn, arena, new_addr, alloc_size, PAGE,
@ -741,10 +744,12 @@ extent_grow_retained(tsdn_t *tsdn, arena_t *arena,
assert(new_addr == NULL || leadsize == 0);
assert(alloc_size >= leadsize + size);
trailsize = alloc_size - leadsize - size;
if (extent_zeroed_get(extent))
if (extent_zeroed_get(extent)) {
*zero = true;
if (extent_committed_get(extent))
}
if (extent_committed_get(extent)) {
*commit = true;
}

/* Split the lead. */
if (leadsize != 0) {
@ -790,8 +795,9 @@ extent_grow_retained(tsdn_t *tsdn, arena_t *arena,
/* Adjust gprof stats now that extent is final size. */
extent_gprof_add(tsdn, extent);
}
if (pad != 0)
if (pad != 0) {
extent_addr_randomize(tsdn, extent, alignment);
}
if (slab) {
rtree_ctx_t rtree_ctx_fallback;
rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn,
@ -800,18 +806,19 @@ extent_grow_retained(tsdn_t *tsdn, arena_t *arena,
extent_slab_set(extent, true);
extent_interior_register(tsdn, rtree_ctx, extent);
}
if (*zero && !extent_zeroed_get(extent))
if (*zero && !extent_zeroed_get(extent)) {
memset(extent_addr_get(extent), 0, extent_usize_get(extent));
if (arena->extent_grow_next + 1 < NPSIZES)
}
if (arena->extent_grow_next + 1 < NPSIZES) {
arena->extent_grow_next++;
}
return (extent);
}

static extent_t *
extent_alloc_retained(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad,
size_t alignment, bool *zero, bool *commit, bool slab)
size_t alignment, bool *zero, bool *commit, bool slab) {
{
extent_t *extent;

assert(usize != 0);
@ -825,9 +832,10 @@ extent_alloc_retained(tsdn_t *tsdn, arena_t *arena,
size_t size = usize + pad;
arena->stats.retained -= size;
}
if (config_prof)
if (config_prof) {
extent_gprof_add(tsdn, extent);
}
}
if (!config_munmap && extent == NULL) {
extent = extent_grow_retained(tsdn, arena, r_extent_hooks,
new_addr, usize, pad, alignment, zero, commit, slab);
@ -839,16 +847,16 @@ extent_alloc_retained(tsdn_t *tsdn, arena_t *arena,
static extent_t *
extent_alloc_wrapper_hard(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad,
size_t alignment, bool *zero, bool *commit, bool slab)
size_t alignment, bool *zero, bool *commit, bool slab) {
{
extent_t *extent;
size_t size;
void *addr;

size = usize + pad;
extent = extent_alloc(tsdn, arena);
if (extent == NULL)
if (extent == NULL) {
return (NULL);
}
if (*r_extent_hooks == &extent_hooks_default) {
/* Call directly to propagate tsdn. */
addr = extent_alloc_default_impl(tsdn, arena, new_addr, size,
@ -863,8 +871,9 @@ extent_alloc_wrapper_hard(tsdn_t *tsdn, arena_t *arena,
}
extent_init(extent, arena, addr, size, usize,
arena_extent_sn_next(arena), true, zero, commit, slab);
if (pad != 0)
if (pad != 0) {
extent_addr_randomize(tsdn, extent, alignment);
}
if (extent_register(tsdn, extent)) {
extent_leak(tsdn, arena, r_extent_hooks, false, extent);
return (NULL);
@ -876,8 +885,7 @@ extent_alloc_wrapper_hard(tsdn_t *tsdn, arena_t *arena,
extent_t *
extent_alloc_wrapper(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, void *new_addr, size_t usize, size_t pad,
size_t alignment, bool *zero, bool *commit, bool slab)
size_t alignment, bool *zero, bool *commit, bool slab) {
{
extent_t *extent;

extent_hooks_assure_initialized(arena, r_extent_hooks);
@ -893,16 +901,19 @@ extent_alloc_wrapper(tsdn_t *tsdn, arena_t *arena,
}

static bool
extent_can_coalesce(const extent_t *a, const extent_t *b)
extent_can_coalesce(const extent_t *a, const extent_t *b) {
{
if (extent_arena_get(a) != extent_arena_get(b)) {
if (extent_arena_get(a) != extent_arena_get(b))
return (false);
if (extent_active_get(a) != extent_active_get(b))
}
if (extent_active_get(a) != extent_active_get(b)) {
return (false);
if (extent_committed_get(a) != extent_committed_get(b))
}
if (extent_committed_get(a) != extent_committed_get(b)) {
return (false);
if (extent_retained_get(a) != extent_retained_get(b))
}
if (extent_retained_get(a) != extent_retained_get(b)) {
return (false);
}

return (true);
}
@ -910,10 +921,10 @@ extent_can_coalesce(const extent_t *a, const extent_t *b)
static void
extent_try_coalesce(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, extent_t *a, extent_t *b,
extent_heap_t extent_heaps[NPSIZES+1], bool cache)
extent_heap_t extent_heaps[NPSIZES+1], bool cache) {
{
if (!extent_can_coalesce(a, b)) {
if (!extent_can_coalesce(a, b))
return;
}

extent_heaps_remove(tsdn, extent_heaps, a);
extent_heaps_remove(tsdn, extent_heaps, b);
@ -937,8 +948,7 @@ extent_try_coalesce(tsdn_t *tsdn, arena_t *arena,

static void
extent_record(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
extent_heap_t extent_heaps[NPSIZES+1], bool cache, extent_t *extent)
extent_heap_t extent_heaps[NPSIZES+1], bool cache, extent_t *extent) {
{
extent_t *prev, *next;
rtree_ctx_t rtree_ctx_fallback;
rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
@ -980,8 +990,7 @@ extent_record(tsdn_t *tsdn, arena_t *arena, extent_hooks_t **r_extent_hooks,
}

void
extent_dalloc_gap(tsdn_t *tsdn, arena_t *arena, extent_t *extent)
extent_dalloc_gap(tsdn_t *tsdn, arena_t *arena, extent_t *extent) {
{
extent_hooks_t *extent_hooks = EXTENT_HOOKS_INITIALIZER;

if (extent_register(tsdn, extent)) {
@ -993,8 +1002,7 @@ extent_dalloc_gap(tsdn_t *tsdn, arena_t *arena, extent_t *extent)

void
extent_dalloc_cache(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, extent_t *extent)
extent_hooks_t **r_extent_hooks, extent_t *extent) {
{
assert(extent_base_get(extent) != NULL);
assert(extent_size_get(extent) != 0);

@ -1006,17 +1014,16 @@ extent_dalloc_cache(tsdn_t *tsdn, arena_t *arena,
}

static bool
extent_dalloc_default_impl(void *addr, size_t size)
extent_dalloc_default_impl(void *addr, size_t size) {
{
if (!have_dss || !extent_in_dss(addr)) {
if (!have_dss || !extent_in_dss(addr))
return (extent_dalloc_mmap(addr, size));
}
return (true);
}

static bool
extent_dalloc_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
bool committed, unsigned arena_ind)
bool committed, unsigned arena_ind) {
{
assert(extent_hooks == &extent_hooks_default);

return (extent_dalloc_default_impl(addr, size));
@ -1024,8 +1031,7 @@ extent_dalloc_default(extent_hooks_t *extent_hooks, void *addr, size_t size,

bool
extent_dalloc_wrapper_try(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, extent_t *extent)
extent_hooks_t **r_extent_hooks, extent_t *extent) {
{
bool err;

assert(extent_base_get(extent) != NULL);
@ -1050,46 +1056,50 @@ extent_dalloc_wrapper_try(tsdn_t *tsdn, arena_t *arena,
extent_committed_get(extent), arena_ind_get(arena)));
}

if (!err)
if (!err) {
extent_dalloc(tsdn, arena, extent);
}

return (err);
}

void
extent_dalloc_wrapper(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, extent_t *extent)
extent_hooks_t **r_extent_hooks, extent_t *extent) {
{
bool zeroed;

if (!extent_dalloc_wrapper_try(tsdn, arena, r_extent_hooks, extent))
if (!extent_dalloc_wrapper_try(tsdn, arena, r_extent_hooks, extent)) {
return;
}

extent_reregister(tsdn, extent);
/* Try to decommit; purge if that fails. */
if (!extent_committed_get(extent))
if (!extent_committed_get(extent)) {
zeroed = true;
else if (!extent_decommit_wrapper(tsdn, arena, r_extent_hooks, extent,
} else if (!extent_decommit_wrapper(tsdn, arena, r_extent_hooks, extent,
0, extent_size_get(extent)))
0, extent_size_get(extent))) {
zeroed = true;
else if ((*r_extent_hooks)->purge_lazy != NULL &&
} else if ((*r_extent_hooks)->purge_lazy != NULL &&
!(*r_extent_hooks)->purge_lazy(*r_extent_hooks,
extent_base_get(extent), extent_size_get(extent), 0,
extent_size_get(extent), arena_ind_get(arena)))
extent_size_get(extent), arena_ind_get(arena))) {
zeroed = false;
else if ((*r_extent_hooks)->purge_forced != NULL &&
} else if ((*r_extent_hooks)->purge_forced != NULL &&
!(*r_extent_hooks)->purge_forced(*r_extent_hooks,
extent_base_get(extent), extent_size_get(extent), 0,
extent_size_get(extent), arena_ind_get(arena)))
extent_size_get(extent), arena_ind_get(arena))) {
zeroed = true;
else
} else {
zeroed = false;
}
extent_zeroed_set(extent, zeroed);

if (config_stats)
if (config_stats) {
arena->stats.retained += extent_size_get(extent);
if (config_prof)
}
if (config_prof) {
extent_gprof_sub(tsdn, extent);
}

extent_record(tsdn, arena, r_extent_hooks, arena->extents_retained,
false, extent);
@ -1097,8 +1107,7 @@ extent_dalloc_wrapper(tsdn_t *tsdn, arena_t *arena,

static bool
extent_commit_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
size_t offset, size_t length, unsigned arena_ind)
size_t offset, size_t length, unsigned arena_ind) {
{
assert(extent_hooks == &extent_hooks_default);

return (pages_commit((void *)((uintptr_t)addr + (uintptr_t)offset),
@ -1108,8 +1117,7 @@ extent_commit_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
bool
extent_commit_wrapper(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset,
size_t length)
size_t length) {
{
bool err;

extent_hooks_assure_initialized(arena, r_extent_hooks);
@ -1122,8 +1130,7 @@ extent_commit_wrapper(tsdn_t *tsdn, arena_t *arena,

static bool
extent_decommit_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
size_t offset, size_t length, unsigned arena_ind)
size_t offset, size_t length, unsigned arena_ind) {
{
assert(extent_hooks == &extent_hooks_default);

return (pages_decommit((void *)((uintptr_t)addr + (uintptr_t)offset),
@ -1133,8 +1140,7 @@ extent_decommit_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
bool
extent_decommit_wrapper(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset,
size_t length)
size_t length) {
{
bool err;

extent_hooks_assure_initialized(arena, r_extent_hooks);
@ -1150,8 +1156,7 @@ extent_decommit_wrapper(tsdn_t *tsdn, arena_t *arena,
#ifdef PAGES_CAN_PURGE_LAZY
static bool
extent_purge_lazy_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
size_t offset, size_t length, unsigned arena_ind)
size_t offset, size_t length, unsigned arena_ind) {
{
assert(extent_hooks == &extent_hooks_default);
assert(addr != NULL);
assert((offset & PAGE_MASK) == 0);
@ -1166,8 +1171,7 @@ extent_purge_lazy_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
bool
extent_purge_lazy_wrapper(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset,
size_t length)
size_t length) {
{
extent_hooks_assure_initialized(arena, r_extent_hooks);
return ((*r_extent_hooks)->purge_lazy == NULL ||
(*r_extent_hooks)->purge_lazy(*r_extent_hooks,
@ -1178,8 +1182,7 @@ extent_purge_lazy_wrapper(tsdn_t *tsdn, arena_t *arena,
#ifdef PAGES_CAN_PURGE_FORCED
static bool
extent_purge_forced_default(extent_hooks_t *extent_hooks, void *addr,
size_t size, size_t offset, size_t length, unsigned arena_ind)
size_t size, size_t offset, size_t length, unsigned arena_ind) {
{
assert(extent_hooks == &extent_hooks_default);
assert(addr != NULL);
assert((offset & PAGE_MASK) == 0);
@ -1194,8 +1197,7 @@ extent_purge_forced_default(extent_hooks_t *extent_hooks, void *addr,
bool
extent_purge_forced_wrapper(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, extent_t *extent, size_t offset,
size_t length)
size_t length) {
{
extent_hooks_assure_initialized(arena, r_extent_hooks);
return ((*r_extent_hooks)->purge_forced == NULL ||
(*r_extent_hooks)->purge_forced(*r_extent_hooks,
@ -1206,12 +1208,12 @@ extent_purge_forced_wrapper(tsdn_t *tsdn, arena_t *arena,
#ifdef JEMALLOC_MAPS_COALESCE
static bool
extent_split_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
size_t size_a, size_t size_b, bool committed, unsigned arena_ind)
size_t size_a, size_t size_b, bool committed, unsigned arena_ind) {
{
assert(extent_hooks == &extent_hooks_default);

if (!maps_coalesce)
if (!maps_coalesce) {
return (true);
}
return (false);
}
#endif
@ -1219,8 +1221,7 @@ extent_split_default(extent_hooks_t *extent_hooks, void *addr, size_t size,
extent_t *
extent_split_wrapper(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, extent_t *extent, size_t size_a,
size_t usize_a, size_t size_b, size_t usize_b)
size_t usize_a, size_t size_b, size_t usize_b) {
{
extent_t *trail;
rtree_ctx_t rtree_ctx_fallback;
rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
@ -1230,12 +1231,14 @@ extent_split_wrapper(tsdn_t *tsdn, arena_t *arena,

extent_hooks_assure_initialized(arena, r_extent_hooks);

if ((*r_extent_hooks)->split == NULL)
if ((*r_extent_hooks)->split == NULL) {
return (NULL);
}

trail = extent_alloc(tsdn, arena);
if (trail == NULL)
if (trail == NULL) {
goto label_error_a;
}

{
extent_t lead;
@ -1246,22 +1249,25 @@ extent_split_wrapper(tsdn_t *tsdn, arena_t *arena,
extent_slab_get(extent));

if (extent_rtree_acquire(tsdn, rtree_ctx, &lead, false, true,
&lead_elm_a, &lead_elm_b))
&lead_elm_a, &lead_elm_b)) {
goto label_error_b;
}
}

extent_init(trail, arena, (void *)((uintptr_t)extent_base_get(extent) +
size_a), size_b, usize_b, extent_sn_get(extent),
extent_active_get(extent), extent_zeroed_get(extent),
extent_committed_get(extent), extent_slab_get(extent));
if (extent_rtree_acquire(tsdn, rtree_ctx, trail, false, true,
&trail_elm_a, &trail_elm_b))
&trail_elm_a, &trail_elm_b)) {
goto label_error_c;
}

if ((*r_extent_hooks)->split(*r_extent_hooks, extent_base_get(extent),
size_a + size_b, size_a, size_b, extent_committed_get(extent),
arena_ind_get(arena)))
arena_ind_get(arena))) {
goto label_error_d;
}

extent_size_set(extent, size_a);
extent_usize_set(extent, usize_a);
@ -1284,12 +1290,13 @@ label_error_a:
}

static bool
extent_merge_default_impl(void *addr_a, void *addr_b)
extent_merge_default_impl(void *addr_a, void *addr_b) {
{
if (!maps_coalesce) {
if (!maps_coalesce)
return (true);
if (have_dss && !extent_dss_mergeable(addr_a, addr_b))
}
if (have_dss && !extent_dss_mergeable(addr_a, addr_b)) {
return (true);
}

return (false);
}
@ -1297,8 +1304,7 @@ extent_merge_default_impl(void *addr_a, void *addr_b)
#ifdef JEMALLOC_MAPS_COALESCE
static bool
extent_merge_default(extent_hooks_t *extent_hooks, void *addr_a, size_t size_a,
void *addr_b, size_t size_b, bool committed, unsigned arena_ind)
void *addr_b, size_t size_b, bool committed, unsigned arena_ind) {
{
assert(extent_hooks == &extent_hooks_default);

return (extent_merge_default_impl(addr_a, addr_b));
@ -1307,8 +1313,7 @@ extent_merge_default(extent_hooks_t *extent_hooks, void *addr_a, size_t size_a,

bool
extent_merge_wrapper(tsdn_t *tsdn, arena_t *arena,
extent_hooks_t **r_extent_hooks, extent_t *a, extent_t *b)
extent_hooks_t **r_extent_hooks, extent_t *a, extent_t *b) {
{
bool err;
rtree_ctx_t rtree_ctx_fallback;
rtree_ctx_t *rtree_ctx = tsdn_rtree_ctx(tsdn, &rtree_ctx_fallback);
@ -1316,8 +1321,9 @@ extent_merge_wrapper(tsdn_t *tsdn, arena_t *arena,

extent_hooks_assure_initialized(arena, r_extent_hooks);

if ((*r_extent_hooks)->merge == NULL)
if ((*r_extent_hooks)->merge == NULL) {
return (true);
}

if (*r_extent_hooks == &extent_hooks_default) {
/* Call directly to propagate tsdn. */
@ -1330,8 +1336,9 @@ extent_merge_wrapper(tsdn_t *tsdn, arena_t *arena,
arena_ind_get(arena));
}

if (err)
if (err) {
return (true);
}

/*
* The rtree writes must happen while all the relevant elements are
@ -1350,8 +1357,9 @@ extent_merge_wrapper(tsdn_t *tsdn, arena_t *arena,
if (b_elm_b != NULL) {
rtree_elm_write_acquired(tsdn, &extents_rtree, b_elm_a, NULL);
rtree_elm_release(tsdn, &extents_rtree, b_elm_a);
} else
} else {
b_elm_b = b_elm_a;
}

extent_size_set(a, extent_size_get(a) + extent_size_get(b));
extent_usize_set(a, extent_usize_get(a) + extent_usize_get(b));
@ -1368,14 +1376,15 @@ extent_merge_wrapper(tsdn_t *tsdn, arena_t *arena,
}

bool
extent_boot(void)
extent_boot(void) {
{
if (rtree_new(&extents_rtree, (unsigned)((ZU(1) << (LG_SIZEOF_PTR+3)) -
LG_PAGE)))
LG_PAGE))) {
return (true);
}

if (have_dss)
if (have_dss) {
extent_dss_boot();
}

return (false);
}
@ -30,8 +30,7 @@ static void *dss_max
/******************************************************************************/

static void *
extent_dss_sbrk(intptr_t increment)
extent_dss_sbrk(intptr_t increment) {
{
#ifdef JEMALLOC_DSS
return (sbrk(increment));
#else
@ -41,28 +40,27 @@ extent_dss_sbrk(intptr_t increment)
}

dss_prec_t
extent_dss_prec_get(void)
extent_dss_prec_get(void) {
{
dss_prec_t ret;

if (!have_dss)
if (!have_dss) {
return (dss_prec_disabled);
}
ret = (dss_prec_t)atomic_read_u(&dss_prec_default);
return (ret);
}

bool
extent_dss_prec_set(dss_prec_t dss_prec)
extent_dss_prec_set(dss_prec_t dss_prec) {
{
if (!have_dss) {
if (!have_dss)
return (dss_prec != dss_prec_disabled);
}
atomic_write_u(&dss_prec_default, (unsigned)dss_prec);
return (false);
}

static void *
extent_dss_max_update(void *new_addr)
extent_dss_max_update(void *new_addr) {
{
void *max_cur;
spin_t spinner;

@ -83,20 +81,21 @@ extent_dss_max_update(void *new_addr)
spin_adaptive(&spinner);
continue;
}
if (!atomic_cas_p(&dss_max, max_prev, max_cur))
if (!atomic_cas_p(&dss_max, max_prev, max_cur)) {
break;
}
}
/* Fixed new_addr can only be supported if it is at the edge of DSS. */
if (new_addr != NULL && max_cur != new_addr)
if (new_addr != NULL && max_cur != new_addr) {
return (NULL);
}

return (max_cur);
}

void *
extent_alloc_dss(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size,
size_t alignment, bool *zero, bool *commit)
size_t alignment, bool *zero, bool *commit) {
{
extent_t *gap;

cassert(have_dss);
@ -107,12 +106,14 @@ extent_alloc_dss(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size,
* sbrk() uses a signed increment argument, so take care not to
* interpret a large allocation request as a negative increment.
*/
if ((intptr_t)size < 0)
if ((intptr_t)size < 0) {
return (NULL);
}

gap = extent_alloc(tsdn, arena);
if (gap == NULL)
if (gap == NULL) {
return (NULL);
}

if (!atomic_read_u(&dss_exhausted)) {
/*
@ -126,8 +127,9 @@ extent_alloc_dss(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size,
intptr_t incr;

max_cur = extent_dss_max_update(new_addr);
if (max_cur == NULL)
if (max_cur == NULL) {
goto label_oom;
}

/*
* Compute how much gap space (if any) is necessary to
@ -145,8 +147,9 @@ extent_alloc_dss(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size,
}
dss_next = (void *)((uintptr_t)ret + size);
if ((uintptr_t)ret < (uintptr_t)max_cur ||
(uintptr_t)dss_next < (uintptr_t)max_cur)
(uintptr_t)dss_next < (uintptr_t)max_cur) {
goto label_oom; /* Wrap-around. */
}
incr = gap_size + size;

/*
@ -155,19 +158,22 @@ extent_alloc_dss(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size,
* DSS while dss_max is greater than the current DSS
* max reported by sbrk(0).
*/
if (atomic_cas_p(&dss_max, max_cur, dss_next))
if (atomic_cas_p(&dss_max, max_cur, dss_next)) {
continue;
}

/* Try to allocate. */
dss_prev = extent_dss_sbrk(incr);
if (dss_prev == max_cur) {
/* Success. */
if (gap_size != 0)
if (gap_size != 0) {
extent_dalloc_gap(tsdn, arena, gap);
else
} else {
extent_dalloc(tsdn, arena, gap);
if (!*commit)
}
if (!*commit) {
*commit = pages_decommit(ret, size);
}
if (*zero && *commit) {
extent_hooks_t *extent_hooks =
EXTENT_HOOKS_INITIALIZER;
@ -177,9 +183,10 @@ extent_alloc_dss(tsdn_t *tsdn, arena_t *arena, void *new_addr, size_t size,
size, 0, true, false, true, false);
if (extent_purge_forced_wrapper(tsdn,
arena, &extent_hooks, &extent, 0,
size))
size)) {
memset(ret, 0, size);
}
}
return (ret);
}
/*
@ -204,30 +211,28 @@ label_oom:
}

static bool
extent_in_dss_helper(void *addr, void *max)
extent_in_dss_helper(void *addr, void *max) {
{
return ((uintptr_t)addr >= (uintptr_t)dss_base && (uintptr_t)addr <
(uintptr_t)max);
}

bool
extent_in_dss(void *addr)
extent_in_dss(void *addr) {
{
cassert(have_dss);

return (extent_in_dss_helper(addr, atomic_read_p(&dss_max)));
}

bool
extent_dss_mergeable(void *addr_a, void *addr_b)
extent_dss_mergeable(void *addr_a, void *addr_b) {
{
void *max;

cassert(have_dss);

if ((uintptr_t)addr_a < (uintptr_t)dss_base && (uintptr_t)addr_b <
(uintptr_t)dss_base)
(uintptr_t)dss_base) {
return (true);
}

max = atomic_read_p(&dss_max);
return (extent_in_dss_helper(addr_a, max) ==
@ -235,8 +240,7 @@ extent_dss_mergeable(void *addr_a, void *addr_b)
}

void
extent_dss_boot(void)
extent_dss_boot(void) {
{
cassert(have_dss);

dss_base = extent_dss_sbrk(0);
@ -4,21 +4,23 @@
/******************************************************************************/

static void *
extent_alloc_mmap_slow(size_t size, size_t alignment, bool *zero, bool *commit)
extent_alloc_mmap_slow(size_t size, size_t alignment, bool *zero,
{
bool *commit) {
void *ret;
size_t alloc_size;

alloc_size = size + alignment - PAGE;
/* Beware size_t wrap-around. */
if (alloc_size < size)
if (alloc_size < size) {
return (NULL);
}
do {
void *pages;
size_t leadsize;
pages = pages_map(NULL, alloc_size, commit);
if (pages == NULL)
if (pages == NULL) {
return (NULL);
}
leadsize = ALIGNMENT_CEILING((uintptr_t)pages, alignment) -
(uintptr_t)pages;
ret = pages_trim(pages, alloc_size, leadsize, size, commit);
@ -31,8 +33,7 @@ extent_alloc_mmap_slow(size_t size, size_t alignment, bool *zero, bool *commit)

void *
extent_alloc_mmap(void *new_addr, size_t size, size_t alignment, bool *zero,
bool *commit)
bool *commit) {
{
void *ret;
size_t offset;

@ -52,8 +53,9 @@ extent_alloc_mmap(void *new_addr, size_t size, size_t alignment, bool *zero,
assert(alignment != 0);

ret = pages_map(new_addr, size, commit);
if (ret == NULL || ret == new_addr)
if (ret == NULL || ret == new_addr) {
return (ret);
}
assert(new_addr == NULL);
offset = ALIGNMENT_ADDR2OFFSET(ret, alignment);
if (offset != 0) {
@ -67,9 +69,9 @@ extent_alloc_mmap(void *new_addr, size_t size, size_t alignment, bool *zero,
}

bool
extent_dalloc_mmap(void *addr, size_t size)
extent_dalloc_mmap(void *addr, size_t size) {
{
if (config_munmap) {
if (config_munmap)
pages_unmap(addr, size);
}
return (!config_munmap);
}
497
src/jemalloc.c
File diff suppressed because it is too large
@ -33,8 +33,7 @@ void operator delete[](void *ptr, std::size_t size) noexcept;
template <bool IsNoExcept>
JEMALLOC_INLINE
void *
newImpl(std::size_t size) noexcept(IsNoExcept)
newImpl(std::size_t size) noexcept(IsNoExcept) {
{
void *ptr = je_malloc(size);
if (likely(ptr != nullptr))
return (ptr);
@ -67,65 +66,55 @@ newImpl(std::size_t size) noexcept(IsNoExcept)
}

void *
operator new(std::size_t size)
operator new(std::size_t size) {
{
return (newImpl<false>(size));
}

void *
operator new[](std::size_t size)
operator new[](std::size_t size) {
{
return (newImpl<false>(size));
}

void *
operator new(std::size_t size, const std::nothrow_t &) noexcept
operator new(std::size_t size, const std::nothrow_t &) noexcept {
{
return (newImpl<true>(size));
}

void *
operator new[](std::size_t size, const std::nothrow_t &) noexcept
operator new[](std::size_t size, const std::nothrow_t &) noexcept {
{
return (newImpl<true>(size));
}

void
operator delete(void *ptr) noexcept
operator delete(void *ptr) noexcept {
{
je_free(ptr);
}

void
operator delete[](void *ptr) noexcept
operator delete[](void *ptr) noexcept {
{
je_free(ptr);
}

void
operator delete(void *ptr, const std::nothrow_t &) noexcept
operator delete(void *ptr, const std::nothrow_t &) noexcept {
{
je_free(ptr);
}

void operator delete[](void *ptr, const std::nothrow_t &) noexcept
void operator delete[](void *ptr, const std::nothrow_t &) noexcept {
{
je_free(ptr);
}

#if __cpp_sized_deallocation >= 201309

void
operator delete(void *ptr, std::size_t size) noexcept
operator delete(void *ptr, std::size_t size) noexcept {
{
if (unlikely(ptr == nullptr)) {
return;
}
je_sdallocx(ptr, size, /*flags=*/0);
}

void operator delete[](void *ptr, std::size_t size) noexcept
void operator delete[](void *ptr, std::size_t size) noexcept {
{
if (unlikely(ptr == nullptr)) {
return;
}
87
src/large.c
@ -4,8 +4,7 @@
/******************************************************************************/

void *
large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero)
large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero) {
{
assert(usize == s2u(usize));

return (large_palloc(tsdn, arena, usize, CACHELINE, zero));
@ -13,8 +12,7 @@ large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero)

void *
large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
bool zero)
bool zero) {
{
size_t ausize;
extent_t *extent;
bool is_zeroed;
@ -23,27 +21,31 @@ large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
assert(!tsdn_null(tsdn) || arena != NULL);

ausize = sa2u(usize, alignment);
if (unlikely(ausize == 0 || ausize > LARGE_MAXCLASS))
if (unlikely(ausize == 0 || ausize > LARGE_MAXCLASS)) {
return (NULL);
}

/*
* Copy zero into is_zeroed and pass the copy to extent_alloc(), so that
* it is possible to make correct junk/zero fill decisions below.
*/
is_zeroed = zero;
if (likely(!tsdn_null(tsdn)))
if (likely(!tsdn_null(tsdn))) {
arena = arena_choose(tsdn_tsd(tsdn), arena);
}
if (unlikely(arena == NULL) || (extent = arena_extent_alloc_large(tsdn,
arena, usize, alignment, &is_zeroed)) == NULL)
arena, usize, alignment, &is_zeroed)) == NULL) {
return (NULL);
}

/* Insert extent into large. */
malloc_mutex_lock(tsdn, &arena->large_mtx);
ql_elm_new(extent, ql_link);
ql_tail_insert(&arena->large, extent, ql_link);
malloc_mutex_unlock(tsdn, &arena->large_mtx);
if (config_prof && arena_prof_accum(tsdn, arena, usize))
if (config_prof && arena_prof_accum(tsdn, arena, usize)) {
prof_idump(tsdn);
}

if (zero || (config_fill && unlikely(opt_zero))) {
if (!is_zeroed) {
@ -64,8 +66,7 @@ large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
#define large_dalloc_junk JEMALLOC_N(n_large_dalloc_junk)
#endif
void
large_dalloc_junk(void *ptr, size_t usize)
large_dalloc_junk(void *ptr, size_t usize) {
{
memset(ptr, JEMALLOC_FREE_JUNK, usize);
}
#ifdef JEMALLOC_JET
@ -79,17 +80,17 @@ large_dalloc_junk_t *large_dalloc_junk = JEMALLOC_N(n_large_dalloc_junk);
#define large_dalloc_maybe_junk JEMALLOC_N(n_large_dalloc_maybe_junk)
#endif
void
large_dalloc_maybe_junk(void *ptr, size_t usize)
large_dalloc_maybe_junk(void *ptr, size_t usize) {
{
if (config_fill && have_dss && unlikely(opt_junk_free)) {
/*
* Only bother junk filling if the extent isn't about to be
* unmapped.
*/
if (!config_munmap || (have_dss && extent_in_dss(ptr)))
if (!config_munmap || (have_dss && extent_in_dss(ptr))) {
large_dalloc_junk(ptr, usize);
}
}
}
#ifdef JEMALLOC_JET
#undef large_dalloc_maybe_junk
#define large_dalloc_maybe_junk JEMALLOC_N(large_dalloc_maybe_junk)
@ -98,8 +99,7 @@ large_dalloc_maybe_junk_t *large_dalloc_maybe_junk =
#endif

static bool
large_ralloc_no_move_shrink(tsdn_t *tsdn, extent_t *extent, size_t usize)
large_ralloc_no_move_shrink(tsdn_t *tsdn, extent_t *extent, size_t usize) {
{
arena_t *arena = extent_arena_get(extent);
size_t oldusize = extent_usize_get(extent);
extent_hooks_t *extent_hooks = extent_hooks_get(arena);
@ -107,16 +107,18 @@ large_ralloc_no_move_shrink(tsdn_t *tsdn, extent_t *extent, size_t usize)

assert(oldusize > usize);

if (extent_hooks->split == NULL)
if (extent_hooks->split == NULL) {
return (true);
}

/* Split excess pages. */
if (diff != 0) {
extent_t *trail = extent_split_wrapper(tsdn, arena,
&extent_hooks, extent, usize + large_pad, usize, diff,
diff);
if (trail == NULL)
if (trail == NULL) {
return (true);
}

if (config_fill && unlikely(opt_junk_free)) {
large_dalloc_maybe_junk(extent_addr_get(trail),
@ -133,8 +135,7 @@ large_ralloc_no_move_shrink(tsdn_t *tsdn, extent_t *extent, size_t usize)

static bool
large_ralloc_no_move_expand(tsdn_t *tsdn, extent_t *extent, size_t usize,
bool zero)
bool zero) {
{
arena_t *arena = extent_arena_get(extent);
size_t oldusize = extent_usize_get(extent);
bool is_zeroed_trail = false;
@ -142,8 +143,9 @@ large_ralloc_no_move_expand(tsdn_t *tsdn, extent_t *extent, size_t usize,
size_t trailsize = usize - extent_usize_get(extent);
extent_t *trail;

if (extent_hooks->merge == NULL)
if (extent_hooks->merge == NULL) {
return (true);
}

if ((trail = arena_extent_cache_alloc(tsdn, arena, &extent_hooks,
extent_past_get(extent), trailsize, CACHELINE, &is_zeroed_trail)) ==
@ -151,9 +153,10 @@ large_ralloc_no_move_expand(tsdn_t *tsdn, extent_t *extent, size_t usize,
bool commit = true;
if ((trail = extent_alloc_wrapper(tsdn, arena, &extent_hooks,
extent_past_get(extent), trailsize, 0, CACHELINE,
&is_zeroed_trail, &commit, false)) == NULL)
&is_zeroed_trail, &commit, false)) == NULL) {
return (true);
}
}

if (extent_merge_wrapper(tsdn, arena, &extent_hooks, extent, trail)) {
extent_dalloc_wrapper(tsdn, arena, &extent_hooks, trail);
@ -193,8 +196,7 @@ large_ralloc_no_move_expand(tsdn_t *tsdn, extent_t *extent, size_t usize,

bool
large_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, size_t usize_min,
size_t usize_max, bool zero)
size_t usize_max, bool zero) {
{
assert(s2u(extent_usize_get(extent)) == extent_usize_get(extent));
/* The following should have been caught by callers. */
assert(usize_min > 0 && usize_max <= LARGE_MAXCLASS);
@ -241,17 +243,16 @@ large_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, size_t usize_min,

static void *
large_ralloc_move_helper(tsdn_t *tsdn, arena_t *arena, size_t usize,
size_t alignment, bool zero)
size_t alignment, bool zero) {
{
if (alignment <= CACHELINE) {
if (alignment <= CACHELINE)
return (large_malloc(tsdn, arena, usize, zero));
}
return (large_palloc(tsdn, arena, usize, alignment, zero));
}

void *
large_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, size_t usize,
size_t alignment, bool zero, tcache_t *tcache)
size_t alignment, bool zero, tcache_t *tcache) {
{
void *ret;
size_t copysize;

@ -262,8 +263,9 @@ large_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, size_t usize,
LARGE_MINCLASS);

/* Try to avoid moving the allocation. */
if (!large_ralloc_no_move(tsdn, extent, usize, usize, zero))
if (!large_ralloc_no_move(tsdn, extent, usize, usize, zero)) {
return (extent_addr_get(extent));
}

/*
* usize and old size are different enough that we need to use a
@ -271,8 +273,9 @@ large_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, size_t usize,
* space and copying.
*/
ret = large_ralloc_move_helper(tsdn, arena, usize, alignment, zero);
if (ret == NULL)
if (ret == NULL) {
return (NULL);
}

copysize = (usize < extent_usize_get(extent)) ? usize :
extent_usize_get(extent);
@ -288,8 +291,7 @@ large_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, size_t usize,
* independent of these considerations.
*/
static void
large_dalloc_impl(tsdn_t *tsdn, extent_t *extent, bool junked_locked)
large_dalloc_impl(tsdn_t *tsdn, extent_t *extent, bool junked_locked) {
{
arena_t *arena;

arena = extent_arena_get(extent);
@ -302,42 +304,37 @@ large_dalloc_impl(tsdn_t *tsdn, extent_t *extent, bool junked_locked)
}
arena_extent_dalloc_large(tsdn, arena, extent, junked_locked);

if (!junked_locked)
if (!junked_locked) {
arena_decay_tick(tsdn, arena);
}
}

void
large_dalloc_junked_locked(tsdn_t *tsdn, extent_t *extent)
large_dalloc_junked_locked(tsdn_t *tsdn, extent_t *extent) {
{
large_dalloc_impl(tsdn, extent, true);
}

void
large_dalloc(tsdn_t *tsdn, extent_t *extent)
large_dalloc(tsdn_t *tsdn, extent_t *extent) {
{
large_dalloc_impl(tsdn, extent, false);
}

size_t
large_salloc(tsdn_t *tsdn, const extent_t *extent)
large_salloc(tsdn_t *tsdn, const extent_t *extent) {
{
return (extent_usize_get(extent));
}

prof_tctx_t *
large_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent)
large_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent) {
{
return (extent_prof_tctx_get(extent));
}

void
large_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, prof_tctx_t *tctx)
large_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, prof_tctx_t *tctx) {
{
extent_prof_tctx_set(extent, tctx);
}

void
large_prof_tctx_reset(tsdn_t *tsdn, extent_t *extent)
large_prof_tctx_reset(tsdn_t *tsdn, extent_t *extent) {
{
large_prof_tctx_set(tsdn, extent, (prof_tctx_t *)(uintptr_t)1U);
}
40
src/mutex.c
@ -35,8 +35,7 @@ static int (*pthread_create_fptr)(pthread_t *__restrict, const pthread_attr_t *,
void *(*)(void *), void *__restrict);

static void
pthread_create_once(void)
pthread_create_once(void) {
{
pthread_create_fptr = dlsym(RTLD_NEXT, "pthread_create");
if (pthread_create_fptr == NULL) {
malloc_write("<jemalloc>: Error in dlsym(RTLD_NEXT, "
@ -50,8 +49,7 @@ pthread_create_once(void)
JEMALLOC_EXPORT int
pthread_create(pthread_t *__restrict thread,
const pthread_attr_t *__restrict attr, void *(*start_routine)(void *),
void *__restrict arg)
void *__restrict arg) {
{
static pthread_once_t once_control = PTHREAD_ONCE_INIT;

pthread_once(&once_control, pthread_create_once);
@ -68,15 +66,16 @@ JEMALLOC_EXPORT int _pthread_mutex_init_calloc_cb(pthread_mutex_t *mutex,
#endif

bool
malloc_mutex_init(malloc_mutex_t *mutex, const char *name, witness_rank_t rank)
malloc_mutex_init(malloc_mutex_t *mutex, const char *name,
{
witness_rank_t rank) {
#ifdef _WIN32
# if _WIN32_WINNT >= 0x0600
InitializeSRWLock(&mutex->lock);
# else
if (!InitializeCriticalSectionAndSpinCount(&mutex->lock,
_CRT_SPINCOUNT))
_CRT_SPINCOUNT)) {
return (true);
}
# endif
#elif (defined(JEMALLOC_OS_UNFAIR_LOCK))
mutex->lock = OS_UNFAIR_LOCK_INIT;
@ -88,14 +87,16 @@ malloc_mutex_init(malloc_mutex_t *mutex, const char *name, witness_rank_t rank)
postponed_mutexes = mutex;
} else {
if (_pthread_mutex_init_calloc_cb(&mutex->lock,
bootstrap_calloc) != 0)
bootstrap_calloc) != 0) {
return (true);
}
}
#else
pthread_mutexattr_t attr;

if (pthread_mutexattr_init(&attr) != 0)
if (pthread_mutexattr_init(&attr) != 0) {
return (true);
}
pthread_mutexattr_settype(&attr, MALLOC_MUTEX_TYPE);
if (pthread_mutex_init(&mutex->lock, &attr) != 0) {
pthread_mutexattr_destroy(&attr);
@ -103,26 +104,24 @@ malloc_mutex_init(malloc_mutex_t *mutex, const char *name, witness_rank_t rank)
}
pthread_mutexattr_destroy(&attr);
#endif
if (config_debug)
if (config_debug) {
witness_init(&mutex->witness, name, rank, NULL, NULL);
|
||||||
|
}
|
||||||
return (false);
|
return (false);
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
malloc_mutex_prefork(tsdn_t *tsdn, malloc_mutex_t *mutex)
|
malloc_mutex_prefork(tsdn_t *tsdn, malloc_mutex_t *mutex) {
|
||||||
{
|
|
||||||
malloc_mutex_lock(tsdn, mutex);
|
malloc_mutex_lock(tsdn, mutex);
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
malloc_mutex_postfork_parent(tsdn_t *tsdn, malloc_mutex_t *mutex)
|
malloc_mutex_postfork_parent(tsdn_t *tsdn, malloc_mutex_t *mutex) {
|
||||||
{
|
|
||||||
malloc_mutex_unlock(tsdn, mutex);
|
malloc_mutex_unlock(tsdn, mutex);
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
malloc_mutex_postfork_child(tsdn_t *tsdn, malloc_mutex_t *mutex)
|
malloc_mutex_postfork_child(tsdn_t *tsdn, malloc_mutex_t *mutex) {
|
||||||
{
|
|
||||||
#ifdef JEMALLOC_MUTEX_INIT_CB
|
#ifdef JEMALLOC_MUTEX_INIT_CB
|
||||||
malloc_mutex_unlock(tsdn, mutex);
|
malloc_mutex_unlock(tsdn, mutex);
|
||||||
#else
|
#else
|
||||||
@ -130,21 +129,22 @@ malloc_mutex_postfork_child(tsdn_t *tsdn, malloc_mutex_t *mutex)
|
|||||||
mutex->witness.rank)) {
|
mutex->witness.rank)) {
|
||||||
malloc_printf("<jemalloc>: Error re-initializing mutex in "
|
malloc_printf("<jemalloc>: Error re-initializing mutex in "
|
||||||
"child\n");
|
"child\n");
|
||||||
if (opt_abort)
|
if (opt_abort) {
|
||||||
abort();
|
abort();
|
||||||
}
|
}
|
||||||
|
}
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
bool
|
||||||
malloc_mutex_boot(void)
|
malloc_mutex_boot(void) {
|
||||||
{
|
|
||||||
#ifdef JEMALLOC_MUTEX_INIT_CB
|
#ifdef JEMALLOC_MUTEX_INIT_CB
|
||||||
postpone_init = false;
|
postpone_init = false;
|
||||||
while (postponed_mutexes != NULL) {
|
while (postponed_mutexes != NULL) {
|
||||||
if (_pthread_mutex_init_calloc_cb(&postponed_mutexes->lock,
|
if (_pthread_mutex_init_calloc_cb(&postponed_mutexes->lock,
|
||||||
bootstrap_calloc) != 0)
|
bootstrap_calloc) != 0) {
|
||||||
return (true);
|
return (true);
|
||||||
|
}
|
||||||
postponed_mutexes = postponed_mutexes->postponed_next;
|
postponed_mutexes = postponed_mutexes->postponed_next;
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
57 src/nstime.c
@ -3,66 +3,56 @@
#define BILLION UINT64_C(1000000000)

void
-nstime_init(nstime_t *time, uint64_t ns)
-{
+nstime_init(nstime_t *time, uint64_t ns) {
time->ns = ns;
}

void
-nstime_init2(nstime_t *time, uint64_t sec, uint64_t nsec)
-{
+nstime_init2(nstime_t *time, uint64_t sec, uint64_t nsec) {
time->ns = sec * BILLION + nsec;
}

uint64_t
-nstime_ns(const nstime_t *time)
-{
+nstime_ns(const nstime_t *time) {
return (time->ns);
}

uint64_t
-nstime_sec(const nstime_t *time)
-{
+nstime_sec(const nstime_t *time) {
return (time->ns / BILLION);
}

uint64_t
-nstime_nsec(const nstime_t *time)
-{
+nstime_nsec(const nstime_t *time) {
return (time->ns % BILLION);
}

void
-nstime_copy(nstime_t *time, const nstime_t *source)
-{
+nstime_copy(nstime_t *time, const nstime_t *source) {
*time = *source;
}

int
-nstime_compare(const nstime_t *a, const nstime_t *b)
-{
+nstime_compare(const nstime_t *a, const nstime_t *b) {
return ((a->ns > b->ns) - (a->ns < b->ns));
}

void
-nstime_add(nstime_t *time, const nstime_t *addend)
-{
+nstime_add(nstime_t *time, const nstime_t *addend) {
assert(UINT64_MAX - time->ns >= addend->ns);

time->ns += addend->ns;
}

void
-nstime_subtract(nstime_t *time, const nstime_t *subtrahend)
-{
+nstime_subtract(nstime_t *time, const nstime_t *subtrahend) {
assert(nstime_compare(time, subtrahend) >= 0);

time->ns -= subtrahend->ns;
}

void
-nstime_imultiply(nstime_t *time, uint64_t multiplier)
-{
+nstime_imultiply(nstime_t *time, uint64_t multiplier) {
assert((((time->ns | multiplier) & (UINT64_MAX << (sizeof(uint64_t) <<
2))) == 0) || ((time->ns * multiplier) / multiplier == time->ns));

@ -70,16 +60,14 @@ nstime_imultiply(nstime_t *time, uint64_t multiplier)
}

void
-nstime_idivide(nstime_t *time, uint64_t divisor)
-{
+nstime_idivide(nstime_t *time, uint64_t divisor) {
assert(divisor != 0);

time->ns /= divisor;
}

uint64_t
-nstime_divide(const nstime_t *time, const nstime_t *divisor)
-{
+nstime_divide(const nstime_t *time, const nstime_t *divisor) {
assert(divisor->ns != 0);

return (time->ns / divisor->ns);
@ -88,8 +76,7 @@ nstime_divide(const nstime_t *time, const nstime_t *divisor)
#ifdef _WIN32
# define NSTIME_MONOTONIC true
static void
-nstime_get(nstime_t *time)
-{
+nstime_get(nstime_t *time) {
FILETIME ft;
uint64_t ticks_100ns;

@ -101,8 +88,7 @@ nstime_get(nstime_t *time)
#elif JEMALLOC_HAVE_CLOCK_MONOTONIC_COARSE
# define NSTIME_MONOTONIC true
static void
-nstime_get(nstime_t *time)
-{
+nstime_get(nstime_t *time) {
struct timespec ts;

clock_gettime(CLOCK_MONOTONIC_COARSE, &ts);
@ -111,8 +97,7 @@ nstime_get(nstime_t *time)
#elif JEMALLOC_HAVE_CLOCK_MONOTONIC
# define NSTIME_MONOTONIC true
static void
-nstime_get(nstime_t *time)
-{
+nstime_get(nstime_t *time) {
struct timespec ts;

clock_gettime(CLOCK_MONOTONIC, &ts);
@ -121,15 +106,13 @@ nstime_get(nstime_t *time)
#elif JEMALLOC_HAVE_MACH_ABSOLUTE_TIME
# define NSTIME_MONOTONIC true
static void
-nstime_get(nstime_t *time)
-{
+nstime_get(nstime_t *time) {
nstime_init(time, mach_absolute_time());
}
#else
# define NSTIME_MONOTONIC false
static void
-nstime_get(nstime_t *time)
-{
+nstime_get(nstime_t *time) {
struct timeval tv;

gettimeofday(&tv, NULL);
@ -142,8 +125,7 @@ nstime_get(nstime_t *time)
#define nstime_monotonic JEMALLOC_N(n_nstime_monotonic)
#endif
bool
-nstime_monotonic(void)
-{
+nstime_monotonic(void) {
return (NSTIME_MONOTONIC);
#undef NSTIME_MONOTONIC
}
@ -158,8 +140,7 @@ nstime_monotonic_t *nstime_monotonic = JEMALLOC_N(n_nstime_monotonic);
#define nstime_update JEMALLOC_N(n_nstime_update)
#endif
bool
-nstime_update(nstime_t *time)
-{
+nstime_update(nstime_t *time) {
nstime_t old_time;

nstime_copy(&old_time, time);
85 src/pages.c
@ -18,14 +18,14 @@ static bool os_overcommits;
/******************************************************************************/

void *
-pages_map(void *addr, size_t size, bool *commit)
-{
+pages_map(void *addr, size_t size, bool *commit) {
void *ret;

assert(size != 0);

-if (os_overcommits)
+if (os_overcommits) {
*commit = true;
+}

#ifdef _WIN32
/*
@ -46,9 +46,9 @@ pages_map(void *addr, size_t size, bool *commit)
}
assert(ret != NULL);

-if (ret == MAP_FAILED)
+if (ret == MAP_FAILED) {
ret = NULL;
-else if (addr != NULL && ret != addr) {
+} else if (addr != NULL && ret != addr) {
/*
* We succeeded in mapping memory, but not in the right place.
*/
@ -62,8 +62,7 @@ pages_map(void *addr, size_t size, bool *commit)
}

void
-pages_unmap(void *addr, size_t size)
-{
+pages_unmap(void *addr, size_t size) {
#ifdef _WIN32
if (VirtualFree(addr, 0, MEM_RELEASE) == 0)
#else
@ -80,15 +79,15 @@ pages_unmap(void *addr, size_t size)
"munmap"
#endif
"(): %s\n", buf);
-if (opt_abort)
+if (opt_abort) {
abort();
}
}
+}

void *
pages_trim(void *addr, size_t alloc_size, size_t leadsize, size_t size,
-bool *commit)
-{
+bool *commit) {
void *ret = (void *)((uintptr_t)addr + leadsize);

assert(alloc_size >= leadsize + size);
@ -98,30 +97,34 @@ pages_trim(void *addr, size_t alloc_size, size_t leadsize, size_t size,

pages_unmap(addr, alloc_size);
new_addr = pages_map(ret, size, commit);
-if (new_addr == ret)
+if (new_addr == ret) {
return (ret);
-if (new_addr)
+}
+if (new_addr) {
pages_unmap(new_addr, size);
+}
return (NULL);
}
#else
{
size_t trailsize = alloc_size - leadsize - size;

-if (leadsize != 0)
+if (leadsize != 0) {
pages_unmap(addr, leadsize);
-if (trailsize != 0)
+}
+if (trailsize != 0) {
pages_unmap((void *)((uintptr_t)ret + size), trailsize);
+}
return (ret);
}
#endif
}

static bool
-pages_commit_impl(void *addr, size_t size, bool commit)
-{
-if (os_overcommits)
+pages_commit_impl(void *addr, size_t size, bool commit) {
+if (os_overcommits) {
return (true);
+}

#ifdef _WIN32
return (commit ? (addr != VirtualAlloc(addr, size, MEM_COMMIT,
@ -131,8 +134,9 @@ pages_commit_impl(void *addr, size_t size, bool commit)
int prot = commit ? PAGES_PROT_COMMIT : PAGES_PROT_DECOMMIT;
void *result = mmap(addr, size, prot, mmap_flags | MAP_FIXED,
-1, 0);
-if (result == MAP_FAILED)
+if (result == MAP_FAILED) {
return (true);
+}
if (result != addr) {
/*
* We succeeded in mapping memory, but not in the right
@ -147,22 +151,20 @@ pages_commit_impl(void *addr, size_t size, bool commit)
}

bool
-pages_commit(void *addr, size_t size)
-{
+pages_commit(void *addr, size_t size) {
return (pages_commit_impl(addr, size, true));
}

bool
-pages_decommit(void *addr, size_t size)
-{
+pages_decommit(void *addr, size_t size) {
return (pages_commit_impl(addr, size, false));
}

bool
-pages_purge_lazy(void *addr, size_t size)
-{
-if (!pages_can_purge_lazy)
+pages_purge_lazy(void *addr, size_t size) {
+if (!pages_can_purge_lazy) {
return (true);
+}

#ifdef _WIN32
VirtualAlloc(addr, size, MEM_RESET, PAGE_READWRITE);
@ -175,10 +177,10 @@ pages_purge_lazy(void *addr, size_t size)
}

bool
-pages_purge_forced(void *addr, size_t size)
-{
-if (!pages_can_purge_forced)
+pages_purge_forced(void *addr, size_t size) {
+if (!pages_can_purge_forced) {
return (true);
+}

#if defined(JEMALLOC_PURGE_MADVISE_DONTNEED)
return (madvise(addr, size, MADV_DONTNEED) != 0);
@ -188,8 +190,7 @@ pages_purge_forced(void *addr, size_t size)
}

bool
-pages_huge(void *addr, size_t size)
-{
+pages_huge(void *addr, size_t size) {
assert(HUGEPAGE_ADDR2BASE(addr) == addr);
assert(HUGEPAGE_CEILING(size) == size);

@ -201,8 +202,7 @@ pages_huge(void *addr, size_t size)
}

bool
-pages_nohuge(void *addr, size_t size)
-{
+pages_nohuge(void *addr, size_t size) {
assert(HUGEPAGE_ADDR2BASE(addr) == addr);
assert(HUGEPAGE_CEILING(size) == size);

@ -215,14 +215,14 @@ pages_nohuge(void *addr, size_t size)

#ifdef JEMALLOC_SYSCTL_VM_OVERCOMMIT
static bool
-os_overcommits_sysctl(void)
-{
+os_overcommits_sysctl(void) {
int vm_overcommit;
size_t sz;

sz = sizeof(vm_overcommit);
-if (sysctlbyname("vm.overcommit", &vm_overcommit, &sz, NULL, 0) != 0)
+if (sysctlbyname("vm.overcommit", &vm_overcommit, &sz, NULL, 0) != 0) {
return (false); /* Error. */
+}

return ((vm_overcommit & 0x3) == 0);
}
@ -235,8 +235,7 @@ os_overcommits_sysctl(void)
* wrappers.
*/
static bool
-os_overcommits_proc(void)
-{
+os_overcommits_proc(void) {
int fd;
char buf[1];
ssize_t nread;
@ -246,8 +245,9 @@ os_overcommits_proc(void)
#else
fd = open("/proc/sys/vm/overcommit_memory", O_RDONLY);
#endif
-if (fd == -1)
+if (fd == -1) {
return (false); /* Error. */
+}

#if defined(JEMALLOC_USE_SYSCALL) && defined(SYS_read)
nread = (ssize_t)syscall(SYS_read, fd, &buf, sizeof(buf));
@ -261,8 +261,9 @@ os_overcommits_proc(void)
close(fd);
#endif

-if (nread < 1)
+if (nread < 1) {
return (false); /* Error. */
+}
/*
* /proc/sys/vm/overcommit_memory meanings:
* 0: Heuristic overcommit.
@ -274,8 +275,7 @@ os_overcommits_proc(void)
#endif

void
-pages_boot(void)
-{
+pages_boot(void) {
#ifndef _WIN32
mmap_flags = MAP_PRIVATE | MAP_ANON;
#endif
@ -285,8 +285,9 @@ pages_boot(void)
#elif defined(JEMALLOC_PROC_SYS_VM_OVERCOMMIT_MEMORY)
os_overcommits = os_overcommits_proc();
# ifdef MAP_NORESERVE
-if (os_overcommits)
+if (os_overcommits) {
mmap_flags |= MAP_NORESERVE;
+}
# endif
#else
os_overcommits = false;
568 src/prof.c
File diff suppressed because it is too large
70 src/rtree.c
@ -2,8 +2,7 @@
#include "jemalloc/internal/jemalloc_internal.h"

static unsigned
-hmin(unsigned ha, unsigned hb)
-{
+hmin(unsigned ha, unsigned hb) {
return (ha < hb ? ha : hb);
}

@ -12,8 +11,7 @@ hmin(unsigned ha, unsigned hb)
* used.
*/
bool
-rtree_new(rtree_t *rtree, unsigned bits)
-{
+rtree_new(rtree_t *rtree, unsigned bits) {
unsigned bits_in_leaf, height, i;

assert(RTREE_HEIGHT_MAX == ((ZU(1) << (LG_SIZEOF_PTR+3)) /
@ -24,10 +22,12 @@ rtree_new(rtree_t *rtree, unsigned bits)
: (bits % RTREE_BITS_PER_LEVEL);
if (bits > bits_in_leaf) {
height = 1 + (bits - bits_in_leaf) / RTREE_BITS_PER_LEVEL;
-if ((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf != bits)
+if ((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf != bits) {
height++;
-} else
+}
+} else {
height = 1;
+}
assert((height-1) * RTREE_BITS_PER_LEVEL + bits_in_leaf == bits);

rtree->height = height;
@ -68,8 +68,7 @@ rtree_new(rtree_t *rtree, unsigned bits)
#define rtree_node_alloc JEMALLOC_N(rtree_node_alloc_impl)
#endif
static rtree_elm_t *
-rtree_node_alloc(tsdn_t *tsdn, rtree_t *rtree, size_t nelms)
-{
+rtree_node_alloc(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
return ((rtree_elm_t *)base_alloc(tsdn, b0get(), nelms *
sizeof(rtree_elm_t), CACHELINE));
}
@ -84,8 +83,7 @@ rtree_node_alloc_t *rtree_node_alloc = JEMALLOC_N(rtree_node_alloc_impl);
#define rtree_node_dalloc JEMALLOC_N(rtree_node_dalloc_impl)
#endif
UNUSED static void
-rtree_node_dalloc(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node)
-{
+rtree_node_dalloc(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node) {
/* Nodes are never deleted during normal operation. */
not_reached();
}
@ -98,8 +96,7 @@ rtree_node_dalloc_t *rtree_node_dalloc = JEMALLOC_N(rtree_node_dalloc_impl);
#ifdef JEMALLOC_JET
static void
rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node,
-unsigned level)
-{
+unsigned level) {
if (level + 1 < rtree->height) {
size_t nchildren, i;

@ -116,22 +113,21 @@ rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *node,
}

void
-rtree_delete(tsdn_t *tsdn, rtree_t *rtree)
-{
+rtree_delete(tsdn_t *tsdn, rtree_t *rtree) {
unsigned i;

for (i = 0; i < rtree->height; i++) {
rtree_elm_t *subtree = rtree->levels[i].subtree;

-if (subtree != NULL)
+if (subtree != NULL) {
rtree_delete_subtree(tsdn, rtree, subtree, i);
}
}
+}
#endif

static rtree_elm_t *
rtree_node_init(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
-rtree_elm_t **elmp)
-{
+rtree_elm_t **elmp) {
rtree_elm_t *node;

malloc_mutex_lock(tsdn, &rtree->init_lock);
@ -151,23 +147,20 @@ rtree_node_init(tsdn_t *tsdn, rtree_t *rtree, unsigned level,
}

rtree_elm_t *
-rtree_subtree_read_hard(tsdn_t *tsdn, rtree_t *rtree, unsigned level)
-{
+rtree_subtree_read_hard(tsdn_t *tsdn, rtree_t *rtree, unsigned level) {
return (rtree_node_init(tsdn, rtree, level,
&rtree->levels[level].subtree));
}

rtree_elm_t *
rtree_child_read_hard(tsdn_t *tsdn, rtree_t *rtree, rtree_elm_t *elm,
-unsigned level)
-{
+unsigned level) {
return (rtree_node_init(tsdn, rtree, level+1, &elm->child));
}

static int
rtree_elm_witness_comp(const witness_t *a, void *oa, const witness_t *b,
-void *ob)
-{
+void *ob) {
uintptr_t ka = (uintptr_t)oa;
uintptr_t kb = (uintptr_t)ob;

@ -178,8 +171,7 @@ rtree_elm_witness_comp(const witness_t *a, void *oa, const witness_t *b,
}

static witness_t *
-rtree_elm_witness_alloc(tsd_t *tsd, uintptr_t key, const rtree_elm_t *elm)
-{
+rtree_elm_witness_alloc(tsd_t *tsd, uintptr_t key, const rtree_elm_t *elm) {
witness_t *witness;
size_t i;
rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);
@ -204,8 +196,7 @@ rtree_elm_witness_alloc(tsd_t *tsd, uintptr_t key, const rtree_elm_t *elm)
}

static witness_t *
-rtree_elm_witness_find(tsd_t *tsd, const rtree_elm_t *elm)
-{
+rtree_elm_witness_find(tsd_t *tsd, const rtree_elm_t *elm) {
size_t i;
rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);

@ -213,15 +204,16 @@ rtree_elm_witness_find(tsd_t *tsd, const rtree_elm_t *elm)
i++) {
rtree_elm_witness_t *rew = &witnesses->witnesses[i];

-if (rew->elm == elm)
+if (rew->elm == elm) {
return (&rew->witness);
}
+}
not_reached();
}

static void
-rtree_elm_witness_dalloc(tsd_t *tsd, witness_t *witness, const rtree_elm_t *elm)
-{
+rtree_elm_witness_dalloc(tsd_t *tsd, witness_t *witness,
+const rtree_elm_t *elm) {
size_t i;
rtree_elm_witness_tsd_t *witnesses = tsd_rtree_elm_witnessesp_get(tsd);

@ -242,12 +234,12 @@ rtree_elm_witness_dalloc(tsd_t *tsd, witness_t *witness, const rtree_elm_t *elm)

void
rtree_elm_witness_acquire(tsdn_t *tsdn, const rtree_t *rtree, uintptr_t key,
-const rtree_elm_t *elm)
-{
+const rtree_elm_t *elm) {
witness_t *witness;

-if (tsdn_null(tsdn))
+if (tsdn_null(tsdn)) {
return;
+}

witness = rtree_elm_witness_alloc(tsdn_tsd(tsdn), key, elm);
witness_lock(tsdn, witness);
@ -255,12 +247,12 @@ rtree_elm_witness_acquire(tsdn_t *tsdn, const rtree_t *rtree, uintptr_t key,

void
rtree_elm_witness_access(tsdn_t *tsdn, const rtree_t *rtree,
-const rtree_elm_t *elm)
-{
+const rtree_elm_t *elm) {
witness_t *witness;

-if (tsdn_null(tsdn))
+if (tsdn_null(tsdn)) {
return;
+}

witness = rtree_elm_witness_find(tsdn_tsd(tsdn), elm);
witness_assert_owner(tsdn, witness);
@ -268,12 +260,12 @@ rtree_elm_witness_access(tsdn_t *tsdn, const rtree_t *rtree,

void
rtree_elm_witness_release(tsdn_t *tsdn, const rtree_t *rtree,
-const rtree_elm_t *elm)
-{
+const rtree_elm_t *elm) {
witness_t *witness;

-if (tsdn_null(tsdn))
+if (tsdn_null(tsdn)) {
return;
+}

witness = rtree_elm_witness_find(tsdn_tsd(tsdn), elm);
witness_unlock(tsdn, witness);
45 src/stats.c
@ -34,8 +34,7 @@ bool opt_stats_print = false;

static void
stats_arena_bins_print(void (*write_cb)(void *, const char *), void *cbopaque,
-bool json, bool large, unsigned i)
-{
+bool json, bool large, unsigned i) {
size_t page;
bool in_gap, in_gap_prev;
unsigned nbins, j;
@ -144,8 +143,9 @@ stats_arena_bins_print(void (*write_cb)(void *, const char *), void *cbopaque,
} else if (milli < 1000) {
malloc_snprintf(util, sizeof(util), "0.%zu",
milli);
-} else
+} else {
malloc_snprintf(util, sizeof(util), "1");
+}

if (config_tcache) {
malloc_cprintf(write_cb, cbopaque,
@ -183,8 +183,7 @@ stats_arena_bins_print(void (*write_cb)(void *, const char *), void *cbopaque,

static void
stats_arena_lextents_print(void (*write_cb)(void *, const char *),
-void *cbopaque, bool json, unsigned i)
-{
+void *cbopaque, bool json, unsigned i) {
unsigned nbins, nlextents, j;
bool in_gap, in_gap_prev;

@ -248,8 +247,7 @@ stats_arena_lextents_print(void (*write_cb)(void *, const char *),

static void
stats_arena_print(void (*write_cb)(void *, const char *), void *cbopaque,
-bool json, unsigned i, bool bins, bool large)
-{
+bool json, unsigned i, bool bins, bool large) {
unsigned nthreads;
const char *dss;
ssize_t decay_time;
@ -290,9 +288,10 @@ stats_arena_print(void (*write_cb)(void *, const char *), void *cbopaque,
if (decay_time >= 0) {
malloc_cprintf(write_cb, cbopaque, "decay time: %zd\n",
decay_time);
-} else
+} else {
malloc_cprintf(write_cb, cbopaque, "decay time: N/A\n");
}
+}

CTL_M2_GET("stats.arenas.0.pactive", i, &pactive, size_t);
CTL_M2_GET("stats.arenas.0.pdirty", i, &pdirty, size_t);
@ -445,16 +444,17 @@ stats_arena_print(void (*write_cb)(void *, const char *), void *cbopaque,
"resident: %12zu\n", resident);
}

-if (bins)
+if (bins) {
stats_arena_bins_print(write_cb, cbopaque, json, large, i);
-if (large)
+}
+if (large) {
stats_arena_lextents_print(write_cb, cbopaque, json, i);
}
+}

static void
stats_general_print(void (*write_cb)(void *, const char *), void *cbopaque,
-bool json, bool more)
-{
+bool json, bool more) {
const char *cpv;
bool bv;
unsigned uv;
@ -473,8 +473,9 @@ stats_general_print(void (*write_cb)(void *, const char *), void *cbopaque,
if (json) {
malloc_cprintf(write_cb, cbopaque,
"\t\t\"version\": \"%s\",\n", cpv);
-} else
+} else {
malloc_cprintf(write_cb, cbopaque, "Version: %s\n", cpv);
+}

/* config. */
#define CONFIG_WRITE_BOOL_JSON(n, c) \
@ -655,8 +656,9 @@ stats_general_print(void (*write_cb)(void *, const char *), void *cbopaque,
if (json) {
malloc_cprintf(write_cb, cbopaque,
"\t\t\t\"narenas\": %u,\n", uv);
-} else
+} else {
malloc_cprintf(write_cb, cbopaque, "Arenas: %u\n", uv);
+}

CTL_GET("arenas.decay_time", &ssv, ssize_t);
if (json) {
@ -672,15 +674,17 @@ stats_general_print(void (*write_cb)(void *, const char *), void *cbopaque,
if (json) {
malloc_cprintf(write_cb, cbopaque,
"\t\t\t\"quantum\": %zu,\n", sv);
-} else
+} else {
malloc_cprintf(write_cb, cbopaque, "Quantum size: %zu\n", sv);
+}

CTL_GET("arenas.page", &sv, size_t);
if (json) {
malloc_cprintf(write_cb, cbopaque,
"\t\t\t\"page\": %zu,\n", sv);
-} else
+} else {
malloc_cprintf(write_cb, cbopaque, "Page size: %zu\n", sv);
+}

if (je_mallctl("arenas.tcache_max", (void *)&sv, &ssz, NULL, 0) == 0) {
if (json) {
@ -787,8 +791,7 @@ stats_general_print(void (*write_cb)(void *, const char *), void *cbopaque,
static void
stats_print_helper(void (*write_cb)(void *, const char *), void *cbopaque,
bool json, bool merged, bool destroyed, bool unmerged, bool bins,
-bool large)
-{
+bool large) {
size_t allocated, active, metadata, resident, mapped, retained;

CTL_GET("stats.allocated", &allocated, size_t);
@ -846,9 +849,10 @@ stats_print_helper(void (*write_cb)(void *, const char *), void *cbopaque,
sz = sizeof(bool);
xmallctlbymib(mib, miblen, &initialized[i], &sz,
NULL, 0);
-if (initialized[i])
+if (initialized[i]) {
ninitialized++;
}
+}
mib[1] = MALLCTL_ARENAS_DESTROYED;
sz = sizeof(bool);
xmallctlbymib(mib, miblen, &destroyed_initialized, &sz,
@ -934,8 +938,7 @@ stats_print_helper(void (*write_cb)(void *, const char *), void *cbopaque,

void
stats_print(void (*write_cb)(void *, const char *), void *cbopaque,
-const char *opts)
-{
+const char *opts) {
int err;
uint64_t epoch;
size_t u64sz;
120 src/tcache.c
@ -24,14 +24,12 @@ static tcaches_t *tcaches_avail;
/******************************************************************************/

size_t
-tcache_salloc(tsdn_t *tsdn, const void *ptr)
-{
+tcache_salloc(tsdn_t *tsdn, const void *ptr) {
return (arena_salloc(tsdn, iealloc(tsdn, ptr), ptr));
}

void
-tcache_event_hard(tsd_t *tsd, tcache_t *tcache)
-{
+tcache_event_hard(tsd_t *tsd, tcache_t *tcache) {
szind_t binind = tcache->next_gc_bin;
tcache_bin_t *tbin = &tcache->tbins[binind];
tcache_bin_info_t *tbin_info = &tcache_bin_info[binind];
@ -52,33 +50,36 @@ tcache_event_hard(tsd_t *tsd, tcache_t *tcache)
* Reduce fill count by 2X.  Limit lg_fill_div such that the
* fill count is always at least 1.
*/
-if ((tbin_info->ncached_max >> (tbin->lg_fill_div+1)) >= 1)
+if ((tbin_info->ncached_max >> (tbin->lg_fill_div+1)) >= 1) {
tbin->lg_fill_div++;
+}
} else if (tbin->low_water < 0) {
/*
* Increase fill count by 2X.  Make sure lg_fill_div stays
* greater than 0.
*/
-if (tbin->lg_fill_div > 1)
+if (tbin->lg_fill_div > 1) {
tbin->lg_fill_div--;
}
+}
tbin->low_water = tbin->ncached;

tcache->next_gc_bin++;
-if (tcache->next_gc_bin == nhbins)
+if (tcache->next_gc_bin == nhbins) {
tcache->next_gc_bin = 0;
}
+}

void *
tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
-tcache_bin_t *tbin, szind_t binind, bool *tcache_success)
-{
+tcache_bin_t *tbin, szind_t binind, bool *tcache_success) {
void *ret;

arena_tcache_fill_small(tsdn, arena, tbin, binind, config_prof ?
tcache->prof_accumbytes : 0);
-if (config_prof)
+if (config_prof) {
tcache->prof_accumbytes = 0;
+}
ret = tcache_alloc_easy(tbin, tcache_success);

return (ret);
@ -86,8 +87,7 @@ tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,

void
tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
-szind_t binind, unsigned rem)
-{
+szind_t binind, unsigned rem) {
arena_t *arena;
void *ptr;
unsigned i, nflush, ndeferred;
@ -106,8 +106,9 @@ tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,

if (config_prof && bin_arena == arena) {
if (arena_prof_accum(tsd_tsdn(tsd), arena,
-tcache->prof_accumbytes))
+tcache->prof_accumbytes)) {
prof_idump(tsd_tsdn(tsd));
+}
tcache->prof_accumbytes = 0;
}

@ -158,14 +159,14 @@ tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
memmove(tbin->avail - rem, tbin->avail - tbin->ncached, rem *
sizeof(void *));
tbin->ncached = rem;
-if ((int)tbin->ncached < tbin->low_water)
+if ((int)tbin->ncached < tbin->low_water) {
tbin->low_water = tbin->ncached;
}
+}

void
tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, szind_t binind,
-unsigned rem, tcache_t *tcache)
-{
+unsigned rem, tcache_t *tcache) {
arena_t *arena;
void *ptr;
unsigned i, nflush, ndeferred;
@ -182,8 +183,9 @@ tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, szind_t binind,
arena_t *locked_arena = extent_arena_get(extent);
UNUSED bool idump;

-if (config_prof)
+if (config_prof) {
idump = false;
+}
malloc_mutex_lock(tsd_tsdn(tsd), &locked_arena->lock);
if ((config_prof || config_stats) && locked_arena == arena) {
if (config_prof) {
@ -220,8 +222,9 @@ tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, szind_t binind,
}
}
malloc_mutex_unlock(tsd_tsdn(tsd), &locked_arena->lock);
-if (config_prof && idump)
+if (config_prof && idump) {
prof_idump(tsd_tsdn(tsd));
+}
arena_decay_ticks(tsd_tsdn(tsd), locked_arena, nflush -
ndeferred);
}
@ -241,13 +244,13 @@ tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, szind_t binind,
memmove(tbin->avail - rem, tbin->avail - tbin->ncached, rem *
sizeof(void *));
tbin->ncached = rem;
-if ((int)tbin->ncached < tbin->low_water)
+if ((int)tbin->ncached < tbin->low_water) {
tbin->low_water = tbin->ncached;
}
+}

static void
-tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena)
-{
+tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena) {
if (config_stats) {
/* Link into list of extant tcaches. */
malloc_mutex_lock(tsdn, &arena->lock);
@ -258,8 +261,7 @@ tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena)
}

static void
-tcache_arena_dissociate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena)
-{
+tcache_arena_dissociate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena) {
if (config_stats) {
/* Unlink from list of extant tcaches. */
malloc_mutex_lock(tsdn, &arena->lock);
@ -282,31 +284,30 @@ tcache_arena_dissociate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena)

void
tcache_arena_reassociate(tsdn_t *tsdn, tcache_t *tcache, arena_t *oldarena,
-arena_t *newarena)
-{
+arena_t *newarena) {
tcache_arena_dissociate(tsdn, tcache, oldarena);
tcache_arena_associate(tsdn, tcache, newarena);
}

tcache_t *
-tcache_get_hard(tsd_t *tsd)
-{
+tcache_get_hard(tsd_t *tsd) {
arena_t *arena;

if (!tcache_enabled_get()) {
-if (tsd_nominal(tsd))
+if (tsd_nominal(tsd)) {
tcache_enabled_set(false); /* Memoize. */
+}
return (NULL);
}
arena = arena_choose(tsd, NULL);
-if (unlikely(arena == NULL))
+if (unlikely(arena == NULL)) {
return (NULL);
+}
return (tcache_create(tsd_tsdn(tsd), arena));
}

tcache_t *
-tcache_create(tsdn_t *tsdn, arena_t *arena)
-{
+tcache_create(tsdn_t *tsdn, arena_t *arena) {
tcache_t *tcache;
size_t size, stack_offset;
unsigned i;
@ -321,8 +322,9 @@ tcache_create(tsdn_t *tsdn, arena_t *arena)

tcache = ipallocztm(tsdn, size, CACHELINE, true, NULL, true,
arena_get(TSDN_NULL, 0, true));
-if (tcache == NULL)
+if (tcache == NULL) {
return (NULL);
+}

tcache_arena_associate(tsdn, tcache, arena);

@ -345,8 +347,7 @@ tcache_create(tsdn_t *tsdn, arena_t *arena)
}

static void
-tcache_destroy(tsd_t *tsd, tcache_t *tcache)
-{
+tcache_destroy(tsd_t *tsd, tcache_t *tcache) {
arena_t *arena;
unsigned i;

@ -372,20 +373,21 @@ tcache_destroy(tsd_t *tsd, tcache_t *tcache)
}

if (config_prof && tcache->prof_accumbytes > 0 &&
-arena_prof_accum(tsd_tsdn(tsd), arena, tcache->prof_accumbytes))
+arena_prof_accum(tsd_tsdn(tsd), arena, tcache->prof_accumbytes)) {
prof_idump(tsd_tsdn(tsd));
+}

idalloctm(tsd_tsdn(tsd), iealloc(tsd_tsdn(tsd), tcache), tcache, NULL,
true, true);
}

void
-tcache_cleanup(tsd_t *tsd)
-{
+tcache_cleanup(tsd_t *tsd) {
tcache_t *tcache;

-if (!config_tcache)
+if (!config_tcache) {
return;
+}

if ((tcache = tsd_tcache_get(tsd)) != NULL) {
tcache_destroy(tsd, tcache);
@ -394,8 +396,7 @@ tcache_cleanup(tsd_t *tsd)
}

void
-tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena)
-{
+tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena) {
unsigned i;

cassert(config_stats);
@ -422,8 +423,7 @@ tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena)
}

bool
-tcaches_create(tsd_t *tsd, unsigned *r_ind)
-{
+tcaches_create(tsd_t *tsd, unsigned *r_ind) {
arena_t *arena;
tcache_t *tcache;
tcaches_t *elm;
@ -431,18 +431,22 @@ tcaches_create(tsd_t *tsd, unsigned *r_ind)
if (tcaches == NULL) {
tcaches = base_alloc(tsd_tsdn(tsd), b0get(), sizeof(tcache_t *)
* (MALLOCX_TCACHE_MAX+1), CACHELINE);
-if (tcaches == NULL)
+if (tcaches == NULL) {
return (true);
}
+}

-if (tcaches_avail == NULL && tcaches_past > MALLOCX_TCACHE_MAX)
+if (tcaches_avail == NULL && tcaches_past > MALLOCX_TCACHE_MAX) {
return (true);
+}
arena = arena_ichoose(tsd, NULL);
-if (unlikely(arena == NULL))
+if (unlikely(arena == NULL)) {
return (true);
+}
tcache = tcache_create(tsd_tsdn(tsd), arena);
-if (tcache == NULL)
+if (tcache == NULL) {
return (true);
+}

if (tcaches_avail != NULL) {
elm = tcaches_avail;
@ -460,23 +464,21 @@ tcaches_create(tsd_t *tsd, unsigned *r_ind)
}

static void
-tcaches_elm_flush(tsd_t *tsd, tcaches_t *elm)
-{
-if (elm->tcache == NULL)
+tcaches_elm_flush(tsd_t *tsd, tcaches_t *elm) {
+if (elm->tcache == NULL) {
return;
+}
tcache_destroy(tsd, elm->tcache);
elm->tcache = NULL;
}

void
-tcaches_flush(tsd_t *tsd, unsigned ind)
-{
+tcaches_flush(tsd_t *tsd, unsigned ind) {
tcaches_elm_flush(tsd, &tcaches[ind]);
}

void
-tcaches_destroy(tsd_t *tsd, unsigned ind)
-{
+tcaches_destroy(tsd_t *tsd, unsigned ind) {
tcaches_t *elm = &tcaches[ind];
tcaches_elm_flush(tsd, elm);
elm->next = tcaches_avail;
@ -484,23 +486,25 @@ tcaches_destroy(tsd_t *tsd, unsigned ind)
}

bool
-tcache_boot(tsdn_t *tsdn)
-{
+tcache_boot(tsdn_t *tsdn) {
unsigned i;

/* If necessary, clamp opt_lg_tcache_max. */
-if (opt_lg_tcache_max < 0 || (ZU(1) << opt_lg_tcache_max) < SMALL_MAXCLASS)
+if (opt_lg_tcache_max < 0 || (ZU(1) << opt_lg_tcache_max) <
+SMALL_MAXCLASS) {
tcache_maxclass = SMALL_MAXCLASS;
-else
+} else {
tcache_maxclass = (ZU(1) << opt_lg_tcache_max);
+}

nhbins = size2index(tcache_maxclass) + 1;

/* Initialize tcache_bin_info. */
tcache_bin_info = (tcache_bin_info_t *)base_alloc(tsdn, b0get(), nhbins
* sizeof(tcache_bin_info_t), CACHELINE);
-if (tcache_bin_info == NULL)
+if (tcache_bin_info == NULL) {
return (true);
+}
stack_nelms = 0;
for (i = 0; i < NBINS; i++) {
if ((arena_bin_info[i].nregs << 1) <= TCACHE_NSLOTS_SMALL_MIN) {
42
src/tsd.c
42
src/tsd.c
@ -12,20 +12,17 @@ malloc_tsd_data(, , tsd_t, TSD_INITIALIZER)
|
|||||||
/******************************************************************************/
|
/******************************************************************************/
|
||||||
|
|
||||||
void *
|
void *
|
||||||
malloc_tsd_malloc(size_t size)
|
malloc_tsd_malloc(size_t size) {
|
||||||
{
|
|
||||||
return (a0malloc(CACHELINE_CEILING(size)));
|
return (a0malloc(CACHELINE_CEILING(size)));
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
malloc_tsd_dalloc(void *wrapper)
|
malloc_tsd_dalloc(void *wrapper) {
|
||||||
{
|
|
||||||
a0dalloc(wrapper);
|
a0dalloc(wrapper);
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
malloc_tsd_no_cleanup(void *arg)
|
malloc_tsd_no_cleanup(void *arg) {
|
||||||
{
|
|
||||||
not_reached();
|
not_reached();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -34,38 +31,37 @@ malloc_tsd_no_cleanup(void *arg)
|
|||||||
JEMALLOC_EXPORT
|
JEMALLOC_EXPORT
|
||||||
#endif
|
#endif
|
||||||
void
|
void
|
||||||
_malloc_thread_cleanup(void)
|
_malloc_thread_cleanup(void) {
|
||||||
{
|
|
||||||
bool pending[MALLOC_TSD_CLEANUPS_MAX], again;
|
bool pending[MALLOC_TSD_CLEANUPS_MAX], again;
|
||||||
unsigned i;
|
unsigned i;
|
||||||
|
|
||||||
for (i = 0; i < ncleanups; i++)
|
for (i = 0; i < ncleanups; i++) {
|
||||||
pending[i] = true;
|
pending[i] = true;
|
||||||
|
}
|
||||||
|
|
||||||
do {
|
do {
|
||||||
again = false;
|
again = false;
|
||||||
for (i = 0; i < ncleanups; i++) {
|
for (i = 0; i < ncleanups; i++) {
|
||||||
if (pending[i]) {
|
if (pending[i]) {
|
||||||
pending[i] = cleanups[i]();
|
pending[i] = cleanups[i]();
|
||||||
if (pending[i])
|
if (pending[i]) {
|
||||||
again = true;
|
again = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
} while (again);
|
} while (again);
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
void
|
void
|
||||||
malloc_tsd_cleanup_register(bool (*f)(void))
|
malloc_tsd_cleanup_register(bool (*f)(void)) {
|
||||||
{
|
|
||||||
assert(ncleanups < MALLOC_TSD_CLEANUPS_MAX);
|
assert(ncleanups < MALLOC_TSD_CLEANUPS_MAX);
|
||||||
cleanups[ncleanups] = f;
|
cleanups[ncleanups] = f;
|
||||||
ncleanups++;
|
ncleanups++;
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
tsd_cleanup(void *arg)
|
tsd_cleanup(void *arg) {
|
||||||
{
|
|
||||||
tsd_t *tsd = (tsd_t *)arg;
|
tsd_t *tsd = (tsd_t *)arg;
|
||||||
|
|
||||||
switch (tsd->state) {
|
switch (tsd->state) {
|
||||||
@ -108,29 +104,27 @@ MALLOC_TSD
|
|||||||
}
|
}
|
||||||
|
|
||||||
tsd_t *
|
tsd_t *
|
||||||
malloc_tsd_boot0(void)
|
malloc_tsd_boot0(void) {
|
||||||
{
|
|
||||||
tsd_t *tsd;
|
tsd_t *tsd;
|
||||||
|
|
||||||
ncleanups = 0;
|
ncleanups = 0;
|
||||||
if (tsd_boot0())
|
if (tsd_boot0()) {
|
||||||
return (NULL);
|
return (NULL);
|
||||||
|
}
|
||||||
tsd = tsd_fetch();
|
tsd = tsd_fetch();
|
||||||
*tsd_arenas_tdata_bypassp_get(tsd) = true;
|
*tsd_arenas_tdata_bypassp_get(tsd) = true;
|
||||||
return (tsd);
|
return (tsd);
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
malloc_tsd_boot1(void)
|
malloc_tsd_boot1(void) {
|
||||||
{
|
|
||||||
tsd_boot1();
|
tsd_boot1();
|
||||||
*tsd_arenas_tdata_bypassp_get(tsd_fetch()) = false;
|
*tsd_arenas_tdata_bypassp_get(tsd_fetch()) = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
#ifdef _WIN32
|
#ifdef _WIN32
|
||||||
static BOOL WINAPI
|
static BOOL WINAPI
|
||||||
_tls_callback(HINSTANCE hinstDLL, DWORD fdwReason, LPVOID lpvReserved)
|
_tls_callback(HINSTANCE hinstDLL, DWORD fdwReason, LPVOID lpvReserved) {
|
||||||
{
|
|
||||||
switch (fdwReason) {
|
switch (fdwReason) {
|
||||||
#ifdef JEMALLOC_LAZY_LOCK
|
#ifdef JEMALLOC_LAZY_LOCK
|
||||||
case DLL_THREAD_ATTACH:
|
case DLL_THREAD_ATTACH:
|
||||||
@ -164,8 +158,7 @@ BOOL (WINAPI *const tls_callback)(HINSTANCE hinstDLL,
|
|||||||
#if (!defined(JEMALLOC_MALLOC_THREAD_CLEANUP) && !defined(JEMALLOC_TLS) && \
|
#if (!defined(JEMALLOC_MALLOC_THREAD_CLEANUP) && !defined(JEMALLOC_TLS) && \
|
||||||
!defined(_WIN32))
|
!defined(_WIN32))
|
||||||
void *
|
void *
|
||||||
tsd_init_check_recursion(tsd_init_head_t *head, tsd_init_block_t *block)
|
tsd_init_check_recursion(tsd_init_head_t *head, tsd_init_block_t *block) {
|
||||||
{
|
|
||||||
pthread_t self = pthread_self();
|
pthread_t self = pthread_self();
|
||||||
tsd_init_block_t *iter;
|
tsd_init_block_t *iter;
|
||||||
|
|
||||||
@ -186,8 +179,7 @@ tsd_init_check_recursion(tsd_init_head_t *head, tsd_init_block_t *block)
|
|||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
tsd_init_finish(tsd_init_head_t *head, tsd_init_block_t *block)
|
tsd_init_finish(tsd_init_head_t *head, tsd_init_block_t *block) {
|
||||||
{
|
|
||||||
malloc_mutex_lock(TSDN_NULL, &head->lock);
|
malloc_mutex_lock(TSDN_NULL, &head->lock);
|
||||||
ql_remove(&head->blocks, block, link);
|
ql_remove(&head->blocks, block, link);
|
||||||
malloc_mutex_unlock(TSDN_NULL, &head->lock);
|
malloc_mutex_unlock(TSDN_NULL, &head->lock);
|
||||||
|
96
src/util.c
96
src/util.c
@ -46,8 +46,7 @@ static char *x2s(uintmax_t x, bool alt_form, bool uppercase, char *s,
|
|||||||
|
|
||||||
/* malloc_message() setup. */
|
/* malloc_message() setup. */
|
||||||
static void
|
static void
|
||||||
wrtmessage(void *cbopaque, const char *s)
|
wrtmessage(void *cbopaque, const char *s) {
|
||||||
{
|
|
||||||
#if defined(JEMALLOC_USE_SYSCALL) && defined(SYS_write)
|
#if defined(JEMALLOC_USE_SYSCALL) && defined(SYS_write)
|
||||||
/*
|
/*
|
||||||
* Use syscall(2) rather than write(2) when possible in order to avoid
|
* Use syscall(2) rather than write(2) when possible in order to avoid
|
||||||
@ -71,21 +70,20 @@ JEMALLOC_EXPORT void (*je_malloc_message)(void *, const char *s);
|
|||||||
* je_malloc_message(...) throughout the code.
|
* je_malloc_message(...) throughout the code.
|
||||||
*/
|
*/
|
||||||
void
|
void
|
||||||
malloc_write(const char *s)
|
malloc_write(const char *s) {
|
||||||
{
|
if (je_malloc_message != NULL) {
|
||||||
if (je_malloc_message != NULL)
|
|
||||||
je_malloc_message(NULL, s);
|
je_malloc_message(NULL, s);
|
||||||
else
|
} else {
|
||||||
wrtmessage(NULL, s);
|
wrtmessage(NULL, s);
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* glibc provides a non-standard strerror_r() when _GNU_SOURCE is defined, so
|
* glibc provides a non-standard strerror_r() when _GNU_SOURCE is defined, so
|
||||||
* provide a wrapper.
|
* provide a wrapper.
|
||||||
*/
|
*/
|
||||||
int
|
int
|
||||||
buferror(int err, char *buf, size_t buflen)
|
buferror(int err, char *buf, size_t buflen) {
|
||||||
{
|
|
||||||
#ifdef _WIN32
|
#ifdef _WIN32
|
||||||
FormatMessageA(FORMAT_MESSAGE_FROM_SYSTEM, NULL, err, 0,
|
FormatMessageA(FORMAT_MESSAGE_FROM_SYSTEM, NULL, err, 0,
|
||||||
(LPSTR)buf, (DWORD)buflen, NULL);
|
(LPSTR)buf, (DWORD)buflen, NULL);
|
||||||
@ -103,8 +101,7 @@ buferror(int err, char *buf, size_t buflen)
|
|||||||
}
|
}
|
||||||
|
|
||||||
uintmax_t
|
uintmax_t
|
||||||
malloc_strtoumax(const char *restrict nptr, char **restrict endptr, int base)
|
malloc_strtoumax(const char *restrict nptr, char **restrict endptr, int base) {
|
||||||
{
|
|
||||||
uintmax_t ret, digit;
|
uintmax_t ret, digit;
|
||||||
unsigned b;
|
unsigned b;
|
||||||
bool neg;
|
bool neg;
|
||||||
@ -149,10 +146,12 @@ malloc_strtoumax(const char *restrict nptr, char **restrict endptr, int base)
|
|||||||
switch (p[1]) {
|
switch (p[1]) {
|
||||||
case '0': case '1': case '2': case '3': case '4': case '5':
|
case '0': case '1': case '2': case '3': case '4': case '5':
|
||||||
case '6': case '7':
|
case '6': case '7':
|
||||||
if (b == 0)
|
if (b == 0) {
|
||||||
b = 8;
|
b = 8;
|
||||||
if (b == 8)
|
}
|
||||||
|
if (b == 8) {
|
||||||
p++;
|
p++;
|
||||||
|
}
|
||||||
break;
|
break;
|
||||||
case 'X': case 'x':
|
case 'X': case 'x':
|
||||||
switch (p[2]) {
|
switch (p[2]) {
|
||||||
@ -162,10 +161,12 @@ malloc_strtoumax(const char *restrict nptr, char **restrict endptr, int base)
|
|||||||
case 'F':
|
case 'F':
|
||||||
case 'a': case 'b': case 'c': case 'd': case 'e':
|
case 'a': case 'b': case 'c': case 'd': case 'e':
|
||||||
case 'f':
|
case 'f':
|
||||||
if (b == 0)
|
if (b == 0) {
|
||||||
b = 16;
|
b = 16;
|
||||||
if (b == 16)
|
}
|
||||||
|
if (b == 16) {
|
||||||
p += 2;
|
p += 2;
|
||||||
|
}
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
break;
|
break;
|
||||||
@ -177,8 +178,9 @@ malloc_strtoumax(const char *restrict nptr, char **restrict endptr, int base)
|
|||||||
goto label_return;
|
goto label_return;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (b == 0)
|
if (b == 0) {
|
||||||
b = 10;
|
b = 10;
|
||||||
|
}
|
||||||
|
|
||||||
/* Convert. */
|
/* Convert. */
|
||||||
ret = 0;
|
ret = 0;
|
||||||
@ -196,8 +198,9 @@ malloc_strtoumax(const char *restrict nptr, char **restrict endptr, int base)
|
|||||||
}
|
}
|
||||||
p++;
|
p++;
|
||||||
}
|
}
|
||||||
if (neg)
|
if (neg) {
|
||||||
ret = (uintmax_t)(-((intmax_t)ret));
|
ret = (uintmax_t)(-((intmax_t)ret));
|
||||||
|
}
|
||||||
|
|
||||||
if (p == ns) {
|
if (p == ns) {
|
||||||
/* No conversion performed. */
|
/* No conversion performed. */
|
||||||
@ -211,15 +214,15 @@ label_return:
|
|||||||
if (p == ns) {
|
if (p == ns) {
|
||||||
/* No characters were converted. */
|
/* No characters were converted. */
|
||||||
*endptr = (char *)nptr;
|
*endptr = (char *)nptr;
|
||||||
} else
|
} else {
|
||||||
*endptr = (char *)p;
|
*endptr = (char *)p;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
return (ret);
|
return (ret);
|
||||||
}
|
}
|
||||||
|
|
||||||
static char *
|
static char *
|
||||||
u2s(uintmax_t x, unsigned base, bool uppercase, char *s, size_t *slen_p)
|
u2s(uintmax_t x, unsigned base, bool uppercase, char *s, size_t *slen_p) {
|
||||||
{
|
|
||||||
unsigned i;
|
unsigned i;
|
||||||
|
|
||||||
i = U2S_BUFSIZE - 1;
|
i = U2S_BUFSIZE - 1;
|
||||||
@ -261,19 +264,21 @@ u2s(uintmax_t x, unsigned base, bool uppercase, char *s, size_t *slen_p)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static char *
|
static char *
|
||||||
d2s(intmax_t x, char sign, char *s, size_t *slen_p)
|
d2s(intmax_t x, char sign, char *s, size_t *slen_p) {
|
||||||
{
|
|
||||||
bool neg;
|
bool neg;
|
||||||
|
|
||||||
if ((neg = (x < 0)))
|
if ((neg = (x < 0))) {
|
||||||
x = -x;
|
x = -x;
|
||||||
|
}
|
||||||
s = u2s(x, 10, false, s, slen_p);
|
s = u2s(x, 10, false, s, slen_p);
|
||||||
if (neg)
|
if (neg) {
|
||||||
sign = '-';
|
sign = '-';
|
||||||
|
}
|
||||||
switch (sign) {
|
switch (sign) {
|
||||||
case '-':
|
case '-':
|
||||||
if (!neg)
|
if (!neg) {
|
||||||
break;
|
break;
|
||||||
|
}
|
||||||
/* Fall through. */
|
/* Fall through. */
|
||||||
case ' ':
|
case ' ':
|
||||||
case '+':
|
case '+':
|
||||||
@ -287,8 +292,7 @@ d2s(intmax_t x, char sign, char *s, size_t *slen_p)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static char *
|
static char *
|
||||||
o2s(uintmax_t x, bool alt_form, char *s, size_t *slen_p)
|
o2s(uintmax_t x, bool alt_form, char *s, size_t *slen_p) {
|
||||||
{
|
|
||||||
s = u2s(x, 8, false, s, slen_p);
|
s = u2s(x, 8, false, s, slen_p);
|
||||||
if (alt_form && *s != '0') {
|
if (alt_form && *s != '0') {
|
||||||
s--;
|
s--;
|
||||||
@ -299,8 +303,7 @@ o2s(uintmax_t x, bool alt_form, char *s, size_t *slen_p)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static char *
|
static char *
|
||||||
x2s(uintmax_t x, bool alt_form, bool uppercase, char *s, size_t *slen_p)
|
x2s(uintmax_t x, bool alt_form, bool uppercase, char *s, size_t *slen_p) {
|
||||||
{
|
|
||||||
s = u2s(x, 16, uppercase, s, slen_p);
|
s = u2s(x, 16, uppercase, s, slen_p);
|
||||||
if (alt_form) {
|
if (alt_form) {
|
||||||
s -= 2;
|
s -= 2;
|
||||||
@ -311,14 +314,14 @@ x2s(uintmax_t x, bool alt_form, bool uppercase, char *s, size_t *slen_p)
|
|||||||
}
|
}
|
||||||
|
|
||||||
size_t
|
size_t
|
||||||
malloc_vsnprintf(char *str, size_t size, const char *format, va_list ap)
|
malloc_vsnprintf(char *str, size_t size, const char *format, va_list ap) {
|
||||||
{
|
|
||||||
size_t i;
|
size_t i;
|
||||||
const char *f;
|
const char *f;
|
||||||
|
|
||||||
#define APPEND_C(c) do { \
|
#define APPEND_C(c) do { \
|
||||||
if (i < size) \
|
if (i < size) { \
|
||||||
str[i] = (c); \
|
str[i] = (c); \
|
||||||
|
} \
|
||||||
i++; \
|
i++; \
|
||||||
} while (0)
|
} while (0)
|
||||||
#define APPEND_S(s, slen) do { \
|
#define APPEND_S(s, slen) do { \
|
||||||
@ -334,17 +337,19 @@ malloc_vsnprintf(char *str, size_t size, const char *format, va_list ap)
|
|||||||
(size_t)width - slen : 0); \
|
(size_t)width - slen : 0); \
|
||||||
if (!left_justify && pad_len != 0) { \
|
if (!left_justify && pad_len != 0) { \
|
||||||
size_t j; \
|
size_t j; \
|
||||||
for (j = 0; j < pad_len; j++) \
|
for (j = 0; j < pad_len; j++) { \
|
||||||
APPEND_C(' '); \
|
APPEND_C(' '); \
|
||||||
} \
|
} \
|
||||||
|
} \
|
||||||
/* Value. */ \
|
/* Value. */ \
|
||||||
APPEND_S(s, slen); \
|
APPEND_S(s, slen); \
|
||||||
/* Right padding. */ \
|
/* Right padding. */ \
|
||||||
if (left_justify && pad_len != 0) { \
|
if (left_justify && pad_len != 0) { \
|
||||||
size_t j; \
|
size_t j; \
|
||||||
for (j = 0; j < pad_len; j++) \
|
for (j = 0; j < pad_len; j++) { \
|
||||||
APPEND_C(' '); \
|
APPEND_C(' '); \
|
||||||
} \
|
} \
|
||||||
|
} \
|
||||||
} while (0)
|
} while (0)
|
||||||
#define GET_ARG_NUMERIC(val, len) do { \
|
#define GET_ARG_NUMERIC(val, len) do { \
|
||||||
switch (len) { \
|
switch (len) { \
|
||||||
@ -454,10 +459,11 @@ malloc_vsnprintf(char *str, size_t size, const char *format, va_list ap)
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
/* Width/precision separator. */
|
/* Width/precision separator. */
|
||||||
if (*f == '.')
|
if (*f == '.') {
|
||||||
f++;
|
f++;
|
||||||
else
|
} else {
|
||||||
goto label_length;
|
goto label_length;
|
||||||
|
}
|
||||||
/* Precision. */
|
/* Precision. */
|
||||||
switch (*f) {
|
switch (*f) {
|
||||||
case '*':
|
case '*':
|
||||||
@ -484,8 +490,9 @@ malloc_vsnprintf(char *str, size_t size, const char *format, va_list ap)
|
|||||||
if (*f == 'l') {
|
if (*f == 'l') {
|
||||||
len = 'q';
|
len = 'q';
|
||||||
f++;
|
f++;
|
||||||
} else
|
} else {
|
||||||
len = 'l';
|
len = 'l';
|
||||||
|
}
|
||||||
break;
|
break;
|
||||||
case 'q': case 'j': case 't': case 'z':
|
case 'q': case 'j': case 't': case 'z':
|
||||||
len = *f;
|
len = *f;
|
||||||
@ -576,10 +583,11 @@ malloc_vsnprintf(char *str, size_t size, const char *format, va_list ap)
|
|||||||
}}
|
}}
|
||||||
}
|
}
|
||||||
label_out:
|
label_out:
|
||||||
if (i < size)
|
if (i < size) {
|
||||||
str[i] = '\0';
|
str[i] = '\0';
|
||||||
else
|
} else {
|
||||||
str[size - 1] = '\0';
|
str[size - 1] = '\0';
|
||||||
|
}
|
||||||
|
|
||||||
#undef APPEND_C
|
#undef APPEND_C
|
||||||
#undef APPEND_S
|
#undef APPEND_S
|
||||||
@ -590,8 +598,7 @@ malloc_vsnprintf(char *str, size_t size, const char *format, va_list ap)
|
|||||||
|
|
||||||
JEMALLOC_FORMAT_PRINTF(3, 4)
|
JEMALLOC_FORMAT_PRINTF(3, 4)
|
||||||
size_t
|
size_t
|
||||||
malloc_snprintf(char *str, size_t size, const char *format, ...)
|
malloc_snprintf(char *str, size_t size, const char *format, ...) {
|
||||||
{
|
|
||||||
size_t ret;
|
size_t ret;
|
||||||
va_list ap;
|
va_list ap;
|
||||||
|
|
||||||
@ -604,8 +611,7 @@ malloc_snprintf(char *str, size_t size, const char *format, ...)
|
|||||||
|
|
||||||
void
|
void
|
||||||
malloc_vcprintf(void (*write_cb)(void *, const char *), void *cbopaque,
|
malloc_vcprintf(void (*write_cb)(void *, const char *), void *cbopaque,
|
||||||
const char *format, va_list ap)
|
const char *format, va_list ap) {
|
||||||
{
|
|
||||||
char buf[MALLOC_PRINTF_BUFSIZE];
|
char buf[MALLOC_PRINTF_BUFSIZE];
|
||||||
|
|
||||||
if (write_cb == NULL) {
|
if (write_cb == NULL) {
|
||||||
@ -630,8 +636,7 @@ malloc_vcprintf(void (*write_cb)(void *, const char *), void *cbopaque,
|
|||||||
JEMALLOC_FORMAT_PRINTF(3, 4)
|
JEMALLOC_FORMAT_PRINTF(3, 4)
|
||||||
void
|
void
|
||||||
malloc_cprintf(void (*write_cb)(void *, const char *), void *cbopaque,
|
malloc_cprintf(void (*write_cb)(void *, const char *), void *cbopaque,
|
||||||
const char *format, ...)
|
const char *format, ...) {
|
||||||
{
|
|
||||||
va_list ap;
|
va_list ap;
|
||||||
|
|
||||||
va_start(ap, format);
|
va_start(ap, format);
|
||||||
@ -642,8 +647,7 @@ malloc_cprintf(void (*write_cb)(void *, const char *), void *cbopaque,
|
|||||||
/* Print to stderr in such a way as to avoid memory allocation. */
|
/* Print to stderr in such a way as to avoid memory allocation. */
|
||||||
JEMALLOC_FORMAT_PRINTF(1, 2)
|
JEMALLOC_FORMAT_PRINTF(1, 2)
|
||||||
void
|
void
|
||||||
malloc_printf(const char *format, ...)
|
malloc_printf(const char *format, ...) {
|
||||||
{
|
|
||||||
va_list ap;
|
va_list ap;
|
||||||
|
|
||||||
va_start(ap, format);
|
va_start(ap, format);
|
||||||
|
@ -3,8 +3,7 @@
|
|||||||
|
|
||||||
void
|
void
|
||||||
witness_init(witness_t *witness, const char *name, witness_rank_t rank,
|
witness_init(witness_t *witness, const char *name, witness_rank_t rank,
|
||||||
witness_comp_t *comp, void *opaque)
|
witness_comp_t *comp, void *opaque) {
|
||||||
{
|
|
||||||
witness->name = name;
|
witness->name = name;
|
||||||
witness->rank = rank;
|
witness->rank = rank;
|
||||||
witness->comp = comp;
|
witness->comp = comp;
|
||||||
@ -16,8 +15,7 @@ witness_init(witness_t *witness, const char *name, witness_rank_t rank,
|
|||||||
#define witness_lock_error JEMALLOC_N(n_witness_lock_error)
|
#define witness_lock_error JEMALLOC_N(n_witness_lock_error)
|
||||||
#endif
|
#endif
|
||||||
void
|
void
|
||||||
witness_lock_error(const witness_list_t *witnesses, const witness_t *witness)
|
witness_lock_error(const witness_list_t *witnesses, const witness_t *witness) {
|
||||||
{
|
|
||||||
witness_t *w;
|
witness_t *w;
|
||||||
|
|
||||||
malloc_printf("<jemalloc>: Lock rank order reversal:");
|
malloc_printf("<jemalloc>: Lock rank order reversal:");
|
||||||
@ -38,8 +36,7 @@ witness_lock_error_t *witness_lock_error = JEMALLOC_N(n_witness_lock_error);
|
|||||||
#define witness_owner_error JEMALLOC_N(n_witness_owner_error)
|
#define witness_owner_error JEMALLOC_N(n_witness_owner_error)
|
||||||
#endif
|
#endif
|
||||||
void
|
void
|
||||||
witness_owner_error(const witness_t *witness)
|
witness_owner_error(const witness_t *witness) {
|
||||||
{
|
|
||||||
malloc_printf("<jemalloc>: Should own %s(%u)\n", witness->name,
|
malloc_printf("<jemalloc>: Should own %s(%u)\n", witness->name,
|
||||||
witness->rank);
|
witness->rank);
|
||||||
abort();
|
abort();
|
||||||
@ -55,8 +52,7 @@ witness_owner_error_t *witness_owner_error = JEMALLOC_N(n_witness_owner_error);
|
|||||||
#define witness_not_owner_error JEMALLOC_N(n_witness_not_owner_error)
|
#define witness_not_owner_error JEMALLOC_N(n_witness_not_owner_error)
|
||||||
#endif
|
#endif
|
||||||
void
|
void
|
||||||
witness_not_owner_error(const witness_t *witness)
|
witness_not_owner_error(const witness_t *witness) {
|
||||||
{
|
|
||||||
malloc_printf("<jemalloc>: Should not own %s(%u)\n", witness->name,
|
malloc_printf("<jemalloc>: Should not own %s(%u)\n", witness->name,
|
||||||
witness->rank);
|
witness->rank);
|
||||||
abort();
|
abort();
|
||||||
@ -73,8 +69,7 @@ witness_not_owner_error_t *witness_not_owner_error =
|
|||||||
#define witness_lockless_error JEMALLOC_N(n_witness_lockless_error)
|
#define witness_lockless_error JEMALLOC_N(n_witness_lockless_error)
|
||||||
#endif
|
#endif
|
||||||
void
|
void
|
||||||
witness_lockless_error(const witness_list_t *witnesses)
|
witness_lockless_error(const witness_list_t *witnesses) {
|
||||||
{
|
|
||||||
witness_t *w;
|
witness_t *w;
|
||||||
|
|
||||||
malloc_printf("<jemalloc>: Should not own any locks:");
|
malloc_printf("<jemalloc>: Should not own any locks:");
|
||||||
@ -92,28 +87,24 @@ witness_lockless_error_t *witness_lockless_error =
|
|||||||
#endif
|
#endif
|
||||||
|
|
||||||
void
|
void
|
||||||
witnesses_cleanup(tsd_t *tsd)
|
witnesses_cleanup(tsd_t *tsd) {
|
||||||
{
|
|
||||||
witness_assert_lockless(tsd_tsdn(tsd));
|
witness_assert_lockless(tsd_tsdn(tsd));
|
||||||
|
|
||||||
/* Do nothing. */
|
/* Do nothing. */
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
witness_prefork(tsd_t *tsd)
|
witness_prefork(tsd_t *tsd) {
|
||||||
{
|
|
||||||
tsd_witness_fork_set(tsd, true);
|
tsd_witness_fork_set(tsd, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
witness_postfork_parent(tsd_t *tsd)
|
witness_postfork_parent(tsd_t *tsd) {
|
||||||
{
|
|
||||||
tsd_witness_fork_set(tsd, false);
|
tsd_witness_fork_set(tsd, false);
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
witness_postfork_child(tsd_t *tsd)
|
witness_postfork_child(tsd_t *tsd) {
|
||||||
{
|
|
||||||
#ifndef JEMALLOC_MUTEX_INIT_CB
|
#ifndef JEMALLOC_MUTEX_INIT_CB
|
||||||
witness_list_t *witnesses;
|
witness_list_t *witnesses;
|
||||||
|
|
||||||
|
96
src/zone.c
96
src/zone.c
@ -125,8 +125,7 @@ static void zone_reinit_lock(malloc_zone_t *zone);
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
static size_t
|
static size_t
|
||||||
zone_size(malloc_zone_t *zone, const void *ptr)
|
zone_size(malloc_zone_t *zone, const void *ptr) {
|
||||||
{
|
|
||||||
/*
|
/*
|
||||||
* There appear to be places within Darwin (such as setenv(3)) that
|
* There appear to be places within Darwin (such as setenv(3)) that
|
||||||
* cause calls to this function with pointers that *no* zone owns. If
|
* cause calls to this function with pointers that *no* zone owns. If
|
||||||
@ -140,20 +139,17 @@ zone_size(malloc_zone_t *zone, const void *ptr)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void *
|
static void *
|
||||||
zone_malloc(malloc_zone_t *zone, size_t size)
|
zone_malloc(malloc_zone_t *zone, size_t size) {
|
||||||
{
|
|
||||||
return (je_malloc(size));
|
return (je_malloc(size));
|
||||||
}
|
}
|
||||||
|
|
||||||
static void *
|
static void *
|
||||||
zone_calloc(malloc_zone_t *zone, size_t num, size_t size)
|
zone_calloc(malloc_zone_t *zone, size_t num, size_t size) {
|
||||||
{
|
|
||||||
return (je_calloc(num, size));
|
return (je_calloc(num, size));
|
||||||
}
|
}
|
||||||
|
|
||||||
static void *
|
static void *
|
||||||
zone_valloc(malloc_zone_t *zone, size_t size)
|
zone_valloc(malloc_zone_t *zone, size_t size) {
|
||||||
{
|
|
||||||
void *ret = NULL; /* Assignment avoids useless compiler warning. */
|
void *ret = NULL; /* Assignment avoids useless compiler warning. */
|
||||||
|
|
||||||
je_posix_memalign(&ret, PAGE, size);
|
je_posix_memalign(&ret, PAGE, size);
|
||||||
@ -162,8 +158,7 @@ zone_valloc(malloc_zone_t *zone, size_t size)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
zone_free(malloc_zone_t *zone, void *ptr)
|
zone_free(malloc_zone_t *zone, void *ptr) {
|
||||||
{
|
|
||||||
if (ivsalloc(tsdn_fetch(), ptr) != 0) {
|
if (ivsalloc(tsdn_fetch(), ptr) != 0) {
|
||||||
je_free(ptr);
|
je_free(ptr);
|
||||||
return;
|
return;
|
||||||
@ -173,17 +168,16 @@ zone_free(malloc_zone_t *zone, void *ptr)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void *
|
static void *
|
||||||
zone_realloc(malloc_zone_t *zone, void *ptr, size_t size)
|
zone_realloc(malloc_zone_t *zone, void *ptr, size_t size) {
|
||||||
{
|
if (ivsalloc(tsdn_fetch(), ptr) != 0) {
|
||||||
if (ivsalloc(tsdn_fetch(), ptr) != 0)
|
|
||||||
return (je_realloc(ptr, size));
|
return (je_realloc(ptr, size));
|
||||||
|
}
|
||||||
|
|
||||||
return (realloc(ptr, size));
|
return (realloc(ptr, size));
|
||||||
}
|
}
|
||||||
|
|
||||||
static void *
|
static void *
|
||||||
zone_memalign(malloc_zone_t *zone, size_t alignment, size_t size)
|
zone_memalign(malloc_zone_t *zone, size_t alignment, size_t size) {
|
||||||
{
|
|
||||||
void *ret = NULL; /* Assignment avoids useless compiler warning. */
|
void *ret = NULL; /* Assignment avoids useless compiler warning. */
|
||||||
|
|
||||||
je_posix_memalign(&ret, alignment, size);
|
je_posix_memalign(&ret, alignment, size);
|
||||||
@ -192,8 +186,7 @@ zone_memalign(malloc_zone_t *zone, size_t alignment, size_t size)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
zone_free_definite_size(malloc_zone_t *zone, void *ptr, size_t size)
|
zone_free_definite_size(malloc_zone_t *zone, void *ptr, size_t size) {
|
||||||
{
|
|
||||||
size_t alloc_size;
|
size_t alloc_size;
|
||||||
|
|
||||||
alloc_size = ivsalloc(tsdn_fetch(), ptr);
|
alloc_size = ivsalloc(tsdn_fetch(), ptr);
|
||||||
@ -207,16 +200,14 @@ zone_free_definite_size(malloc_zone_t *zone, void *ptr, size_t size)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
zone_destroy(malloc_zone_t *zone)
|
zone_destroy(malloc_zone_t *zone) {
|
||||||
{
|
|
||||||
/* This function should never be called. */
|
/* This function should never be called. */
|
||||||
not_reached();
|
not_reached();
|
||||||
}
|
}
|
||||||
|
|
||||||
static unsigned
|
static unsigned
|
||||||
zone_batch_malloc(struct _malloc_zone_t *zone, size_t size, void **results,
|
zone_batch_malloc(struct _malloc_zone_t *zone, size_t size, void **results,
|
||||||
unsigned num_requested)
|
unsigned num_requested) {
|
||||||
{
|
|
||||||
unsigned i;
|
unsigned i;
|
||||||
|
|
||||||
for (i = 0; i < num_requested; i++) {
|
for (i = 0; i < num_requested; i++) {
|
||||||
@ -230,8 +221,7 @@ zone_batch_malloc(struct _malloc_zone_t *zone, size_t size, void **results,
|
|||||||
|
|
||||||
static void
|
static void
|
||||||
zone_batch_free(struct _malloc_zone_t *zone, void **to_be_freed,
|
zone_batch_free(struct _malloc_zone_t *zone, void **to_be_freed,
|
||||||
unsigned num_to_be_freed)
|
unsigned num_to_be_freed) {
|
||||||
{
|
|
||||||
unsigned i;
|
unsigned i;
|
||||||
|
|
||||||
for (i = 0; i < num_to_be_freed; i++) {
|
for (i = 0; i < num_to_be_freed; i++) {
|
||||||
@ -241,53 +231,47 @@ zone_batch_free(struct _malloc_zone_t *zone, void **to_be_freed,
|
|||||||
}
|
}
|
||||||
|
|
||||||
static size_t
|
static size_t
|
||||||
zone_pressure_relief(struct _malloc_zone_t *zone, size_t goal)
|
zone_pressure_relief(struct _malloc_zone_t *zone, size_t goal) {
|
||||||
{
|
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
static size_t
|
static size_t
|
||||||
zone_good_size(malloc_zone_t *zone, size_t size)
|
zone_good_size(malloc_zone_t *zone, size_t size) {
|
||||||
{
|
if (size == 0) {
|
||||||
if (size == 0)
|
|
||||||
size = 1;
|
size = 1;
|
||||||
|
}
|
||||||
return (s2u(size));
|
return (s2u(size));
|
||||||
}
|
}
|
||||||
|
|
||||||
static kern_return_t
|
static kern_return_t
|
||||||
zone_enumerator(task_t task, void *data, unsigned type_mask,
|
zone_enumerator(task_t task, void *data, unsigned type_mask,
|
||||||
vm_address_t zone_address, memory_reader_t reader,
|
vm_address_t zone_address, memory_reader_t reader,
|
||||||
vm_range_recorder_t recorder)
|
vm_range_recorder_t recorder) {
|
||||||
{
|
|
||||||
return KERN_SUCCESS;
|
return KERN_SUCCESS;
|
||||||
}
|
}
|
||||||
|
|
||||||
static boolean_t
|
static boolean_t
|
||||||
zone_check(malloc_zone_t *zone)
|
zone_check(malloc_zone_t *zone) {
|
||||||
{
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
zone_print(malloc_zone_t *zone, boolean_t verbose)
|
zone_print(malloc_zone_t *zone, boolean_t verbose) {
|
||||||
{
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
zone_log(malloc_zone_t *zone, void *address)
|
zone_log(malloc_zone_t *zone, void *address) {
|
||||||
{
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
zone_force_lock(malloc_zone_t *zone)
|
zone_force_lock(malloc_zone_t *zone) {
|
||||||
{
|
if (isthreaded) {
|
||||||
if (isthreaded)
|
|
||||||
jemalloc_prefork();
|
jemalloc_prefork();
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
zone_force_unlock(malloc_zone_t *zone)
|
zone_force_unlock(malloc_zone_t *zone) {
|
||||||
{
|
|
||||||
/*
|
/*
|
||||||
* Call jemalloc_postfork_child() rather than
|
* Call jemalloc_postfork_child() rather than
|
||||||
* jemalloc_postfork_parent(), because this function is executed by both
|
* jemalloc_postfork_parent(), because this function is executed by both
|
||||||
@ -295,13 +279,13 @@ zone_force_unlock(malloc_zone_t *zone)
|
|||||||
* reinitialized, but the child cannot unlock mutexes that were locked
|
* reinitialized, but the child cannot unlock mutexes that were locked
|
||||||
* by the parent.
|
* by the parent.
|
||||||
*/
|
*/
|
||||||
if (isthreaded)
|
if (isthreaded) {
|
||||||
jemalloc_postfork_child();
|
jemalloc_postfork_child();
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
zone_statistics(malloc_zone_t *zone, malloc_statistics_t *stats)
|
zone_statistics(malloc_zone_t *zone, malloc_statistics_t *stats) {
|
||||||
{
|
|
||||||
/* We make no effort to actually fill the values */
|
/* We make no effort to actually fill the values */
|
||||||
stats->blocks_in_use = 0;
|
stats->blocks_in_use = 0;
|
||||||
stats->size_in_use = 0;
|
stats->size_in_use = 0;
|
||||||
@ -310,23 +294,20 @@ zone_statistics(malloc_zone_t *zone, malloc_statistics_t *stats)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static boolean_t
|
static boolean_t
|
||||||
zone_locked(malloc_zone_t *zone)
|
zone_locked(malloc_zone_t *zone) {
|
||||||
{
|
|
||||||
/* Pretend no lock is being held */
|
/* Pretend no lock is being held */
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
zone_reinit_lock(malloc_zone_t *zone)
|
zone_reinit_lock(malloc_zone_t *zone) {
|
||||||
{
|
|
||||||
/* As of OSX 10.12, this function is only used when force_unlock would
|
/* As of OSX 10.12, this function is only used when force_unlock would
|
||||||
* be used if the zone version were < 9. So just use force_unlock. */
|
* be used if the zone version were < 9. So just use force_unlock. */
|
||||||
zone_force_unlock(zone);
|
zone_force_unlock(zone);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
zone_init(void)
|
zone_init(void) {
|
||||||
{
|
|
||||||
jemalloc_zone.size = zone_size;
|
jemalloc_zone.size = zone_size;
|
||||||
jemalloc_zone.malloc = zone_malloc;
|
jemalloc_zone.malloc = zone_malloc;
|
||||||
jemalloc_zone.calloc = zone_calloc;
|
jemalloc_zone.calloc = zone_calloc;
|
||||||
@ -364,8 +345,7 @@ zone_init(void)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static malloc_zone_t *
|
static malloc_zone_t *
|
||||||
zone_default_get(void)
|
zone_default_get(void) {
|
||||||
{
|
|
||||||
malloc_zone_t **zones = NULL;
|
malloc_zone_t **zones = NULL;
|
||||||
unsigned int num_zones = 0;
|
unsigned int num_zones = 0;
|
||||||
|
|
||||||
@ -387,16 +367,16 @@ zone_default_get(void)
|
|||||||
num_zones = 0;
|
num_zones = 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (num_zones)
|
if (num_zones) {
|
||||||
return (zones[0]);
|
return (zones[0]);
|
||||||
|
}
|
||||||
|
|
||||||
return (malloc_default_zone());
|
return (malloc_default_zone());
|
||||||
}
|
}
|
||||||
|
|
||||||
/* As written, this function can only promote jemalloc_zone. */
|
/* As written, this function can only promote jemalloc_zone. */
|
||||||
static void
|
static void
|
||||||
zone_promote(void)
|
zone_promote(void) {
|
||||||
{
|
|
||||||
malloc_zone_t *zone;
|
malloc_zone_t *zone;
|
||||||
|
|
||||||
do {
|
do {
|
||||||
@ -433,16 +413,16 @@ zone_promote(void)
|
|||||||
|
|
||||||
JEMALLOC_ATTR(constructor)
|
JEMALLOC_ATTR(constructor)
|
||||||
void
|
void
|
||||||
zone_register(void)
|
zone_register(void) {
|
||||||
{
|
|
||||||
/*
|
/*
|
||||||
* If something else replaced the system default zone allocator, don't
|
* If something else replaced the system default zone allocator, don't
|
||||||
* register jemalloc's.
|
* register jemalloc's.
|
||||||
*/
|
*/
|
||||||
default_zone = zone_default_get();
|
default_zone = zone_default_get();
|
||||||
if (!default_zone->zone_name || strcmp(default_zone->zone_name,
|
if (!default_zone->zone_name || strcmp(default_zone->zone_name,
|
||||||
"DefaultMallocZone") != 0)
|
"DefaultMallocZone") != 0) {
|
||||||
return;
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* The default purgeable zone is created lazily by OSX's libc. It uses
|
* The default purgeable zone is created lazily by OSX's libc. It uses
|
||||||
|
@ -97,70 +97,60 @@ double genrand_res53_mix(sfmt_t *ctx);
|
|||||||
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(SFMT_C_))
|
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(SFMT_C_))
|
||||||
/* These real versions are due to Isaku Wada */
|
/* These real versions are due to Isaku Wada */
|
||||||
/** generates a random number on [0,1]-real-interval */
|
/** generates a random number on [0,1]-real-interval */
|
||||||
JEMALLOC_INLINE double to_real1(uint32_t v)
|
JEMALLOC_INLINE double to_real1(uint32_t v) {
|
||||||
{
|
|
||||||
return v * (1.0/4294967295.0);
|
return v * (1.0/4294967295.0);
|
||||||
/* divided by 2^32-1 */
|
/* divided by 2^32-1 */
|
||||||
}
|
}
|
||||||
|
|
||||||
/** generates a random number on [0,1]-real-interval */
|
/** generates a random number on [0,1]-real-interval */
|
||||||
JEMALLOC_INLINE double genrand_real1(sfmt_t *ctx)
|
JEMALLOC_INLINE double genrand_real1(sfmt_t *ctx) {
|
||||||
{
|
|
||||||
return to_real1(gen_rand32(ctx));
|
return to_real1(gen_rand32(ctx));
|
||||||
}
|
}
|
||||||
|
|
||||||
/** generates a random number on [0,1)-real-interval */
|
/** generates a random number on [0,1)-real-interval */
|
||||||
JEMALLOC_INLINE double to_real2(uint32_t v)
|
JEMALLOC_INLINE double to_real2(uint32_t v) {
|
||||||
{
|
|
||||||
return v * (1.0/4294967296.0);
|
return v * (1.0/4294967296.0);
|
||||||
/* divided by 2^32 */
|
/* divided by 2^32 */
|
||||||
}
|
}
|
||||||
|
|
||||||
/** generates a random number on [0,1)-real-interval */
|
/** generates a random number on [0,1)-real-interval */
|
||||||
JEMALLOC_INLINE double genrand_real2(sfmt_t *ctx)
|
JEMALLOC_INLINE double genrand_real2(sfmt_t *ctx) {
|
||||||
{
|
|
||||||
return to_real2(gen_rand32(ctx));
|
return to_real2(gen_rand32(ctx));
|
||||||
}
|
}
|
||||||
|
|
||||||
/** generates a random number on (0,1)-real-interval */
|
/** generates a random number on (0,1)-real-interval */
|
||||||
JEMALLOC_INLINE double to_real3(uint32_t v)
|
JEMALLOC_INLINE double to_real3(uint32_t v) {
|
||||||
{
|
|
||||||
return (((double)v) + 0.5)*(1.0/4294967296.0);
|
return (((double)v) + 0.5)*(1.0/4294967296.0);
|
||||||
/* divided by 2^32 */
|
/* divided by 2^32 */
|
||||||
}
|
}
|
||||||
|
|
||||||
/** generates a random number on (0,1)-real-interval */
|
/** generates a random number on (0,1)-real-interval */
|
||||||
JEMALLOC_INLINE double genrand_real3(sfmt_t *ctx)
|
JEMALLOC_INLINE double genrand_real3(sfmt_t *ctx) {
|
||||||
{
|
|
||||||
return to_real3(gen_rand32(ctx));
|
return to_real3(gen_rand32(ctx));
|
||||||
}
|
}
|
||||||
/** These real versions are due to Isaku Wada */
|
/** These real versions are due to Isaku Wada */
|
||||||
|
|
||||||
/** generates a random number on [0,1) with 53-bit resolution*/
|
/** generates a random number on [0,1) with 53-bit resolution*/
|
||||||
JEMALLOC_INLINE double to_res53(uint64_t v)
|
JEMALLOC_INLINE double to_res53(uint64_t v) {
|
||||||
{
|
|
||||||
return v * (1.0/18446744073709551616.0L);
|
return v * (1.0/18446744073709551616.0L);
|
||||||
}
|
}
|
||||||
|
|
||||||
/** generates a random number on [0,1) with 53-bit resolution from two
|
/** generates a random number on [0,1) with 53-bit resolution from two
|
||||||
* 32 bit integers */
|
* 32 bit integers */
|
||||||
JEMALLOC_INLINE double to_res53_mix(uint32_t x, uint32_t y)
|
JEMALLOC_INLINE double to_res53_mix(uint32_t x, uint32_t y) {
|
||||||
{
|
|
||||||
return to_res53(x | ((uint64_t)y << 32));
|
return to_res53(x | ((uint64_t)y << 32));
|
||||||
}
|
}
|
||||||
|
|
||||||
/** generates a random number on [0,1) with 53-bit resolution
|
/** generates a random number on [0,1) with 53-bit resolution
|
||||||
*/
|
*/
|
||||||
JEMALLOC_INLINE double genrand_res53(sfmt_t *ctx)
|
JEMALLOC_INLINE double genrand_res53(sfmt_t *ctx) {
|
||||||
{
|
|
||||||
return to_res53(gen_rand64(ctx));
|
return to_res53(gen_rand64(ctx));
|
||||||
}
|
}
|
||||||
|
|
||||||
/** generates a random number on [0,1) with 53-bit resolution
|
/** generates a random number on [0,1) with 53-bit resolution
|
||||||
using 32bit integer.
|
using 32bit integer.
|
||||||
*/
|
*/
|
||||||
JEMALLOC_INLINE double genrand_res53_mix(sfmt_t *ctx)
|
JEMALLOC_INLINE double genrand_res53_mix(sfmt_t *ctx) {
|
||||||
{
|
|
||||||
uint32_t x, y;
|
uint32_t x, y;
|
||||||
|
|
||||||
x = gen_rand32(ctx);
|
x = gen_rand32(ctx);
|
||||||
|
@ -8,13 +8,12 @@ btalloc_n_proto(1)
|
|||||||
|
|
||||||
#define btalloc_n_gen(n) \
|
#define btalloc_n_gen(n) \
|
||||||
void * \
|
void * \
|
||||||
btalloc_##n(size_t size, unsigned bits) \
|
btalloc_##n(size_t size, unsigned bits) { \
|
||||||
{ \
|
|
||||||
void *p; \
|
void *p; \
|
||||||
\
|
\
|
||||||
if (bits == 0) \
|
if (bits == 0) { \
|
||||||
p = mallocx(size, 0); \
|
p = mallocx(size, 0); \
|
||||||
else { \
|
} else { \
|
||||||
switch (bits & 0x1U) { \
|
switch (bits & 0x1U) { \
|
||||||
case 0: \
|
case 0: \
|
||||||
p = (btalloc_0(size, bits >> 1)); \
|
p = (btalloc_0(size, bits >> 1)); \
|
||||||
|
@ -73,8 +73,7 @@ static bool did_merge;
|
|||||||
|
|
||||||
static void *
|
static void *
|
||||||
extent_alloc_hook(extent_hooks_t *extent_hooks, void *new_addr, size_t size,
|
extent_alloc_hook(extent_hooks_t *extent_hooks, void *new_addr, size_t size,
|
||||||
size_t alignment, bool *zero, bool *commit, unsigned arena_ind)
|
size_t alignment, bool *zero, bool *commit, unsigned arena_ind) {
|
||||||
{
|
|
||||||
void *ret;
|
void *ret;
|
||||||
|
|
||||||
TRACE_HOOK("%s(extent_hooks=%p, new_addr=%p, size=%zu, alignment=%zu, "
|
TRACE_HOOK("%s(extent_hooks=%p, new_addr=%p, size=%zu, alignment=%zu, "
|
||||||
@ -86,8 +85,9 @@ extent_alloc_hook(extent_hooks_t *extent_hooks, void *new_addr, size_t size,
|
|||||||
assert_ptr_eq(extent_hooks->alloc, extent_alloc_hook,
|
assert_ptr_eq(extent_hooks->alloc, extent_alloc_hook,
|
||||||
"Wrong hook function");
|
"Wrong hook function");
|
||||||
called_alloc = true;
|
called_alloc = true;
|
||||||
if (!try_alloc)
|
if (!try_alloc) {
|
||||||
return (NULL);
|
return (NULL);
|
||||||
|
}
|
||||||
ret = default_hooks->alloc(default_hooks, new_addr, size, alignment,
|
ret = default_hooks->alloc(default_hooks, new_addr, size, alignment,
|
||||||
zero, commit, 0);
|
zero, commit, 0);
|
||||||
did_alloc = (ret != NULL);
|
did_alloc = (ret != NULL);
|
||||||
@ -96,8 +96,7 @@ extent_alloc_hook(extent_hooks_t *extent_hooks, void *new_addr, size_t size,
|
|||||||
|
|
||||||
static bool
|
static bool
|
||||||
extent_dalloc_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
extent_dalloc_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
||||||
bool committed, unsigned arena_ind)
|
bool committed, unsigned arena_ind) {
|
||||||
{
|
|
||||||
bool err;
|
bool err;
|
||||||
|
|
||||||
TRACE_HOOK("%s(extent_hooks=%p, addr=%p, size=%zu, committed=%s, "
|
TRACE_HOOK("%s(extent_hooks=%p, addr=%p, size=%zu, committed=%s, "
|
||||||
@ -108,8 +107,9 @@ extent_dalloc_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
|||||||
assert_ptr_eq(extent_hooks->dalloc, extent_dalloc_hook,
|
assert_ptr_eq(extent_hooks->dalloc, extent_dalloc_hook,
|
||||||
"Wrong hook function");
|
"Wrong hook function");
|
||||||
called_dalloc = true;
|
called_dalloc = true;
|
||||||
if (!try_dalloc)
|
if (!try_dalloc) {
|
||||||
return (true);
|
return (true);
|
||||||
|
}
|
||||||
err = default_hooks->dalloc(default_hooks, addr, size, committed, 0);
|
err = default_hooks->dalloc(default_hooks, addr, size, committed, 0);
|
||||||
did_dalloc = !err;
|
did_dalloc = !err;
|
||||||
return (err);
|
return (err);
|
||||||
@ -117,8 +117,7 @@ extent_dalloc_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
|||||||
|
|
||||||
static bool
|
static bool
|
||||||
extent_commit_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
extent_commit_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
||||||
size_t offset, size_t length, unsigned arena_ind)
|
size_t offset, size_t length, unsigned arena_ind) {
|
||||||
{
|
|
||||||
bool err;
|
bool err;
|
||||||
|
|
||||||
TRACE_HOOK("%s(extent_hooks=%p, addr=%p, size=%zu, offset=%zu, "
|
TRACE_HOOK("%s(extent_hooks=%p, addr=%p, size=%zu, offset=%zu, "
|
||||||
@ -129,8 +128,9 @@ extent_commit_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
|||||||
assert_ptr_eq(extent_hooks->commit, extent_commit_hook,
|
assert_ptr_eq(extent_hooks->commit, extent_commit_hook,
|
||||||
"Wrong hook function");
|
"Wrong hook function");
|
||||||
called_commit = true;
|
called_commit = true;
|
||||||
if (!try_commit)
|
if (!try_commit) {
|
||||||
return (true);
|
return (true);
|
||||||
|
}
|
||||||
err = default_hooks->commit(default_hooks, addr, size, offset, length,
|
err = default_hooks->commit(default_hooks, addr, size, offset, length,
|
||||||
0);
|
0);
|
||||||
did_commit = !err;
|
did_commit = !err;
|
||||||
@ -139,8 +139,7 @@ extent_commit_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
|||||||
|
|
||||||
static bool
|
static bool
|
||||||
extent_decommit_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
extent_decommit_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
||||||
size_t offset, size_t length, unsigned arena_ind)
|
size_t offset, size_t length, unsigned arena_ind) {
|
||||||
{
|
|
||||||
bool err;
|
bool err;
|
||||||
|
|
||||||
TRACE_HOOK("%s(extent_hooks=%p, addr=%p, size=%zu, offset=%zu, "
|
TRACE_HOOK("%s(extent_hooks=%p, addr=%p, size=%zu, offset=%zu, "
|
||||||
@ -151,8 +150,9 @@ extent_decommit_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
|||||||
assert_ptr_eq(extent_hooks->decommit, extent_decommit_hook,
|
assert_ptr_eq(extent_hooks->decommit, extent_decommit_hook,
|
||||||
"Wrong hook function");
|
"Wrong hook function");
|
||||||
called_decommit = true;
|
called_decommit = true;
|
||||||
if (!try_decommit)
|
if (!try_decommit) {
|
||||||
return (true);
|
return (true);
|
||||||
|
}
|
||||||
err = default_hooks->decommit(default_hooks, addr, size, offset, length,
|
err = default_hooks->decommit(default_hooks, addr, size, offset, length,
|
||||||
0);
|
0);
|
||||||
did_decommit = !err;
|
did_decommit = !err;
|
||||||
@ -161,8 +161,7 @@ extent_decommit_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
|||||||
|
|
||||||
static bool
|
static bool
|
||||||
extent_purge_lazy_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
extent_purge_lazy_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
||||||
size_t offset, size_t length, unsigned arena_ind)
|
size_t offset, size_t length, unsigned arena_ind) {
|
||||||
{
|
|
||||||
bool err;
|
bool err;
|
||||||
|
|
||||||
TRACE_HOOK("%s(extent_hooks=%p, addr=%p, size=%zu, offset=%zu, "
|
TRACE_HOOK("%s(extent_hooks=%p, addr=%p, size=%zu, offset=%zu, "
|
||||||
@ -173,8 +172,9 @@ extent_purge_lazy_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
|||||||
assert_ptr_eq(extent_hooks->purge_lazy, extent_purge_lazy_hook,
|
assert_ptr_eq(extent_hooks->purge_lazy, extent_purge_lazy_hook,
|
||||||
"Wrong hook function");
|
"Wrong hook function");
|
||||||
called_purge_lazy = true;
|
called_purge_lazy = true;
|
||||||
if (!try_purge_lazy)
|
if (!try_purge_lazy) {
|
||||||
return (true);
|
return (true);
|
||||||
|
}
|
||||||
err = default_hooks->purge_lazy == NULL ||
|
err = default_hooks->purge_lazy == NULL ||
|
||||||
default_hooks->purge_lazy(default_hooks, addr, size, offset, length,
|
default_hooks->purge_lazy(default_hooks, addr, size, offset, length,
|
||||||
0);
|
0);
|
||||||
@ -184,8 +184,7 @@ extent_purge_lazy_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
|||||||
|
|
||||||
static bool
|
static bool
|
||||||
extent_purge_forced_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
extent_purge_forced_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
||||||
size_t offset, size_t length, unsigned arena_ind)
|
size_t offset, size_t length, unsigned arena_ind) {
|
||||||
{
|
|
||||||
bool err;
|
bool err;
|
||||||
|
|
||||||
TRACE_HOOK("%s(extent_hooks=%p, addr=%p, size=%zu, offset=%zu, "
|
TRACE_HOOK("%s(extent_hooks=%p, addr=%p, size=%zu, offset=%zu, "
|
||||||
@ -196,8 +195,9 @@ extent_purge_forced_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
|||||||
assert_ptr_eq(extent_hooks->purge_forced, extent_purge_forced_hook,
|
assert_ptr_eq(extent_hooks->purge_forced, extent_purge_forced_hook,
|
||||||
"Wrong hook function");
|
"Wrong hook function");
|
||||||
called_purge_forced = true;
|
called_purge_forced = true;
|
||||||
if (!try_purge_forced)
|
if (!try_purge_forced) {
|
||||||
return (true);
|
return (true);
|
||||||
|
}
|
||||||
err = default_hooks->purge_forced == NULL ||
|
err = default_hooks->purge_forced == NULL ||
|
||||||
default_hooks->purge_forced(default_hooks, addr, size, offset,
|
default_hooks->purge_forced(default_hooks, addr, size, offset,
|
||||||
length, 0);
|
length, 0);
|
||||||
@ -207,8 +207,7 @@ extent_purge_forced_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
|||||||
|
|
||||||
static bool
|
static bool
|
||||||
extent_split_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
extent_split_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
||||||
size_t size_a, size_t size_b, bool committed, unsigned arena_ind)
|
size_t size_a, size_t size_b, bool committed, unsigned arena_ind) {
|
||||||
{
|
|
||||||
bool err;
|
bool err;
|
||||||
|
|
||||||
TRACE_HOOK("%s(extent_hooks=%p, addr=%p, size=%zu, size_a=%zu, "
|
TRACE_HOOK("%s(extent_hooks=%p, addr=%p, size=%zu, size_a=%zu, "
|
||||||
@ -220,8 +219,9 @@ extent_split_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
|||||||
assert_ptr_eq(extent_hooks->split, extent_split_hook,
|
assert_ptr_eq(extent_hooks->split, extent_split_hook,
|
||||||
"Wrong hook function");
|
"Wrong hook function");
|
||||||
called_split = true;
|
called_split = true;
|
||||||
if (!try_split)
|
if (!try_split) {
|
||||||
return (true);
|
return (true);
|
||||||
|
}
|
||||||
err = (default_hooks->split == NULL ||
|
err = (default_hooks->split == NULL ||
|
||||||
default_hooks->split(default_hooks, addr, size, size_a, size_b,
|
default_hooks->split(default_hooks, addr, size, size_a, size_b,
|
||||||
committed, 0));
|
committed, 0));
|
||||||
@ -231,8 +231,7 @@ extent_split_hook(extent_hooks_t *extent_hooks, void *addr, size_t size,
|
|||||||
|
|
||||||
static bool
|
static bool
|
||||||
extent_merge_hook(extent_hooks_t *extent_hooks, void *addr_a, size_t size_a,
|
extent_merge_hook(extent_hooks_t *extent_hooks, void *addr_a, size_t size_a,
|
||||||
void *addr_b, size_t size_b, bool committed, unsigned arena_ind)
|
void *addr_b, size_t size_b, bool committed, unsigned arena_ind) {
|
||||||
{
|
|
||||||
bool err;
|
bool err;
|
||||||
|
|
||||||
TRACE_HOOK("%s(extent_hooks=%p, addr_a=%p, size_a=%zu, addr_b=%p "
|
TRACE_HOOK("%s(extent_hooks=%p, addr_a=%p, size_a=%zu, addr_b=%p "
|
||||||
@ -244,8 +243,9 @@ extent_merge_hook(extent_hooks_t *extent_hooks, void *addr_a, size_t size_a,
|
|||||||
assert_ptr_eq(extent_hooks->merge, extent_merge_hook,
|
assert_ptr_eq(extent_hooks->merge, extent_merge_hook,
|
||||||
"Wrong hook function");
|
"Wrong hook function");
|
||||||
called_merge = true;
|
called_merge = true;
|
||||||
if (!try_merge)
|
if (!try_merge) {
|
||||||
return (true);
|
return (true);
|
||||||
|
}
|
||||||
err = (default_hooks->merge == NULL ||
|
err = (default_hooks->merge == NULL ||
|
||||||
default_hooks->merge(default_hooks, addr_a, size_a, addr_b, size_b,
|
default_hooks->merge(default_hooks, addr_a, size_a, addr_b, size_b,
|
||||||
committed, 0));
|
committed, 0));
|
||||||
@ -254,8 +254,7 @@ extent_merge_hook(extent_hooks_t *extent_hooks, void *addr_a, size_t size_a,
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
extent_hooks_prep(void)
|
extent_hooks_prep(void) {
|
||||||
{
|
|
||||||
size_t sz;
|
size_t sz;
|
||||||
|
|
||||||
sz = sizeof(default_hooks);
|
sz = sizeof(default_hooks);
|
||||||
|
@ -159,8 +159,9 @@ static const bool config_debug =
|
|||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
#define assert_not_implemented(e) do { \
|
#define assert_not_implemented(e) do { \
|
||||||
if (!(e)) \
|
if (!(e)) { \
|
||||||
not_implemented(); \
|
not_implemented(); \
|
||||||
|
} \
|
||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
#ifdef __cplusplus
|
#ifdef __cplusplus
|
||||||
|
@ -16,8 +16,7 @@ double pt_gamma(double p, double shape, double scale, double ln_gamma_shape);
|
|||||||
* [S14]. Communications of the ACM 9(9):684.
|
* [S14]. Communications of the ACM 9(9):684.
|
||||||
*/
|
*/
|
||||||
JEMALLOC_INLINE double
|
JEMALLOC_INLINE double
|
||||||
ln_gamma(double x)
|
ln_gamma(double x) {
|
||||||
{
|
|
||||||
double f, z;
|
double f, z;
|
||||||
|
|
||||||
assert(x > 0.0);
|
assert(x > 0.0);
|
||||||
@ -31,8 +30,9 @@ ln_gamma(double x)
|
|||||||
}
|
}
|
||||||
x = z;
|
x = z;
|
||||||
f = -log(f);
|
f = -log(f);
|
||||||
} else
|
} else {
|
||||||
f = 0.0;
|
f = 0.0;
|
||||||
|
}
|
||||||
|
|
||||||
z = 1.0 / (x * x);
|
z = 1.0 / (x * x);
|
||||||
|
|
||||||
@ -51,8 +51,7 @@ ln_gamma(double x)
|
|||||||
* Applied Statistics 19:285-287.
|
* Applied Statistics 19:285-287.
|
||||||
*/
|
*/
|
||||||
JEMALLOC_INLINE double
|
JEMALLOC_INLINE double
|
||||||
i_gamma(double x, double p, double ln_gamma_p)
|
i_gamma(double x, double p, double ln_gamma_p) {
|
||||||
{
|
|
||||||
double acu, factor, oflo, gin, term, rn, a, b, an, dif;
|
double acu, factor, oflo, gin, term, rn, a, b, an, dif;
|
||||||
double pn[6];
|
double pn[6];
|
||||||
unsigned i;
|
unsigned i;
|
||||||
@ -60,8 +59,9 @@ i_gamma(double x, double p, double ln_gamma_p)
|
|||||||
assert(p > 0.0);
|
assert(p > 0.0);
|
||||||
assert(x >= 0.0);
|
assert(x >= 0.0);
|
||||||
|
|
||||||
if (x == 0.0)
|
if (x == 0.0) {
|
||||||
return (0.0);
|
return (0.0);
|
||||||
|
}
|
||||||
|
|
||||||
acu = 1.0e-10;
|
acu = 1.0e-10;
|
||||||
oflo = 1.0e30;
|
oflo = 1.0e30;
|
||||||
@ -99,8 +99,9 @@ i_gamma(double x, double p, double ln_gamma_p)
|
|||||||
b += 2.0;
|
b += 2.0;
|
||||||
term += 1.0;
|
term += 1.0;
|
||||||
an = a * term;
|
an = a * term;
|
||||||
for (i = 0; i < 2; i++)
|
for (i = 0; i < 2; i++) {
|
||||||
pn[i+4] = b * pn[i+2] - an * pn[i];
|
pn[i+4] = b * pn[i+2] - an * pn[i];
|
||||||
|
}
|
||||||
if (pn[5] != 0.0) {
|
if (pn[5] != 0.0) {
|
||||||
rn = pn[4] / pn[5];
|
rn = pn[4] / pn[5];
|
||||||
dif = fabs(gin - rn);
|
dif = fabs(gin - rn);
|
||||||
@ -110,16 +111,18 @@ i_gamma(double x, double p, double ln_gamma_p)
|
|||||||
}
|
}
|
||||||
gin = rn;
|
gin = rn;
|
||||||
}
|
}
|
||||||
for (i = 0; i < 4; i++)
|
for (i = 0; i < 4; i++) {
|
||||||
pn[i] = pn[i+2];
|
pn[i] = pn[i+2];
|
||||||
|
}
|
||||||
|
|
||||||
if (fabs(pn[4]) >= oflo) {
|
if (fabs(pn[4]) >= oflo) {
|
||||||
for (i = 0; i < 4; i++)
|
for (i = 0; i < 4; i++) {
|
||||||
pn[i] /= oflo;
|
pn[i] /= oflo;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Given a value p in [0..1] of the lower tail area of the normal distribution,
|
* Given a value p in [0..1] of the lower tail area of the normal distribution,
|
||||||
@ -132,8 +135,7 @@ i_gamma(double x, double p, double ln_gamma_p)
|
|||||||
* distribution. Applied Statistics 37(3):477-484.
|
* distribution. Applied Statistics 37(3):477-484.
|
||||||
*/
|
*/
|
||||||
JEMALLOC_INLINE double
|
JEMALLOC_INLINE double
|
||||||
pt_norm(double p)
|
pt_norm(double p) {
|
||||||
{
|
|
||||||
double q, r, ret;
|
double q, r, ret;
|
||||||
|
|
||||||
assert(p > 0.0 && p < 1.0);
|
assert(p > 0.0 && p < 1.0);
|
||||||
@ -153,10 +155,11 @@ pt_norm(double p)
|
|||||||
r + 6.8718700749205790830e2) * r + 4.2313330701600911252e1)
|
r + 6.8718700749205790830e2) * r + 4.2313330701600911252e1)
|
||||||
* r + 1.0));
|
* r + 1.0));
|
||||||
} else {
|
} else {
|
||||||
if (q < 0.0)
|
if (q < 0.0) {
|
||||||
r = p;
|
r = p;
|
||||||
else
|
} else {
|
||||||
r = 1.0 - p;
|
r = 1.0 - p;
|
||||||
|
}
|
||||||
assert(r > 0.0);
|
assert(r > 0.0);
|
||||||
|
|
||||||
r = sqrt(-log(r));
|
r = sqrt(-log(r));
|
||||||
@ -198,8 +201,9 @@ pt_norm(double p)
|
|||||||
5.99832206555887937690e-1)
|
5.99832206555887937690e-1)
|
||||||
* r + 1.0));
|
* r + 1.0));
|
||||||
}
|
}
|
||||||
if (q < 0.0)
|
if (q < 0.0) {
|
||||||
ret = -ret;
|
ret = -ret;
|
||||||
|
}
|
||||||
return (ret);
|
return (ret);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -219,8 +223,7 @@ pt_norm(double p)
|
|||||||
* points of the Chi^2 distribution. Applied Statistics 40(1):233-235.
|
* points of the Chi^2 distribution. Applied Statistics 40(1):233-235.
|
||||||
*/
|
*/
|
||||||
JEMALLOC_INLINE double
|
JEMALLOC_INLINE double
|
||||||
pt_chi2(double p, double df, double ln_gamma_df_2)
|
pt_chi2(double p, double df, double ln_gamma_df_2) {
|
||||||
{
|
|
||||||
double e, aa, xx, c, ch, a, q, p1, p2, t, x, b, s1, s2, s3, s4, s5, s6;
|
double e, aa, xx, c, ch, a, q, p1, p2, t, x, b, s1, s2, s3, s4, s5, s6;
|
||||||
unsigned i;
|
unsigned i;
|
||||||
|
|
||||||
@ -236,8 +239,9 @@ pt_chi2(double p, double df, double ln_gamma_df_2)
|
|||||||
if (df < -1.24 * log(p)) {
|
if (df < -1.24 * log(p)) {
|
||||||
/* Starting approximation for small Chi^2. */
|
/* Starting approximation for small Chi^2. */
|
||||||
ch = pow(p * xx * exp(ln_gamma_df_2 + xx * aa), 1.0 / xx);
|
ch = pow(p * xx * exp(ln_gamma_df_2 + xx * aa), 1.0 / xx);
|
||||||
if (ch - e < 0.0)
|
if (ch - e < 0.0) {
|
||||||
return (ch);
|
return (ch);
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
if (df > 0.32) {
|
if (df > 0.32) {
|
||||||
x = pt_norm(p);
|
x = pt_norm(p);
|
||||||
@ -263,18 +267,20 @@ pt_chi2(double p, double df, double ln_gamma_df_2)
|
|||||||
* (13.32 + 3.0 * ch)) / p2;
|
* (13.32 + 3.0 * ch)) / p2;
|
||||||
ch -= (1.0 - exp(a + ln_gamma_df_2 + 0.5 * ch +
|
ch -= (1.0 - exp(a + ln_gamma_df_2 + 0.5 * ch +
|
||||||
c * aa) * p2 / p1) / t;
|
c * aa) * p2 / p1) / t;
|
||||||
if (fabs(q / ch - 1.0) - 0.01 <= 0.0)
|
if (fabs(q / ch - 1.0) - 0.01 <= 0.0) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
for (i = 0; i < 20; i++) {
|
for (i = 0; i < 20; i++) {
|
||||||
/* Calculation of seven-term Taylor series. */
|
/* Calculation of seven-term Taylor series. */
|
||||||
q = ch;
|
q = ch;
|
||||||
p1 = 0.5 * ch;
|
p1 = 0.5 * ch;
|
||||||
if (p1 < 0.0)
|
if (p1 < 0.0) {
|
||||||
return (-1.0);
|
return (-1.0);
|
||||||
|
}
|
||||||
p2 = p - i_gamma(p1, xx, ln_gamma_df_2);
|
p2 = p - i_gamma(p1, xx, ln_gamma_df_2);
|
||||||
t = p2 * exp(xx * aa + ln_gamma_df_2 + p1 - c * log(ch));
|
t = p2 * exp(xx * aa + ln_gamma_df_2 + p1 - c * log(ch));
|
||||||
b = t / ch;
|
b = t / ch;
|
||||||
@ -290,9 +296,10 @@ pt_chi2(double p, double df, double ln_gamma_df_2)
|
|||||||
s6 = (120.0 + c * (346.0 + 127.0 * c)) / 5040.0;
|
s6 = (120.0 + c * (346.0 + 127.0 * c)) / 5040.0;
|
||||||
ch += t * (1.0 + 0.5 * t * s1 - b * c * (s1 - b * (s2 - b * (s3
|
ch += t * (1.0 + 0.5 * t * s1 - b * c * (s1 - b * (s2 - b * (s3
|
||||||
- b * (s4 - b * (s5 - b * s6))))));
|
- b * (s4 - b * (s5 - b * s6))))));
|
||||||
if (fabs(q / ch - 1.0) <= e)
|
if (fabs(q / ch - 1.0) <= e) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return (ch);
|
return (ch);
|
||||||
}
|
}
|
||||||
@ -303,8 +310,7 @@ pt_chi2(double p, double df, double ln_gamma_df_2)
|
|||||||
* p.
|
* p.
|
||||||
*/
|
*/
|
||||||
JEMALLOC_INLINE double
|
JEMALLOC_INLINE double
|
||||||
pt_gamma(double p, double shape, double scale, double ln_gamma_shape)
|
pt_gamma(double p, double shape, double scale, double ln_gamma_shape) {
|
||||||
{
|
|
||||||
return (pt_chi2(p, shape * 2.0, ln_gamma_shape) * 0.5 * scale);
|
return (pt_chi2(p, shape * 2.0, ln_gamma_shape) * 0.5 * scale);
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
@@ -37,20 +37,19 @@ typedef struct {						\
 a_attr bool								\
 a_prefix##init(a_mq_type *mq) {						\
 									\
-	if (mtx_init(&mq->lock))					\
+	if (mtx_init(&mq->lock)) {					\
 		return (true);						\
+	}								\
 	ql_new(&mq->msgs);						\
 	mq->count = 0;							\
 	return (false);							\
 }									\
 a_attr void								\
-a_prefix##fini(a_mq_type *mq)						\
-{									\
+a_prefix##fini(a_mq_type *mq) {						\
 	mtx_fini(&mq->lock);						\
 }									\
 a_attr unsigned								\
-a_prefix##count(a_mq_type *mq)						\
-{									\
+a_prefix##count(a_mq_type *mq) {					\
 	unsigned count;							\
 									\
 	mtx_lock(&mq->lock);						\
@@ -59,8 +58,7 @@ a_prefix##count(a_mq_type *mq)					\
 	return (count);							\
 }									\
 a_attr a_mq_msg_type *							\
-a_prefix##tryget(a_mq_type *mq)						\
-{									\
+a_prefix##tryget(a_mq_type *mq) {					\
 	a_mq_msg_type *msg;						\
 									\
 	mtx_lock(&mq->lock);						\
@@ -73,32 +71,33 @@ a_prefix##tryget(a_mq_type *mq)					\
 	return (msg);							\
 }									\
 a_attr a_mq_msg_type *							\
-a_prefix##get(a_mq_type *mq)						\
-{									\
+a_prefix##get(a_mq_type *mq) {						\
 	a_mq_msg_type *msg;						\
 	unsigned ns;							\
 									\
 	msg = a_prefix##tryget(mq);					\
-	if (msg != NULL)						\
+	if (msg != NULL) {						\
 		return (msg);						\
+	}								\
 									\
 	ns = 1;								\
 	while (true) {							\
 		mq_nanosleep(ns);					\
 		msg = a_prefix##tryget(mq);				\
-		if (msg != NULL)					\
+		if (msg != NULL) {					\
 			return (msg);					\
+		}							\
 		if (ns < 1000*1000*1000) {				\
 			/* Double sleep time, up to max 1 second. */	\
 			ns <<= 1;					\
-			if (ns > 1000*1000*1000)			\
+			if (ns > 1000*1000*1000) {			\
 				ns = 1000*1000*1000;			\
+			}						\
 		}							\
 	}								\
 }									\
 a_attr void								\
-a_prefix##put(a_mq_type *mq, a_mq_msg_type *msg)			\
-{									\
+a_prefix##put(a_mq_type *mq, a_mq_msg_type *msg) {			\
 									\
 	mtx_lock(&mq->lock);						\
 	ql_elm_new(msg, a_field);					\
@@ -298,8 +298,7 @@ typedef void (test_t)(void);
 
 #define	TEST_BEGIN(f)							\
 static void								\
-f(void)									\
-{									\
+f(void) {								\
 	p_test_init(#f);
 
 #define	TEST_END							\
@@ -11,8 +11,7 @@ static bool have_dss =
     ;
 
 void *
-thd_start(void *arg)
-{
+thd_start(void *arg) {
 	unsigned thread_ind = (unsigned)(uintptr_t)arg;
 	unsigned arena_ind;
 	void *p;
@@ -45,8 +44,7 @@ thd_start(void *arg)
 	return (NULL);
 }
 
-TEST_BEGIN(test_MALLOCX_ARENA)
-{
+TEST_BEGIN(test_MALLOCX_ARENA) {
 	thd_t thds[NTHREADS];
 	unsigned i;
 
@@ -55,14 +53,14 @@ TEST_BEGIN(test_MALLOCX_ARENA)
 		    (void *)(uintptr_t)i);
 	}
 
-	for (i = 0; i < NTHREADS; i++)
+	for (i = 0; i < NTHREADS; i++) {
 		thd_join(thds[i], NULL);
+	}
 }
 TEST_END
 
 int
-main(void)
-{
+main(void) {
 	return (test(
 	    test_MALLOCX_ARENA));
 }
@@ -8,14 +8,12 @@
  * potential OOM on e.g. 32-bit Windows.
  */
 static void
-purge(void)
-{
+purge(void) {
 	assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
 	    "Unexpected mallctl error");
 }
 
-TEST_BEGIN(test_alignment_errors)
-{
+TEST_BEGIN(test_alignment_errors) {
 	size_t alignment;
 	void *p;
 
@@ -36,8 +34,7 @@ TEST_BEGIN(test_alignment_errors)
 }
 TEST_END
 
-TEST_BEGIN(test_oom_errors)
-{
+TEST_BEGIN(test_oom_errors) {
 	size_t alignment, size;
 	void *p;
 
@@ -81,15 +78,15 @@ TEST_BEGIN(test_oom_errors)
 }
 TEST_END
 
-TEST_BEGIN(test_alignment_and_size)
-{
+TEST_BEGIN(test_alignment_and_size) {
 #define	NITER 4
 	size_t alignment, size, total;
 	unsigned i;
 	void *ps[NITER];
 
-	for (i = 0; i < NITER; i++)
+	for (i = 0; i < NITER; i++) {
 		ps[i] = NULL;
+	}
 
 	for (alignment = 8;
 	    alignment <= MAXALIGN;
@@ -110,9 +107,10 @@ TEST_BEGIN(test_alignment_and_size)
 				    alignment, size, size, buf);
 				}
 				total += malloc_usable_size(ps[i]);
-				if (total >= (MAXALIGN << 1))
+				if (total >= (MAXALIGN << 1)) {
 					break;
+				}
 			}
 			for (i = 0; i < NITER; i++) {
 				if (ps[i] != NULL) {
 					free(ps[i]);
@@ -127,8 +125,7 @@ TEST_BEGIN(test_alignment_and_size)
 TEST_END
 
 int
-main(void)
-{
+main(void) {
 	return (test(
 	    test_alignment_errors,
 	    test_oom_errors,
@@ -9,8 +9,7 @@ static const bool config_stats =
     ;
 
 void *
-thd_start(void *arg)
-{
+thd_start(void *arg) {
 	int err;
 	void *p;
 	uint64_t a0, a1, d0, d1;
@@ -19,15 +18,17 @@ thd_start(void *arg)
 
 	sz = sizeof(a0);
 	if ((err = mallctl("thread.allocated", (void *)&a0, &sz, NULL, 0))) {
-		if (err == ENOENT)
+		if (err == ENOENT) {
 			goto label_ENOENT;
+		}
 		test_fail("%s(): Error in mallctl(): %s", __func__,
 		    strerror(err));
 	}
 	sz = sizeof(ap0);
 	if ((err = mallctl("thread.allocatedp", (void *)&ap0, &sz, NULL, 0))) {
-		if (err == ENOENT)
+		if (err == ENOENT) {
 			goto label_ENOENT;
+		}
 		test_fail("%s(): Error in mallctl(): %s", __func__,
 		    strerror(err));
 	}
@@ -37,16 +38,18 @@ thd_start(void *arg)
 
 	sz = sizeof(d0);
 	if ((err = mallctl("thread.deallocated", (void *)&d0, &sz, NULL, 0))) {
-		if (err == ENOENT)
+		if (err == ENOENT) {
 			goto label_ENOENT;
+		}
 		test_fail("%s(): Error in mallctl(): %s", __func__,
 		    strerror(err));
 	}
 	sz = sizeof(dp0);
 	if ((err = mallctl("thread.deallocatedp", (void *)&dp0, &sz, NULL,
 	    0))) {
-		if (err == ENOENT)
+		if (err == ENOENT) {
 			goto label_ENOENT;
+		}
 		test_fail("%s(): Error in mallctl(): %s", __func__,
 		    strerror(err));
 	}
@@ -96,14 +99,12 @@ label_ENOENT:
 	return (NULL);
 }
 
-TEST_BEGIN(test_main_thread)
-{
+TEST_BEGIN(test_main_thread) {
 	thd_start(NULL);
 }
 TEST_END
 
-TEST_BEGIN(test_subthread)
-{
+TEST_BEGIN(test_subthread) {
 	thd_t thd;
 
 	thd_create(&thd, thd_start, NULL);
@@ -112,8 +113,7 @@ TEST_BEGIN(test_subthread)
 TEST_END
 
 int
-main(void)
-{
+main(void) {
 	/* Run tests multiple times to check for bad interactions. */
 	return (test(
 	    test_main_thread,
@@ -1,8 +1,7 @@
 #include <memory>
 #include "test/jemalloc_test.h"
 
-TEST_BEGIN(test_basic)
-{
+TEST_BEGIN(test_basic) {
 	auto foo = new long(4);
 	assert_ptr_not_null(foo, "Unexpected new[] failure");
 	delete foo;
@@ -20,8 +19,7 @@ TEST_BEGIN(test_basic)
 TEST_END
 
 int
-main()
-{
+main() {
 	return (test(
 	    test_basic));
 }
@@ -7,8 +7,7 @@ const char *malloc_conf = "junk:false";
 #include "test/extent_hooks.h"
 
 static void
-test_extent_body(unsigned arena_ind)
-{
+test_extent_body(unsigned arena_ind) {
 	void *p;
 	size_t large0, large1, large2, sz;
 	size_t purge_mib[3];
@@ -67,15 +66,17 @@ test_extent_body(unsigned arena_ind)
 	xallocx_success_b = (xallocx(p, large0, 0, flags) == large0);
 	assert_d_eq(mallctlbymib(purge_mib, purge_miblen, NULL, NULL, NULL, 0),
 	    0, "Unexpected arena.%u.purge error", arena_ind);
-	if (xallocx_success_b)
+	if (xallocx_success_b) {
 		assert_true(did_split, "Expected split");
+	}
 	xallocx_success_c = (xallocx(p, large0 * 2, 0, flags) == large0 * 2);
 	if (did_split) {
 		assert_b_eq(did_decommit, did_commit,
 		    "Expected decommit/commit match");
 	}
-	if (xallocx_success_b && xallocx_success_c)
+	if (xallocx_success_b && xallocx_success_c) {
 		assert_true(did_merge, "Expected merge");
+	}
 	dallocx(p, flags);
 	try_dalloc = true;
 	try_decommit = false;
@@ -86,8 +87,7 @@ test_extent_body(unsigned arena_ind)
 	dallocx(p, flags);
 }
 
-TEST_BEGIN(test_extent_manual_hook)
-{
+TEST_BEGIN(test_extent_manual_hook) {
 	unsigned arena_ind;
 	size_t old_size, new_size, sz;
 	size_t hooks_mib[3];
@@ -155,8 +155,7 @@ TEST_BEGIN(test_extent_manual_hook)
 }
 TEST_END
 
-TEST_BEGIN(test_extent_auto_hook)
-{
+TEST_BEGIN(test_extent_auto_hook) {
 	unsigned arena_ind;
 	size_t new_size, sz;
 	extent_hooks_t *new_hooks;
@@ -174,8 +173,7 @@ TEST_BEGIN(test_extent_auto_hook)
 TEST_END
 
 int
-main(void)
-{
+main(void) {
 	return (test(
 	    test_extent_manual_hook,
 	    test_extent_auto_hook));
@@ -5,8 +5,7 @@ const char *malloc_conf = "junk:false";
 #endif
 
 static unsigned
-get_nsizes_impl(const char *cmd)
-{
+get_nsizes_impl(const char *cmd) {
 	unsigned ret;
 	size_t z;
 
@@ -18,14 +17,12 @@ get_nsizes_impl(const char *cmd)
 }
 
 static unsigned
-get_nlarge(void)
-{
+get_nlarge(void) {
 	return (get_nsizes_impl("arenas.nlextents"));
 }
 
 static size_t
-get_size_impl(const char *cmd, size_t ind)
-{
+get_size_impl(const char *cmd, size_t ind) {
 	size_t ret;
 	size_t z;
 	size_t mib[4];
@@ -43,8 +40,7 @@ get_size_impl(const char *cmd, size_t ind)
 }
 
 static size_t
-get_large_size(size_t ind)
-{
+get_large_size(size_t ind) {
 	return (get_size_impl("arenas.lextent.0.size", ind));
 }
 
@@ -54,14 +50,12 @@ get_large_size(size_t ind)
  * potential OOM on e.g. 32-bit Windows.
  */
 static void
-purge(void)
-{
+purge(void) {
 	assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
 	    "Unexpected mallctl error");
 }
 
-TEST_BEGIN(test_overflow)
-{
+TEST_BEGIN(test_overflow) {
 	size_t largemax;
 
 	largemax = get_large_size(get_nlarge()-1);
@@ -81,8 +75,7 @@ TEST_BEGIN(test_overflow)
 }
 TEST_END
 
-TEST_BEGIN(test_oom)
-{
+TEST_BEGIN(test_oom) {
 	size_t largemax;
 	bool oom;
 	void *ptrs[3];
@@ -96,16 +89,18 @@ TEST_BEGIN(test_oom)
 	oom = false;
 	for (i = 0; i < sizeof(ptrs) / sizeof(void *); i++) {
 		ptrs[i] = mallocx(largemax, 0);
-		if (ptrs[i] == NULL)
+		if (ptrs[i] == NULL) {
 			oom = true;
+		}
 	}
 	assert_true(oom,
 	    "Expected OOM during series of calls to mallocx(size=%zu, 0)",
 	    largemax);
 	for (i = 0; i < sizeof(ptrs) / sizeof(void *); i++) {
-		if (ptrs[i] != NULL)
+		if (ptrs[i] != NULL) {
 			dallocx(ptrs[i], 0);
+		}
 	}
 	purge();
 
 #if LG_SIZEOF_PTR == 3
@@ -122,8 +117,7 @@ TEST_BEGIN(test_oom)
 }
 TEST_END
 
-TEST_BEGIN(test_basic)
-{
+TEST_BEGIN(test_basic) {
 #define	MAXSZ (((size_t)1) << 23)
 	size_t sz;
 
@@ -160,16 +154,16 @@ TEST_BEGIN(test_basic)
 }
 TEST_END
 
-TEST_BEGIN(test_alignment_and_size)
-{
+TEST_BEGIN(test_alignment_and_size) {
 #define	MAXALIGN (((size_t)1) << 23)
 #define	NITER 4
 	size_t nsz, rsz, sz, alignment, total;
 	unsigned i;
 	void *ps[NITER];
 
-	for (i = 0; i < NITER; i++)
+	for (i = 0; i < NITER; i++) {
 		ps[i] = NULL;
+	}
 
 	for (alignment = 8;
 	    alignment <= MAXALIGN;
@@ -202,9 +196,10 @@ TEST_BEGIN(test_alignment_and_size)
 				    " alignment=%zu, size=%zu", ps[i],
 				    alignment, sz);
 				total += rsz;
-				if (total >= (MAXALIGN << 1))
+				if (total >= (MAXALIGN << 1)) {
 					break;
+				}
 			}
 			for (i = 0; i < NITER; i++) {
 				if (ps[i] != NULL) {
 					dallocx(ps[i], 0);
@@ -220,8 +215,7 @@ TEST_BEGIN(test_alignment_and_size)
 TEST_END
 
 int
-main(void)
-{
+main(void) {
 	return (test(
 	    test_overflow,
 	    test_oom,
@@ -1,7 +1,6 @@
 #include "test/jemalloc_test.h"
 
-TEST_BEGIN(test_overflow)
-{
+TEST_BEGIN(test_overflow) {
 	unsigned nlextents;
 	size_t mib[4];
 	size_t sz, miblen, max_size_class;
@@ -41,8 +40,7 @@ TEST_BEGIN(test_overflow)
 TEST_END
 
 int
-main(void)
-{
+main(void) {
 	return (test(
 	    test_overflow));
 }
@@ -8,14 +8,12 @@
  * potential OOM on e.g. 32-bit Windows.
  */
 static void
-purge(void)
-{
+purge(void) {
 	assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
 	    "Unexpected mallctl error");
 }
 
-TEST_BEGIN(test_alignment_errors)
-{
+TEST_BEGIN(test_alignment_errors) {
 	size_t alignment;
 	void *p;
 
@@ -34,8 +32,7 @@ TEST_BEGIN(test_alignment_errors)
 }
 TEST_END
 
-TEST_BEGIN(test_oom_errors)
-{
+TEST_BEGIN(test_oom_errors) {
 	size_t alignment, size;
 	void *p;
 
@@ -73,16 +70,16 @@ TEST_BEGIN(test_oom_errors)
 }
 TEST_END
 
-TEST_BEGIN(test_alignment_and_size)
-{
+TEST_BEGIN(test_alignment_and_size) {
 #define	NITER 4
 	size_t alignment, size, total;
 	unsigned i;
 	int err;
 	void *ps[NITER];
 
-	for (i = 0; i < NITER; i++)
+	for (i = 0; i < NITER; i++) {
 		ps[i] = NULL;
+	}
 
 	for (alignment = 8;
 	    alignment <= MAXALIGN;
@@ -104,9 +101,10 @@ TEST_BEGIN(test_alignment_and_size)
 				    alignment, size, size, buf);
 				}
 				total += malloc_usable_size(ps[i]);
-				if (total >= (MAXALIGN << 1))
+				if (total >= (MAXALIGN << 1)) {
 					break;
+				}
 			}
 			for (i = 0; i < NITER; i++) {
 				if (ps[i] != NULL) {
 					free(ps[i]);
@@ -121,8 +119,7 @@ TEST_BEGIN(test_alignment_and_size)
 TEST_END
 
 int
-main(void)
-{
+main(void) {
 	return (test(
 	    test_alignment_errors,
 	    test_oom_errors,
@ -1,8 +1,7 @@
|
|||||||
#include "test/jemalloc_test.h"
|
#include "test/jemalloc_test.h"
|
||||||
|
|
||||||
static unsigned
|
static unsigned
|
||||||
get_nsizes_impl(const char *cmd)
|
get_nsizes_impl(const char *cmd) {
|
||||||
{
|
|
||||||
unsigned ret;
|
unsigned ret;
|
||||||
size_t z;
|
size_t z;
|
||||||
|
|
||||||
@ -14,14 +13,12 @@ get_nsizes_impl(const char *cmd)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static unsigned
|
static unsigned
|
||||||
get_nlarge(void)
|
get_nlarge(void) {
|
||||||
{
|
|
||||||
return (get_nsizes_impl("arenas.nlextents"));
|
return (get_nsizes_impl("arenas.nlextents"));
|
||||||
}
|
}
|
||||||
|
|
||||||
static size_t
|
static size_t
|
||||||
get_size_impl(const char *cmd, size_t ind)
|
get_size_impl(const char *cmd, size_t ind) {
|
||||||
{
|
|
||||||
size_t ret;
|
size_t ret;
|
||||||
size_t z;
|
size_t z;
|
||||||
size_t mib[4];
|
size_t mib[4];
|
||||||
@ -39,13 +36,11 @@ get_size_impl(const char *cmd, size_t ind)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static size_t
|
static size_t
|
||||||
get_large_size(size_t ind)
|
get_large_size(size_t ind) {
|
||||||
{
|
|
||||||
return (get_size_impl("arenas.lextent.0.size", ind));
|
return (get_size_impl("arenas.lextent.0.size", ind));
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_BEGIN(test_grow_and_shrink)
|
TEST_BEGIN(test_grow_and_shrink) {
|
||||||
{
|
|
||||||
void *p, *q;
|
void *p, *q;
|
||||||
size_t tsz;
|
size_t tsz;
|
||||||
#define NCYCLES 3
|
#define NCYCLES 3
|
||||||
@ -90,8 +85,7 @@ TEST_BEGIN(test_grow_and_shrink)
|
|||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
static bool
|
static bool
|
||||||
validate_fill(const void *p, uint8_t c, size_t offset, size_t len)
|
validate_fill(const void *p, uint8_t c, size_t offset, size_t len) {
|
||||||
{
|
|
||||||
bool ret = false;
|
bool ret = false;
|
||||||
const uint8_t *buf = (const uint8_t *)p;
|
const uint8_t *buf = (const uint8_t *)p;
|
||||||
size_t i;
|
size_t i;
|
||||||
@ -109,8 +103,7 @@ validate_fill(const void *p, uint8_t c, size_t offset, size_t len)
|
|||||||
return (ret);
|
return (ret);
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_BEGIN(test_zero)
|
TEST_BEGIN(test_zero) {
|
||||||
{
|
|
||||||
void *p, *q;
|
void *p, *q;
|
||||||
size_t psz, qsz, i, j;
|
size_t psz, qsz, i, j;
|
||||||
size_t start_sizes[] = {1, 3*1024, 63*1024, 4095*1024};
|
size_t start_sizes[] = {1, 3*1024, 63*1024, 4095*1024};
|
||||||
@ -154,8 +147,7 @@ TEST_BEGIN(test_zero)
|
|||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_align)
|
TEST_BEGIN(test_align) {
|
||||||
{
|
|
||||||
void *p, *q;
|
void *p, *q;
|
||||||
size_t align;
|
size_t align;
|
||||||
#define MAX_ALIGN (ZU(1) << 25)
|
#define MAX_ALIGN (ZU(1) << 25)
|
||||||
@ -179,8 +171,7 @@ TEST_BEGIN(test_align)
|
|||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_lg_align_and_zero)
|
TEST_BEGIN(test_lg_align_and_zero) {
|
||||||
{
|
|
||||||
void *p, *q;
|
void *p, *q;
|
||||||
unsigned lg_align;
|
unsigned lg_align;
|
||||||
size_t sz;
|
size_t sz;
|
||||||
@ -217,8 +208,7 @@ TEST_BEGIN(test_lg_align_and_zero)
|
|||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_overflow)
|
TEST_BEGIN(test_overflow) {
|
||||||
{
|
|
||||||
size_t largemax;
|
size_t largemax;
|
||||||
void *p;
|
void *p;
|
||||||
|
|
||||||
@ -245,8 +235,7 @@ TEST_BEGIN(test_overflow)
|
|||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
int
|
int
|
||||||
main(void)
|
main(void) {
|
||||||
{
|
|
||||||
return (test(
|
return (test(
|
||||||
test_grow_and_shrink,
|
test_grow_and_shrink,
|
||||||
test_zero,
|
test_zero,
|
||||||
|
@ -3,21 +3,20 @@
|
|||||||
#define MAXALIGN (((size_t)1) << 22)
|
#define MAXALIGN (((size_t)1) << 22)
|
||||||
#define NITER 3
|
#define NITER 3
|
||||||
|
|
||||||
TEST_BEGIN(test_basic)
|
TEST_BEGIN(test_basic) {
|
||||||
{
|
|
||||||
void *ptr = mallocx(64, 0);
|
void *ptr = mallocx(64, 0);
|
||||||
sdallocx(ptr, 64, 0);
|
sdallocx(ptr, 64, 0);
|
||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_alignment_and_size)
|
TEST_BEGIN(test_alignment_and_size) {
|
||||||
{
|
|
||||||
size_t nsz, sz, alignment, total;
|
size_t nsz, sz, alignment, total;
|
||||||
unsigned i;
|
unsigned i;
|
||||||
void *ps[NITER];
|
void *ps[NITER];
|
||||||
|
|
||||||
for (i = 0; i < NITER; i++)
|
for (i = 0; i < NITER; i++) {
|
||||||
ps[i] = NULL;
|
ps[i] = NULL;
|
||||||
|
}
|
||||||
|
|
||||||
for (alignment = 8;
|
for (alignment = 8;
|
||||||
alignment <= MAXALIGN;
|
alignment <= MAXALIGN;
|
||||||
@ -32,9 +31,10 @@ TEST_BEGIN(test_alignment_and_size)
|
|||||||
ps[i] = mallocx(sz, MALLOCX_ALIGN(alignment) |
|
ps[i] = mallocx(sz, MALLOCX_ALIGN(alignment) |
|
||||||
MALLOCX_ZERO);
|
MALLOCX_ZERO);
|
||||||
total += nsz;
|
total += nsz;
|
||||||
if (total >= (MAXALIGN << 1))
|
if (total >= (MAXALIGN << 1)) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
for (i = 0; i < NITER; i++) {
|
for (i = 0; i < NITER; i++) {
|
||||||
if (ps[i] != NULL) {
|
if (ps[i] != NULL) {
|
||||||
sdallocx(ps[i], sz,
|
sdallocx(ps[i], sz,
|
||||||
@ -48,8 +48,7 @@ TEST_BEGIN(test_alignment_and_size)
|
|||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
int
|
int
|
||||||
main(void)
|
main(void) {
|
||||||
{
|
|
||||||
return (test(
|
return (test(
|
||||||
test_basic,
|
test_basic,
|
||||||
test_alignment_and_size));
|
test_alignment_and_size));
|
||||||
|
@ -3,8 +3,7 @@
|
|||||||
#define NTHREADS 10
|
#define NTHREADS 10
|
||||||
|
|
||||||
void *
|
void *
|
||||||
thd_start(void *arg)
|
thd_start(void *arg) {
|
||||||
{
|
|
||||||
unsigned main_arena_ind = *(unsigned *)arg;
|
unsigned main_arena_ind = *(unsigned *)arg;
|
||||||
void *p;
|
void *p;
|
||||||
unsigned arena_ind;
|
unsigned arena_ind;
|
||||||
@ -38,8 +37,7 @@ thd_start(void *arg)
|
|||||||
return (NULL);
|
return (NULL);
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_BEGIN(test_thread_arena)
|
TEST_BEGIN(test_thread_arena) {
|
||||||
{
|
|
||||||
void *p;
|
void *p;
|
||||||
unsigned arena_ind;
|
unsigned arena_ind;
|
||||||
size_t size;
|
size_t size;
|
||||||
@ -73,8 +71,7 @@ TEST_BEGIN(test_thread_arena)
|
|||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
int
|
int
|
||||||
main(void)
|
main(void) {
|
||||||
{
|
|
||||||
return (test(
|
return (test(
|
||||||
test_thread_arena));
|
test_thread_arena));
|
||||||
}
|
}
|
||||||
|
@ -9,8 +9,7 @@ static const bool config_tcache =
|
|||||||
;
|
;
|
||||||
|
|
||||||
void *
|
void *
|
||||||
thd_start(void *arg)
|
thd_start(void *arg) {
|
||||||
{
|
|
||||||
int err;
|
int err;
|
||||||
size_t sz;
|
size_t sz;
|
||||||
bool e0, e1;
|
bool e0, e1;
|
||||||
@ -84,14 +83,12 @@ label_ENOENT:
|
|||||||
return (NULL);
|
return (NULL);
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_BEGIN(test_main_thread)
|
TEST_BEGIN(test_main_thread) {
|
||||||
{
|
|
||||||
thd_start(NULL);
|
thd_start(NULL);
|
||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_subthread)
|
TEST_BEGIN(test_subthread) {
|
||||||
{
|
|
||||||
thd_t thd;
|
thd_t thd;
|
||||||
|
|
||||||
thd_create(&thd, thd_start, NULL);
|
thd_create(&thd, thd_start, NULL);
|
||||||
@ -100,8 +97,7 @@ TEST_BEGIN(test_subthread)
|
|||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
int
|
int
|
||||||
main(void)
|
main(void) {
|
||||||
{
|
|
||||||
/* Run tests multiple times to check for bad interactions. */
|
/* Run tests multiple times to check for bad interactions. */
|
||||||
return (test(
|
return (test(
|
||||||
test_main_thread,
|
test_main_thread,
|
||||||
|
@ -10,8 +10,7 @@ const char *malloc_conf = "junk:false";
|
|||||||
* xallocx() would ordinarily be able to extend.
|
* xallocx() would ordinarily be able to extend.
|
||||||
*/
|
*/
|
||||||
static unsigned
|
static unsigned
|
||||||
arena_ind(void)
|
arena_ind(void) {
|
||||||
{
|
|
||||||
static unsigned ind = 0;
|
static unsigned ind = 0;
|
||||||
|
|
||||||
if (ind == 0) {
|
if (ind == 0) {
|
||||||
@ -23,8 +22,7 @@ arena_ind(void)
|
|||||||
return (ind);
|
return (ind);
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_BEGIN(test_same_size)
|
TEST_BEGIN(test_same_size) {
|
||||||
{
|
|
||||||
void *p;
|
void *p;
|
||||||
size_t sz, tsz;
|
size_t sz, tsz;
|
||||||
|
|
||||||
@ -39,8 +37,7 @@ TEST_BEGIN(test_same_size)
|
|||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_extra_no_move)
|
TEST_BEGIN(test_extra_no_move) {
|
||||||
{
|
|
||||||
void *p;
|
void *p;
|
||||||
size_t sz, tsz;
|
size_t sz, tsz;
|
||||||
|
|
||||||
@ -55,8 +52,7 @@ TEST_BEGIN(test_extra_no_move)
|
|||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_no_move_fail)
|
TEST_BEGIN(test_no_move_fail) {
|
||||||
{
|
|
||||||
void *p;
|
void *p;
|
||||||
size_t sz, tsz;
|
size_t sz, tsz;
|
||||||
|
|
||||||
@ -72,8 +68,7 @@ TEST_BEGIN(test_no_move_fail)
|
|||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
static unsigned
|
static unsigned
|
||||||
get_nsizes_impl(const char *cmd)
|
get_nsizes_impl(const char *cmd) {
|
||||||
{
|
|
||||||
unsigned ret;
|
unsigned ret;
|
||||||
size_t z;
|
size_t z;
|
||||||
|
|
||||||
@ -85,20 +80,17 @@ get_nsizes_impl(const char *cmd)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static unsigned
|
static unsigned
|
||||||
get_nsmall(void)
|
get_nsmall(void) {
|
||||||
{
|
|
||||||
return (get_nsizes_impl("arenas.nbins"));
|
return (get_nsizes_impl("arenas.nbins"));
|
||||||
}
|
}
|
||||||
|
|
||||||
static unsigned
|
static unsigned
|
||||||
get_nlarge(void)
|
get_nlarge(void) {
|
||||||
{
|
|
||||||
return (get_nsizes_impl("arenas.nlextents"));
|
return (get_nsizes_impl("arenas.nlextents"));
|
||||||
}
|
}
|
||||||
|
|
||||||
static size_t
|
static size_t
|
||||||
get_size_impl(const char *cmd, size_t ind)
|
get_size_impl(const char *cmd, size_t ind) {
|
||||||
{
|
|
||||||
size_t ret;
|
size_t ret;
|
||||||
size_t z;
|
size_t z;
|
||||||
size_t mib[4];
|
size_t mib[4];
|
||||||
@ -116,19 +108,16 @@ get_size_impl(const char *cmd, size_t ind)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static size_t
|
static size_t
|
||||||
get_small_size(size_t ind)
|
get_small_size(size_t ind) {
|
||||||
{
|
|
||||||
return (get_size_impl("arenas.bin.0.size", ind));
|
return (get_size_impl("arenas.bin.0.size", ind));
|
||||||
}
|
}
|
||||||
|
|
||||||
static size_t
|
static size_t
|
||||||
get_large_size(size_t ind)
|
get_large_size(size_t ind) {
|
||||||
{
|
|
||||||
return (get_size_impl("arenas.lextent.0.size", ind));
|
return (get_size_impl("arenas.lextent.0.size", ind));
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_BEGIN(test_size)
|
TEST_BEGIN(test_size) {
|
||||||
{
|
|
||||||
size_t small0, largemax;
|
size_t small0, largemax;
|
||||||
void *p;
|
void *p;
|
||||||
|
|
||||||
@ -157,8 +146,7 @@ TEST_BEGIN(test_size)
|
|||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_size_extra_overflow)
|
TEST_BEGIN(test_size_extra_overflow) {
|
||||||
{
|
|
||||||
size_t small0, largemax;
|
size_t small0, largemax;
|
||||||
void *p;
|
void *p;
|
||||||
|
|
||||||
@ -189,8 +177,7 @@ TEST_BEGIN(test_size_extra_overflow)
|
|||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_extra_small)
|
TEST_BEGIN(test_extra_small) {
|
||||||
{
|
|
||||||
size_t small0, small1, largemax;
|
size_t small0, small1, largemax;
|
||||||
void *p;
|
void *p;
|
||||||
|
|
||||||
@ -221,8 +208,7 @@ TEST_BEGIN(test_extra_small)
|
|||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_extra_large)
|
TEST_BEGIN(test_extra_large) {
|
||||||
{
|
|
||||||
int flags = MALLOCX_ARENA(arena_ind());
|
int flags = MALLOCX_ARENA(arena_ind());
|
||||||
size_t smallmax, large1, large2, large3, largemax;
|
size_t smallmax, large1, large2, large3, largemax;
|
||||||
void *p;
|
void *p;
|
||||||
@ -292,8 +278,7 @@ TEST_BEGIN(test_extra_large)
|
|||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
static void
|
static void
|
||||||
print_filled_extents(const void *p, uint8_t c, size_t len)
|
print_filled_extents(const void *p, uint8_t c, size_t len) {
|
||||||
{
|
|
||||||
const uint8_t *pc = (const uint8_t *)p;
|
const uint8_t *pc = (const uint8_t *)p;
|
||||||
size_t i, range0;
|
size_t i, range0;
|
||||||
uint8_t c0;
|
uint8_t c0;
|
||||||
@ -312,26 +297,26 @@ print_filled_extents(const void *p, uint8_t c, size_t len)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static bool
|
static bool
|
||||||
validate_fill(const void *p, uint8_t c, size_t offset, size_t len)
|
validate_fill(const void *p, uint8_t c, size_t offset, size_t len) {
|
||||||
{
|
|
||||||
const uint8_t *pc = (const uint8_t *)p;
|
const uint8_t *pc = (const uint8_t *)p;
|
||||||
bool err;
|
bool err;
|
||||||
size_t i;
|
size_t i;
|
||||||
|
|
||||||
for (i = offset, err = false; i < offset+len; i++) {
|
for (i = offset, err = false; i < offset+len; i++) {
|
||||||
if (pc[i] != c)
|
if (pc[i] != c) {
|
||||||
err = true;
|
err = true;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if (err)
|
if (err) {
|
||||||
print_filled_extents(p, c, offset + len);
|
print_filled_extents(p, c, offset + len);
|
||||||
|
}
|
||||||
|
|
||||||
return (err);
|
return (err);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
test_zero(size_t szmin, size_t szmax)
|
test_zero(size_t szmin, size_t szmax) {
|
||||||
{
|
|
||||||
int flags = MALLOCX_ARENA(arena_ind()) | MALLOCX_ZERO;
|
int flags = MALLOCX_ARENA(arena_ind()) | MALLOCX_ZERO;
|
||||||
size_t sz, nsz;
|
size_t sz, nsz;
|
||||||
void *p;
|
void *p;
|
||||||
@ -378,8 +363,7 @@ test_zero(size_t szmin, size_t szmax)
|
|||||||
dallocx(p, flags);
|
dallocx(p, flags);
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_BEGIN(test_zero_large)
|
TEST_BEGIN(test_zero_large) {
|
||||||
{
|
|
||||||
size_t large0, large1;
|
size_t large0, large1;
|
||||||
|
|
||||||
/* Get size classes. */
|
/* Get size classes. */
|
||||||
@ -391,8 +375,7 @@ TEST_BEGIN(test_zero_large)
|
|||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
int
|
int
|
||||||
main(void)
|
main(void) {
|
||||||
{
|
|
||||||
return (test(
|
return (test(
|
||||||
test_same_size,
|
test_same_size,
|
||||||
test_extra_no_move,
|
test_extra_no_move,
|
||||||
|
@ -1,7 +1,6 @@
|
|||||||
#include "test/jemalloc_test.h"
|
#include "test/jemalloc_test.h"
|
||||||
|
|
||||||
void *
|
void *
|
||||||
btalloc(size_t size, unsigned bits)
|
btalloc(size_t size, unsigned bits) {
|
||||||
{
|
|
||||||
return (btalloc_0(size, bits));
|
return (btalloc_0(size, bits));
|
||||||
}
|
}
|
||||||
|
@ -5,8 +5,7 @@
|
|||||||
* time is guaranteed.
|
* time is guaranteed.
|
||||||
*/
|
*/
|
||||||
void
|
void
|
||||||
mq_nanosleep(unsigned ns)
|
mq_nanosleep(unsigned ns) {
|
||||||
{
|
|
||||||
assert(ns <= 1000*1000*1000);
|
assert(ns <= 1000*1000*1000);
|
||||||
|
|
||||||
#ifdef _WIN32
|
#ifdef _WIN32
|
||||||
|
@ -5,11 +5,12 @@
|
|||||||
#endif
|
#endif
|
||||||
|
|
||||||
bool
|
bool
|
||||||
mtx_init(mtx_t *mtx)
|
mtx_init(mtx_t *mtx) {
|
||||||
{
|
|
||||||
#ifdef _WIN32
|
#ifdef _WIN32
|
||||||
if (!InitializeCriticalSectionAndSpinCount(&mtx->lock, _CRT_SPINCOUNT))
|
if (!InitializeCriticalSectionAndSpinCount(&mtx->lock,
|
||||||
|
_CRT_SPINCOUNT)) {
|
||||||
return (true);
|
return (true);
|
||||||
|
}
|
||||||
#elif (defined(JEMALLOC_OS_UNFAIR_LOCK))
|
#elif (defined(JEMALLOC_OS_UNFAIR_LOCK))
|
||||||
mtx->lock = OS_UNFAIR_LOCK_INIT;
|
mtx->lock = OS_UNFAIR_LOCK_INIT;
|
||||||
#elif (defined(JEMALLOC_OSSPIN))
|
#elif (defined(JEMALLOC_OSSPIN))
|
||||||
@ -17,8 +18,9 @@ mtx_init(mtx_t *mtx)
|
|||||||
#else
|
#else
|
||||||
pthread_mutexattr_t attr;
|
pthread_mutexattr_t attr;
|
||||||
|
|
||||||
if (pthread_mutexattr_init(&attr) != 0)
|
if (pthread_mutexattr_init(&attr) != 0) {
|
||||||
return (true);
|
return (true);
|
||||||
|
}
|
||||||
pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_DEFAULT);
|
pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_DEFAULT);
|
||||||
if (pthread_mutex_init(&mtx->lock, &attr) != 0) {
|
if (pthread_mutex_init(&mtx->lock, &attr) != 0) {
|
||||||
pthread_mutexattr_destroy(&attr);
|
pthread_mutexattr_destroy(&attr);
|
||||||
@ -30,8 +32,7 @@ mtx_init(mtx_t *mtx)
|
|||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
mtx_fini(mtx_t *mtx)
|
mtx_fini(mtx_t *mtx) {
|
||||||
{
|
|
||||||
#ifdef _WIN32
|
#ifdef _WIN32
|
||||||
#elif (defined(JEMALLOC_OS_UNFAIR_LOCK))
|
#elif (defined(JEMALLOC_OS_UNFAIR_LOCK))
|
||||||
#elif (defined(JEMALLOC_OSSPIN))
|
#elif (defined(JEMALLOC_OSSPIN))
|
||||||
@ -41,8 +42,7 @@ mtx_fini(mtx_t *mtx)
|
|||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
mtx_lock(mtx_t *mtx)
|
mtx_lock(mtx_t *mtx) {
|
||||||
{
|
|
||||||
#ifdef _WIN32
|
#ifdef _WIN32
|
||||||
EnterCriticalSection(&mtx->lock);
|
EnterCriticalSection(&mtx->lock);
|
||||||
#elif (defined(JEMALLOC_OS_UNFAIR_LOCK))
|
#elif (defined(JEMALLOC_OS_UNFAIR_LOCK))
|
||||||
@ -55,8 +55,7 @@ mtx_lock(mtx_t *mtx)
|
|||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
mtx_unlock(mtx_t *mtx)
|
mtx_unlock(mtx_t *mtx) {
|
||||||
{
|
|
||||||
#ifdef _WIN32
|
#ifdef _WIN32
|
||||||
LeaveCriticalSection(&mtx->lock);
|
LeaveCriticalSection(&mtx->lock);
|
||||||
#elif (defined(JEMALLOC_OS_UNFAIR_LOCK))
|
#elif (defined(JEMALLOC_OS_UNFAIR_LOCK))
|
||||||
|
@ -7,8 +7,7 @@ static const char * test_name = "";
|
|||||||
|
|
||||||
JEMALLOC_FORMAT_PRINTF(1, 2)
|
JEMALLOC_FORMAT_PRINTF(1, 2)
|
||||||
void
|
void
|
||||||
test_skip(const char *format, ...)
|
test_skip(const char *format, ...) {
|
||||||
{
|
|
||||||
va_list ap;
|
va_list ap;
|
||||||
|
|
||||||
va_start(ap, format);
|
va_start(ap, format);
|
||||||
@ -20,8 +19,7 @@ test_skip(const char *format, ...)
|
|||||||
|
|
||||||
JEMALLOC_FORMAT_PRINTF(1, 2)
|
JEMALLOC_FORMAT_PRINTF(1, 2)
|
||||||
void
|
void
|
||||||
test_fail(const char *format, ...)
|
test_fail(const char *format, ...) {
|
||||||
{
|
|
||||||
va_list ap;
|
va_list ap;
|
||||||
|
|
||||||
va_start(ap, format);
|
va_start(ap, format);
|
||||||
@ -32,8 +30,7 @@ test_fail(const char *format, ...)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static const char *
|
static const char *
|
||||||
test_status_string(test_status_t test_status)
|
test_status_string(test_status_t test_status) {
|
||||||
{
|
|
||||||
switch (test_status) {
|
switch (test_status) {
|
||||||
case test_status_pass: return "pass";
|
case test_status_pass: return "pass";
|
||||||
case test_status_skip: return "skip";
|
case test_status_skip: return "skip";
|
||||||
@ -43,23 +40,20 @@ test_status_string(test_status_t test_status)
|
|||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
p_test_init(const char *name)
|
p_test_init(const char *name) {
|
||||||
{
|
|
||||||
test_count++;
|
test_count++;
|
||||||
test_status = test_status_pass;
|
test_status = test_status_pass;
|
||||||
test_name = name;
|
test_name = name;
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
p_test_fini(void)
|
p_test_fini(void) {
|
||||||
{
|
|
||||||
test_counts[test_status]++;
|
test_counts[test_status]++;
|
||||||
malloc_printf("%s: %s\n", test_name, test_status_string(test_status));
|
malloc_printf("%s: %s\n", test_name, test_status_string(test_status));
|
||||||
}
|
}
|
||||||
|
|
||||||
static test_status_t
|
static test_status_t
|
||||||
p_test_impl(bool do_malloc_init, test_t *t, va_list ap)
|
p_test_impl(bool do_malloc_init, test_t *t, va_list ap) {
|
||||||
{
|
|
||||||
test_status_t ret;
|
test_status_t ret;
|
||||||
|
|
||||||
if (do_malloc_init) {
|
if (do_malloc_init) {
|
||||||
@ -78,9 +72,10 @@ p_test_impl(bool do_malloc_init, test_t *t, va_list ap)
|
|||||||
ret = test_status_pass;
|
ret = test_status_pass;
|
||||||
for (; t != NULL; t = va_arg(ap, test_t *)) {
|
for (; t != NULL; t = va_arg(ap, test_t *)) {
|
||||||
t();
|
t();
|
||||||
if (test_status > ret)
|
if (test_status > ret) {
|
||||||
ret = test_status;
|
ret = test_status;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
malloc_printf("--- %s: %u/%u, %s: %u/%u, %s: %u/%u ---\n",
|
malloc_printf("--- %s: %u/%u, %s: %u/%u, %s: %u/%u ---\n",
|
||||||
test_status_string(test_status_pass),
|
test_status_string(test_status_pass),
|
||||||
@ -94,8 +89,7 @@ p_test_impl(bool do_malloc_init, test_t *t, va_list ap)
|
|||||||
}
|
}
|
||||||
|
|
||||||
test_status_t
|
test_status_t
|
||||||
p_test(test_t *t, ...)
|
p_test(test_t *t, ...) {
|
||||||
{
|
|
||||||
test_status_t ret;
|
test_status_t ret;
|
||||||
va_list ap;
|
va_list ap;
|
||||||
|
|
||||||
@ -108,8 +102,7 @@ p_test(test_t *t, ...)
|
|||||||
}
|
}
|
||||||
|
|
||||||
test_status_t
|
test_status_t
|
||||||
p_test_no_malloc_init(test_t *t, ...)
|
p_test_no_malloc_init(test_t *t, ...) {
|
||||||
{
|
|
||||||
test_status_t ret;
|
test_status_t ret;
|
||||||
va_list ap;
|
va_list ap;
|
||||||
|
|
||||||
@ -122,8 +115,7 @@ p_test_no_malloc_init(test_t *t, ...)
|
|||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
p_test_fail(const char *prefix, const char *message)
|
p_test_fail(const char *prefix, const char *message) {
|
||||||
{
|
|
||||||
malloc_cprintf(NULL, NULL, "%s%s\n", prefix, message);
|
malloc_cprintf(NULL, NULL, "%s%s\n", prefix, message);
|
||||||
test_status = test_status_fail;
|
test_status = test_status_fail;
|
||||||
}
|
}
|
||||||
|
@ -2,17 +2,16 @@
|
|||||||
|
|
||||||
#ifdef _WIN32
|
#ifdef _WIN32
|
||||||
void
|
void
|
||||||
thd_create(thd_t *thd, void *(*proc)(void *), void *arg)
|
thd_create(thd_t *thd, void *(*proc)(void *), void *arg) {
|
||||||
{
|
|
||||||
LPTHREAD_START_ROUTINE routine = (LPTHREAD_START_ROUTINE)proc;
|
LPTHREAD_START_ROUTINE routine = (LPTHREAD_START_ROUTINE)proc;
|
||||||
*thd = CreateThread(NULL, 0, routine, arg, 0, NULL);
|
*thd = CreateThread(NULL, 0, routine, arg, 0, NULL);
|
||||||
if (*thd == NULL)
|
if (*thd == NULL) {
|
||||||
test_fail("Error in CreateThread()\n");
|
test_fail("Error in CreateThread()\n");
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
thd_join(thd_t thd, void **ret)
|
thd_join(thd_t thd, void **ret) {
|
||||||
{
|
|
||||||
if (WaitForSingleObject(thd, INFINITE) == WAIT_OBJECT_0 && ret) {
|
if (WaitForSingleObject(thd, INFINITE) == WAIT_OBJECT_0 && ret) {
|
||||||
DWORD exit_code;
|
DWORD exit_code;
|
||||||
GetExitCodeThread(thd, (LPDWORD) &exit_code);
|
GetExitCodeThread(thd, (LPDWORD) &exit_code);
|
||||||
@ -22,15 +21,14 @@ thd_join(thd_t thd, void **ret)
|
|||||||
|
|
||||||
#else
|
#else
|
||||||
void
|
void
|
||||||
thd_create(thd_t *thd, void *(*proc)(void *), void *arg)
|
thd_create(thd_t *thd, void *(*proc)(void *), void *arg) {
|
||||||
{
|
if (pthread_create(thd, NULL, proc, arg) != 0) {
|
||||||
if (pthread_create(thd, NULL, proc, arg) != 0)
|
|
||||||
test_fail("Error in pthread_create()\n");
|
test_fail("Error in pthread_create()\n");
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
thd_join(thd_t thd, void **ret)
|
thd_join(thd_t thd, void **ret) {
|
||||||
{
|
|
||||||
pthread_join(thd, ret);
|
pthread_join(thd, ret);
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
@ -1,22 +1,19 @@
|
|||||||
#include "test/jemalloc_test.h"
|
#include "test/jemalloc_test.h"
|
||||||
|
|
||||||
void
|
void
|
||||||
timer_start(timedelta_t *timer)
|
timer_start(timedelta_t *timer) {
|
||||||
{
|
|
||||||
nstime_init(&timer->t0, 0);
|
nstime_init(&timer->t0, 0);
|
||||||
nstime_update(&timer->t0);
|
nstime_update(&timer->t0);
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
timer_stop(timedelta_t *timer)
|
timer_stop(timedelta_t *timer) {
|
||||||
{
|
|
||||||
nstime_copy(&timer->t1, &timer->t0);
|
nstime_copy(&timer->t1, &timer->t0);
|
||||||
nstime_update(&timer->t1);
|
nstime_update(&timer->t1);
|
||||||
}
|
}
|
||||||
|
|
||||||
uint64_t
|
uint64_t
|
||||||
timer_usec(const timedelta_t *timer)
|
timer_usec(const timedelta_t *timer) {
|
||||||
{
|
|
||||||
nstime_t delta;
|
nstime_t delta;
|
||||||
|
|
||||||
nstime_copy(&delta, &timer->t1);
|
nstime_copy(&delta, &timer->t1);
|
||||||
@ -25,8 +22,7 @@ timer_usec(const timedelta_t *timer)
|
|||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
timer_ratio(timedelta_t *a, timedelta_t *b, char *buf, size_t buflen)
|
timer_ratio(timedelta_t *a, timedelta_t *b, char *buf, size_t buflen) {
|
||||||
{
|
|
||||||
uint64_t t0 = timer_usec(a);
|
uint64_t t0 = timer_usec(a);
|
||||||
uint64_t t1 = timer_usec(b);
|
uint64_t t1 = timer_usec(b);
|
||||||
uint64_t mult;
|
uint64_t mult;
|
||||||
@ -36,11 +32,13 @@ timer_ratio(timedelta_t *a, timedelta_t *b, char *buf, size_t buflen)
|
|||||||
/* Whole. */
|
/* Whole. */
|
||||||
n = malloc_snprintf(&buf[i], buflen-i, "%"FMTu64, t0 / t1);
|
n = malloc_snprintf(&buf[i], buflen-i, "%"FMTu64, t0 / t1);
|
||||||
i += n;
|
i += n;
|
||||||
if (i >= buflen)
|
if (i >= buflen) {
|
||||||
return;
|
return;
|
||||||
|
}
|
||||||
mult = 1;
|
mult = 1;
|
||||||
for (j = 0; j < n; j++)
|
for (j = 0; j < n; j++) {
|
||||||
mult *= 10;
|
mult *= 10;
|
||||||
|
}
|
||||||
|
|
||||||
/* Decimal. */
|
/* Decimal. */
|
||||||
n = malloc_snprintf(&buf[i], buflen-i, ".");
|
n = malloc_snprintf(&buf[i], buflen-i, ".");
|
||||||
|
@ -2,22 +2,22 @@
|
|||||||
|
|
||||||
JEMALLOC_INLINE_C void
|
JEMALLOC_INLINE_C void
|
||||||
time_func(timedelta_t *timer, uint64_t nwarmup, uint64_t niter,
|
time_func(timedelta_t *timer, uint64_t nwarmup, uint64_t niter,
|
||||||
void (*func)(void))
|
void (*func)(void)) {
|
||||||
{
|
|
||||||
uint64_t i;
|
uint64_t i;
|
||||||
|
|
||||||
for (i = 0; i < nwarmup; i++)
|
for (i = 0; i < nwarmup; i++) {
|
||||||
func();
|
func();
|
||||||
|
}
|
||||||
timer_start(timer);
|
timer_start(timer);
|
||||||
for (i = 0; i < niter; i++)
|
for (i = 0; i < niter; i++) {
|
||||||
func();
|
func();
|
||||||
|
}
|
||||||
timer_stop(timer);
|
timer_stop(timer);
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
compare_funcs(uint64_t nwarmup, uint64_t niter, const char *name_a,
|
compare_funcs(uint64_t nwarmup, uint64_t niter, const char *name_a,
|
||||||
void (*func_a), const char *name_b, void (*func_b))
|
void (*func_a), const char *name_b, void (*func_b)) {
|
||||||
{
|
|
||||||
timedelta_t timer_a, timer_b;
|
timedelta_t timer_a, timer_b;
|
||||||
char ratio_buf[6];
|
char ratio_buf[6];
|
||||||
void *p;
|
void *p;
|
||||||
@ -41,8 +41,7 @@ compare_funcs(uint64_t nwarmup, uint64_t niter, const char *name_a,
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
malloc_free(void)
|
malloc_free(void) {
|
||||||
{
|
|
||||||
/* The compiler can optimize away free(malloc(1))! */
|
/* The compiler can optimize away free(malloc(1))! */
|
||||||
void *p = malloc(1);
|
void *p = malloc(1);
|
||||||
if (p == NULL) {
|
if (p == NULL) {
|
||||||
@ -53,8 +52,7 @@ malloc_free(void)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
mallocx_free(void)
|
mallocx_free(void) {
|
||||||
{
|
|
||||||
void *p = mallocx(1, 0);
|
void *p = mallocx(1, 0);
|
||||||
if (p == NULL) {
|
if (p == NULL) {
|
||||||
test_fail("Unexpected mallocx() failure");
|
test_fail("Unexpected mallocx() failure");
|
||||||
@ -63,16 +61,14 @@ mallocx_free(void)
|
|||||||
free(p);
|
free(p);
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_BEGIN(test_malloc_vs_mallocx)
|
TEST_BEGIN(test_malloc_vs_mallocx) {
|
||||||
{
|
|
||||||
compare_funcs(10*1000*1000, 100*1000*1000, "malloc",
|
compare_funcs(10*1000*1000, 100*1000*1000, "malloc",
|
||||||
malloc_free, "mallocx", mallocx_free);
|
malloc_free, "mallocx", mallocx_free);
|
||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
static void
|
static void
|
||||||
malloc_dallocx(void)
|
malloc_dallocx(void) {
|
||||||
{
|
|
||||||
void *p = malloc(1);
|
void *p = malloc(1);
|
||||||
if (p == NULL) {
|
if (p == NULL) {
|
||||||
test_fail("Unexpected malloc() failure");
|
test_fail("Unexpected malloc() failure");
|
||||||
@ -82,8 +78,7 @@ malloc_dallocx(void)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
malloc_sdallocx(void)
|
malloc_sdallocx(void) {
|
||||||
{
|
|
||||||
void *p = malloc(1);
|
void *p = malloc(1);
|
||||||
if (p == NULL) {
|
if (p == NULL) {
|
||||||
test_fail("Unexpected malloc() failure");
|
test_fail("Unexpected malloc() failure");
|
||||||
@ -92,23 +87,20 @@ malloc_sdallocx(void)
|
|||||||
sdallocx(p, 1, 0);
|
sdallocx(p, 1, 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_BEGIN(test_free_vs_dallocx)
|
TEST_BEGIN(test_free_vs_dallocx) {
|
||||||
{
|
|
||||||
compare_funcs(10*1000*1000, 100*1000*1000, "free", malloc_free,
|
compare_funcs(10*1000*1000, 100*1000*1000, "free", malloc_free,
|
||||||
"dallocx", malloc_dallocx);
|
"dallocx", malloc_dallocx);
|
||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_dallocx_vs_sdallocx)
|
TEST_BEGIN(test_dallocx_vs_sdallocx) {
|
||||||
{
|
|
||||||
compare_funcs(10*1000*1000, 100*1000*1000, "dallocx", malloc_dallocx,
|
compare_funcs(10*1000*1000, 100*1000*1000, "dallocx", malloc_dallocx,
|
||||||
"sdallocx", malloc_sdallocx);
|
"sdallocx", malloc_sdallocx);
|
||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
static void
|
static void
|
||||||
malloc_mus_free(void)
|
malloc_mus_free(void) {
|
||||||
{
|
|
||||||
void *p;
|
void *p;
|
||||||
|
|
||||||
p = malloc(1);
|
p = malloc(1);
|
||||||
@ -121,8 +113,7 @@ malloc_mus_free(void)
|
|||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
malloc_sallocx_free(void)
|
malloc_sallocx_free(void) {
|
||||||
{
|
|
||||||
void *p;
|
void *p;
|
||||||
|
|
||||||
p = malloc(1);
|
p = malloc(1);
|
||||||
@ -130,21 +121,20 @@ malloc_sallocx_free(void)
|
|||||||
test_fail("Unexpected malloc() failure");
|
test_fail("Unexpected malloc() failure");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (sallocx(p, 0) < 1)
|
if (sallocx(p, 0) < 1) {
|
||||||
test_fail("Unexpected sallocx() failure");
|
test_fail("Unexpected sallocx() failure");
|
||||||
|
}
|
||||||
free(p);
|
free(p);
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_BEGIN(test_mus_vs_sallocx)
|
TEST_BEGIN(test_mus_vs_sallocx) {
|
||||||
{
|
|
||||||
compare_funcs(10*1000*1000, 100*1000*1000, "malloc_usable_size",
|
compare_funcs(10*1000*1000, 100*1000*1000, "malloc_usable_size",
|
||||||
malloc_mus_free, "sallocx", malloc_sallocx_free);
|
malloc_mus_free, "sallocx", malloc_sallocx_free);
|
||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
static void
|
static void
|
||||||
malloc_nallocx_free(void)
|
malloc_nallocx_free(void) {
|
||||||
{
|
|
||||||
void *p;
|
void *p;
|
||||||
|
|
||||||
p = malloc(1);
|
p = malloc(1);
|
||||||
@ -152,21 +142,20 @@ malloc_nallocx_free(void)
|
|||||||
test_fail("Unexpected malloc() failure");
|
test_fail("Unexpected malloc() failure");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (nallocx(1, 0) < 1)
|
if (nallocx(1, 0) < 1) {
|
||||||
test_fail("Unexpected nallocx() failure");
|
test_fail("Unexpected nallocx() failure");
|
||||||
|
}
|
||||||
free(p);
|
free(p);
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_BEGIN(test_sallocx_vs_nallocx)
|
TEST_BEGIN(test_sallocx_vs_nallocx) {
|
||||||
{
|
|
||||||
compare_funcs(10*1000*1000, 100*1000*1000, "sallocx",
|
compare_funcs(10*1000*1000, 100*1000*1000, "sallocx",
|
||||||
malloc_sallocx_free, "nallocx", malloc_nallocx_free);
|
malloc_sallocx_free, "nallocx", malloc_nallocx_free);
|
||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
int
|
int
|
||||||
main(void)
|
main(void) {
|
||||||
{
|
|
||||||
return (test(
|
return (test(
|
||||||
test_malloc_vs_mallocx,
|
test_malloc_vs_mallocx,
|
||||||
test_free_vs_dallocx,
|
test_free_vs_dallocx,
|
||||||
|
@ -1449,8 +1449,7 @@ static const uint64_t init_by_array_64_expected[] = {
|
|||||||
KQU(15570163926716513029), KQU(13356980519185762498)
|
KQU(15570163926716513029), KQU(13356980519185762498)
|
||||||
};
|
};
|
||||||
|
|
||||||
TEST_BEGIN(test_gen_rand_32)
|
TEST_BEGIN(test_gen_rand_32) {
|
||||||
{
|
|
||||||
uint32_t array32[BLOCK_SIZE] JEMALLOC_ATTR(aligned(16));
|
uint32_t array32[BLOCK_SIZE] JEMALLOC_ATTR(aligned(16));
|
||||||
uint32_t array32_2[BLOCK_SIZE] JEMALLOC_ATTR(aligned(16));
|
uint32_t array32_2[BLOCK_SIZE] JEMALLOC_ATTR(aligned(16));
|
||||||
int i;
|
int i;
|
||||||
@ -1484,8 +1483,7 @@ TEST_BEGIN(test_gen_rand_32)
|
|||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_by_array_32)
|
TEST_BEGIN(test_by_array_32) {
|
||||||
{
|
|
||||||
uint32_t array32[BLOCK_SIZE] JEMALLOC_ATTR(aligned(16));
|
uint32_t array32[BLOCK_SIZE] JEMALLOC_ATTR(aligned(16));
|
||||||
uint32_t array32_2[BLOCK_SIZE] JEMALLOC_ATTR(aligned(16));
|
uint32_t array32_2[BLOCK_SIZE] JEMALLOC_ATTR(aligned(16));
|
||||||
int i;
|
int i;
|
||||||
@ -1520,8 +1518,7 @@ TEST_BEGIN(test_by_array_32)
|
|||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_gen_rand_64)
|
TEST_BEGIN(test_gen_rand_64) {
|
||||||
{
|
|
||||||
uint64_t array64[BLOCK_SIZE64] JEMALLOC_ATTR(aligned(16));
|
uint64_t array64[BLOCK_SIZE64] JEMALLOC_ATTR(aligned(16));
|
||||||
uint64_t array64_2[BLOCK_SIZE64] JEMALLOC_ATTR(aligned(16));
|
uint64_t array64_2[BLOCK_SIZE64] JEMALLOC_ATTR(aligned(16));
|
||||||
int i;
|
int i;
|
||||||
@ -1556,8 +1553,7 @@ TEST_BEGIN(test_gen_rand_64)
|
|||||||
}
|
}
|
||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
TEST_BEGIN(test_by_array_64)
|
TEST_BEGIN(test_by_array_64) {
|
||||||
{
|
|
||||||
uint64_t array64[BLOCK_SIZE64] JEMALLOC_ATTR(aligned(16));
|
uint64_t array64[BLOCK_SIZE64] JEMALLOC_ATTR(aligned(16));
|
||||||
uint64_t array64_2[BLOCK_SIZE64] JEMALLOC_ATTR(aligned(16));
|
uint64_t array64_2[BLOCK_SIZE64] JEMALLOC_ATTR(aligned(16));
|
||||||
int i;
|
int i;
|
||||||
@ -1594,8 +1590,7 @@ TEST_BEGIN(test_by_array_64)
|
|||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
int
|
int
|
||||||
main(void)
|
main(void) {
|
||||||
{
|
|
||||||
return (test(
|
return (test(
|
||||||
test_gen_rand_32,
|
test_gen_rand_32,
|
||||||
test_by_array_32,
|
test_by_array_32,
|
||||||
|
@ -1,7 +1,6 @@
|
|||||||
#include "test/jemalloc_test.h"
|
#include "test/jemalloc_test.h"
|
||||||
|
|
||||||
TEST_BEGIN(test_a0)
|
TEST_BEGIN(test_a0) {
|
||||||
{
|
|
||||||
void *p;
|
void *p;
|
||||||
|
|
||||||
p = a0malloc(1);
|
p = a0malloc(1);
|
||||||
@ -11,8 +10,7 @@ TEST_BEGIN(test_a0)
|
|||||||
TEST_END
|
TEST_END
|
||||||
|
|
||||||
int
|
int
|
||||||
main(void)
|
main(void) {
|
||||||
{
|
|
||||||
return (test_no_malloc_init(
|
return (test_no_malloc_init(
|
||||||
test_a0));
|
test_a0));
|
||||||
}
|
}
|
||||||
|
@ -5,8 +5,7 @@
#include "test/extent_hooks.h"

static unsigned
get_nsizes_impl(const char *cmd)
get_nsizes_impl(const char *cmd) {
{
unsigned ret;
size_t z;

@ -18,20 +17,17 @@ get_nsizes_impl(const char *cmd)
}

static unsigned
get_nsmall(void)
get_nsmall(void) {
{
return (get_nsizes_impl("arenas.nbins"));
}

static unsigned
get_nlarge(void)
get_nlarge(void) {
{
return (get_nsizes_impl("arenas.nlextents"));
}

static size_t
get_size_impl(const char *cmd, size_t ind)
get_size_impl(const char *cmd, size_t ind) {
{
size_t ret;
size_t z;
size_t mib[4];
@ -49,35 +45,33 @@ get_size_impl(const char *cmd, size_t ind)
}

static size_t
get_small_size(size_t ind)
get_small_size(size_t ind) {
{
return (get_size_impl("arenas.bin.0.size", ind));
}

static size_t
get_large_size(size_t ind)
get_large_size(size_t ind) {
{
return (get_size_impl("arenas.lextent.0.size", ind));
}

/* Like ivsalloc(), but safe to call on discarded allocations. */
static size_t
vsalloc(tsdn_t *tsdn, const void *ptr)
vsalloc(tsdn_t *tsdn, const void *ptr) {
{
extent_t *extent;

extent = extent_lookup(tsdn, ptr, false);
if (extent == NULL)
if (extent == NULL) {
return (0);
if (!extent_active_get(extent))
}
if (!extent_active_get(extent)) {
return (0);
}

return (isalloc(tsdn, extent, ptr));
}

static unsigned
do_arena_create(extent_hooks_t *h)
do_arena_create(extent_hooks_t *h) {
{
unsigned arena_ind;
size_t sz = sizeof(unsigned);
assert_d_eq(mallctl("arenas.create", (void *)&arena_ind, &sz,
@ -87,8 +81,7 @@ do_arena_create(extent_hooks_t *h)
}

static void
do_arena_reset_pre(unsigned arena_ind, void ***ptrs, unsigned *nptrs)
do_arena_reset_pre(unsigned arena_ind, void ***ptrs, unsigned *nptrs) {
{
#define NLARGE 32
unsigned nsmall, nlarge, i;
size_t sz;
@ -127,8 +120,7 @@ do_arena_reset_pre(unsigned arena_ind, void ***ptrs, unsigned *nptrs)
}

static void
do_arena_reset_post(void **ptrs, unsigned nptrs)
do_arena_reset_post(void **ptrs, unsigned nptrs) {
{
tsdn_t *tsdn;
unsigned i;

@ -144,8 +136,7 @@ do_arena_reset_post(void **ptrs, unsigned nptrs)
}

static void
do_arena_reset_destroy(const char *name, unsigned arena_ind)
do_arena_reset_destroy(const char *name, unsigned arena_ind) {
{
size_t mib[3];
size_t miblen;

@ -158,19 +149,16 @@ do_arena_reset_destroy(const char *name, unsigned arena_ind)
}

static void
do_arena_reset(unsigned arena_ind)
do_arena_reset(unsigned arena_ind) {
{
do_arena_reset_destroy("arena.0.reset", arena_ind);
}

static void
do_arena_destroy(unsigned arena_ind)
do_arena_destroy(unsigned arena_ind) {
{
do_arena_reset_destroy("arena.0.destroy", arena_ind);
}

TEST_BEGIN(test_arena_reset)
TEST_BEGIN(test_arena_reset) {
{
unsigned arena_ind;
void **ptrs;
unsigned nptrs;
@ -183,8 +171,7 @@ TEST_BEGIN(test_arena_reset)
TEST_END

static bool
arena_i_initialized(unsigned arena_ind, bool refresh)
arena_i_initialized(unsigned arena_ind, bool refresh) {
{
bool initialized;
size_t mib[3];
size_t miblen, sz;
@ -206,15 +193,13 @@ arena_i_initialized(unsigned arena_ind, bool refresh)
return (initialized);
}

TEST_BEGIN(test_arena_destroy_initial)
TEST_BEGIN(test_arena_destroy_initial) {
{
assert_false(arena_i_initialized(MALLCTL_ARENAS_DESTROYED, false),
"Destroyed arena stats should not be initialized");
}
TEST_END

TEST_BEGIN(test_arena_destroy_hooks_default)
TEST_BEGIN(test_arena_destroy_hooks_default) {
{
unsigned arena_ind, arena_ind_another, arena_ind_prev;
void **ptrs;
unsigned nptrs;
@ -260,8 +245,7 @@ TEST_END
*/
static bool
extent_dalloc_unmap(extent_hooks_t *extent_hooks, void *addr, size_t size,
bool committed, unsigned arena_ind)
bool committed, unsigned arena_ind) {
{
TRACE_HOOK("%s(extent_hooks=%p, addr=%p, size=%zu, committed=%s, "
"arena_ind=%u)\n", __func__, extent_hooks, addr, size, committed ?
"true" : "false", arena_ind);
@ -270,8 +254,9 @@ extent_dalloc_unmap(extent_hooks_t *extent_hooks, void *addr, size_t size,
assert_ptr_eq(extent_hooks->dalloc, extent_dalloc_unmap,
"Wrong hook function");
called_dalloc = true;
if (!try_dalloc)
if (!try_dalloc) {
return (true);
}
pages_unmap(addr, size);
did_dalloc = true;
return (false);
@ -290,8 +275,7 @@ static extent_hooks_t hooks_unmap = {
extent_merge_hook
};

TEST_BEGIN(test_arena_destroy_hooks_unmap)
TEST_BEGIN(test_arena_destroy_hooks_unmap) {
{
unsigned arena_ind;
void **ptrs;
unsigned nptrs;
@ -328,8 +312,7 @@ TEST_BEGIN(test_arena_destroy_hooks_unmap)
TEST_END

int
main(void)
main(void) {
{
return (test(
test_arena_reset,
test_arena_destroy_initial,
@ -66,8 +66,7 @@ typedef struct p##_test_s p##_test_t;
} while (0)

TEST_STRUCT(u64, uint64_t)
TEST_BEGIN(test_atomic_u64)
TEST_BEGIN(test_atomic_u64) {
{
#if !(LG_SIZEOF_PTR == 3 || LG_SIZEOF_INT == 3)
test_skip("64-bit atomic operations not supported");
#else
@ -77,36 +76,31 @@ TEST_BEGIN(test_atomic_u64)
TEST_END

TEST_STRUCT(u32, uint32_t)
TEST_BEGIN(test_atomic_u32)
TEST_BEGIN(test_atomic_u32) {
{
TEST_BODY(u32, uint32_t, uint32_t, u32, "#"FMTx32);
}
TEST_END

TEST_STRUCT(p, void *)
TEST_BEGIN(test_atomic_p)
TEST_BEGIN(test_atomic_p) {
{
TEST_BODY(p, void *, uintptr_t, ptr, "p");
}
TEST_END

TEST_STRUCT(zu, size_t)
TEST_BEGIN(test_atomic_zu)
TEST_BEGIN(test_atomic_zu) {
{
TEST_BODY(zu, size_t, size_t, zu, "#zx");
}
TEST_END

TEST_STRUCT(u, unsigned)
TEST_BEGIN(test_atomic_u)
TEST_BEGIN(test_atomic_u) {
{
TEST_BODY(u, unsigned, unsigned, u, "#x");
}
TEST_END

int
main(void)
main(void) {
{
return (test(
test_atomic_u64,
test_atomic_u32,
@ -24,8 +24,7 @@ static extent_hooks_t hooks_not_null = {
NULL /* merge */
};

TEST_BEGIN(test_base_hooks_default)
TEST_BEGIN(test_base_hooks_default) {
{
tsdn_t *tsdn;
base_t *base;
size_t allocated0, allocated1, resident, mapped;
@ -52,8 +51,7 @@ TEST_BEGIN(test_base_hooks_default)
}
TEST_END

TEST_BEGIN(test_base_hooks_null)
TEST_BEGIN(test_base_hooks_null) {
{
extent_hooks_t hooks_orig;
tsdn_t *tsdn;
base_t *base;
@ -92,8 +90,7 @@ TEST_BEGIN(test_base_hooks_null)
}
TEST_END

TEST_BEGIN(test_base_hooks_not_null)
TEST_BEGIN(test_base_hooks_not_null) {
{
extent_hooks_t hooks_orig;
tsdn_t *tsdn;
base_t *base;
@ -214,8 +211,7 @@ TEST_BEGIN(test_base_hooks_not_null)
TEST_END

int
main(void)
main(void) {
{
return (test(
test_base_hooks_default,
test_base_hooks_null,
@ -93,8 +93,7 @@
NB(16384) \

static void
test_bitmap_initializer_body(const bitmap_info_t *binfo, size_t nbits)
test_bitmap_initializer_body(const bitmap_info_t *binfo, size_t nbits) {
{
bitmap_info_t binfo_dyn;
bitmap_info_init(&binfo_dyn, nbits);

@ -124,8 +123,7 @@ test_bitmap_initializer_body(const bitmap_info_t *binfo, size_t nbits)
#endif
}

TEST_BEGIN(test_bitmap_initializer)
TEST_BEGIN(test_bitmap_initializer) {
{
#define NB(nbits) { \
if (nbits <= BITMAP_MAXBITS) { \
bitmap_info_t binfo = \
@ -140,8 +138,7 @@ TEST_END

static size_t
test_bitmap_size_body(const bitmap_info_t *binfo, size_t nbits,
size_t prev_size)
size_t prev_size) {
{
size_t size = bitmap_size(binfo);
assert_zu_ge(size, (nbits >> 3),
"Bitmap size is smaller than expected");
@ -149,8 +146,7 @@ test_bitmap_size_body(const bitmap_info_t *binfo, size_t nbits,
return (size);
}

TEST_BEGIN(test_bitmap_size)
TEST_BEGIN(test_bitmap_size) {
{
size_t nbits, prev_size;

prev_size = 0;
@ -171,8 +167,7 @@ TEST_BEGIN(test_bitmap_size)
TEST_END

static void
test_bitmap_init_body(const bitmap_info_t *binfo, size_t nbits)
test_bitmap_init_body(const bitmap_info_t *binfo, size_t nbits) {
{
size_t i;
bitmap_t *bitmap = (bitmap_t *)malloc(bitmap_size(binfo));
assert_ptr_not_null(bitmap, "Unexpected malloc() failure");
@ -185,8 +180,7 @@ test_bitmap_init_body(const bitmap_info_t *binfo, size_t nbits)
free(bitmap);
}

TEST_BEGIN(test_bitmap_init)
TEST_BEGIN(test_bitmap_init) {
{
size_t nbits;

for (nbits = 1; nbits <= BITMAP_MAXBITS; nbits++) {
@ -204,21 +198,20 @@ TEST_BEGIN(test_bitmap_init)
TEST_END

static void
test_bitmap_set_body(const bitmap_info_t *binfo, size_t nbits)
test_bitmap_set_body(const bitmap_info_t *binfo, size_t nbits) {
{
size_t i;
bitmap_t *bitmap = (bitmap_t *)malloc(bitmap_size(binfo));
assert_ptr_not_null(bitmap, "Unexpected malloc() failure");
bitmap_init(bitmap, binfo);

for (i = 0; i < nbits; i++)
for (i = 0; i < nbits; i++) {
bitmap_set(bitmap, binfo, i);
}
assert_true(bitmap_full(bitmap, binfo), "All bits should be set");
free(bitmap);
}

TEST_BEGIN(test_bitmap_set)
TEST_BEGIN(test_bitmap_set) {
{
size_t nbits;

for (nbits = 1; nbits <= BITMAP_MAXBITS; nbits++) {
@ -236,26 +229,27 @@ TEST_BEGIN(test_bitmap_set)
TEST_END

static void
test_bitmap_unset_body(const bitmap_info_t *binfo, size_t nbits)
test_bitmap_unset_body(const bitmap_info_t *binfo, size_t nbits) {
{
size_t i;
bitmap_t *bitmap = (bitmap_t *)malloc(bitmap_size(binfo));
assert_ptr_not_null(bitmap, "Unexpected malloc() failure");
bitmap_init(bitmap, binfo);

for (i = 0; i < nbits; i++)
for (i = 0; i < nbits; i++) {
bitmap_set(bitmap, binfo, i);
}
assert_true(bitmap_full(bitmap, binfo), "All bits should be set");
for (i = 0; i < nbits; i++)
for (i = 0; i < nbits; i++) {
bitmap_unset(bitmap, binfo, i);
for (i = 0; i < nbits; i++)
}
for (i = 0; i < nbits; i++) {
bitmap_set(bitmap, binfo, i);
}
assert_true(bitmap_full(bitmap, binfo), "All bits should be set");
free(bitmap);
}

TEST_BEGIN(test_bitmap_unset)
TEST_BEGIN(test_bitmap_unset) {
{
size_t nbits;

for (nbits = 1; nbits <= BITMAP_MAXBITS; nbits++) {
@ -273,8 +267,7 @@ TEST_BEGIN(test_bitmap_unset)
TEST_END

static void
test_bitmap_sfu_body(const bitmap_info_t *binfo, size_t nbits)
test_bitmap_sfu_body(const bitmap_info_t *binfo, size_t nbits) {
{
size_t i;
bitmap_t *bitmap = (bitmap_t *)malloc(bitmap_size(binfo));
assert_ptr_not_null(bitmap, "Unexpected malloc() failure");
@ -317,8 +310,7 @@ test_bitmap_sfu_body(const bitmap_info_t *binfo, size_t nbits)
free(bitmap);
}

TEST_BEGIN(test_bitmap_sfu)
TEST_BEGIN(test_bitmap_sfu) {
{
size_t nbits;

for (nbits = 1; nbits <= BITMAP_MAXBITS; nbits++) {
@ -336,8 +328,7 @@ TEST_BEGIN(test_bitmap_sfu)
TEST_END

int
main(void)
main(void) {
{
return (test(
test_bitmap_initializer,
test_bitmap_size,
@ -1,7 +1,6 @@
#include "test/jemalloc_test.h"

TEST_BEGIN(test_new_delete)
TEST_BEGIN(test_new_delete) {
{
tsd_t *tsd;
ckh_t ckh;

@ -17,8 +16,7 @@ TEST_BEGIN(test_new_delete)
}
TEST_END

TEST_BEGIN(test_count_insert_search_remove)
TEST_BEGIN(test_count_insert_search_remove) {
{
tsd_t *tsd;
ckh_t ckh;
const char *strs[] = {
@ -105,8 +103,7 @@ TEST_BEGIN(test_count_insert_search_remove)
}
TEST_END

TEST_BEGIN(test_insert_iter_remove)
TEST_BEGIN(test_insert_iter_remove) {
{
#define NITEMS ZU(1000)
tsd_t *tsd;
ckh_t ckh;
@ -174,12 +171,14 @@ TEST_BEGIN(test_insert_iter_remove)
}
}

for (j = 0; j < i + 1; j++)
for (j = 0; j < i + 1; j++) {
assert_true(seen[j], "Item %zu not seen", j);
for (; j < NITEMS; j++)
}
for (; j < NITEMS; j++) {
assert_false(seen[j], "Item %zu seen", j);
}
}
}

for (i = 0; i < NITEMS; i++) {
assert_false(ckh_search(&ckh, p[i], NULL, NULL),
@ -204,8 +203,7 @@ TEST_BEGIN(test_insert_iter_remove)
TEST_END

int
main(void)
main(void) {
{
return (test(
test_new_delete,
test_count_insert_search_remove,
@ -10,22 +10,20 @@ static nstime_t time_mock;
static bool monotonic_mock;

static bool
nstime_monotonic_mock(void)
nstime_monotonic_mock(void) {
{
return (monotonic_mock);
}

static bool
nstime_update_mock(nstime_t *time)
nstime_update_mock(nstime_t *time) {
{
nupdates_mock++;
if (monotonic_mock)
if (monotonic_mock) {
nstime_copy(time, &time_mock);
}
return (!monotonic_mock);
}

TEST_BEGIN(test_decay_ticks)
TEST_BEGIN(test_decay_ticks) {
{
ticker_t *decay_ticker;
unsigned tick0, tick1;
size_t sz, large0;
@ -197,8 +195,7 @@ TEST_BEGIN(test_decay_ticks)
}
TEST_END

TEST_BEGIN(test_decay_ticker)
TEST_BEGIN(test_decay_ticker) {
{
#define NPS 1024
int flags = (MALLOCX_ARENA(0) | MALLOCX_TCACHE_NONE);
void *ps[NPS];
@ -284,14 +281,14 @@ TEST_BEGIN(test_decay_ticker)
nstime_update(&time);
} while (nstime_compare(&time, &deadline) <= 0 && npurge1 == npurge0);

if (config_stats)
if (config_stats) {
assert_u64_gt(npurge1, npurge0, "Expected purging to occur");
}
#undef NPS
}
TEST_END

TEST_BEGIN(test_decay_nonmonotonic)
TEST_BEGIN(test_decay_nonmonotonic) {
{
#define NPS (SMOOTHSTEP_NSTEPS + 1)
int flags = (MALLOCX_ARENA(0) | MALLOCX_TCACHE_NONE);
void *ps[NPS];
@ -343,8 +340,9 @@ TEST_BEGIN(test_decay_nonmonotonic)
assert_d_eq(mallctl("stats.arenas.0.npurge", (void *)&npurge1, &sz,
NULL, 0), config_stats ? 0 : ENOENT, "Unexpected mallctl result");

if (config_stats)
if (config_stats) {
assert_u64_eq(npurge0, npurge1, "Unexpected purging occurred");
}

nstime_monotonic = nstime_monotonic_orig;
nstime_update = nstime_update_orig;
@ -353,8 +351,7 @@ TEST_BEGIN(test_decay_nonmonotonic)
TEST_END

int
main(void)
main(void) {
{
return (test(
test_decay_ticks,
test_decay_ticker,
@ -1,7 +1,6 @@
#include "test/jemalloc_test.h"

TEST_BEGIN(test_small_extent_size)
TEST_BEGIN(test_small_extent_size) {
{
unsigned nbins, i;
size_t sz, extent_size;
size_t mib[4];
@ -35,8 +34,7 @@ TEST_BEGIN(test_small_extent_size)
}
TEST_END

TEST_BEGIN(test_large_extent_size)
TEST_BEGIN(test_large_extent_size) {
{
bool cache_oblivious;
unsigned nlextents, i;
size_t sz, extent_size_prev, ceil_prev;
@ -100,8 +98,7 @@ TEST_BEGIN(test_large_extent_size)
}
TEST_END

TEST_BEGIN(test_monotonic)
TEST_BEGIN(test_monotonic) {
{
#define SZ_MAX ZU(4 * 1024 * 1024)
unsigned i;
size_t floor_prev, ceil_prev;
@ -136,8 +133,7 @@ TEST_BEGIN(test_monotonic)
TEST_END

int
main(void)
main(void) {
{
return (test(
test_small_extent_size,
test_large_extent_size,
@ -4,8 +4,7 @@
#include <sys/wait.h>
#endif

TEST_BEGIN(test_fork)
TEST_BEGIN(test_fork) {
{
#ifndef _WIN32
void *p;
pid_t pid;
@ -32,8 +31,9 @@ TEST_BEGIN(test_fork)

/* Parent. */
while (true) {
if (waitpid(pid, &status, 0) == -1)
if (waitpid(pid, &status, 0) == -1) {
test_fail("Unexpected waitpid() failure");
}
if (WIFSIGNALED(status)) {
test_fail("Unexpected child termination due to "
"signal %d", WTERMSIG(status));
@ -56,8 +56,7 @@ TEST_BEGIN(test_fork)
TEST_END

int
main(void)
main(void) {
{
return (test(
test_fork));
}
@ -36,8 +36,7 @@ typedef enum {
} hash_variant_t;

static int
hash_variant_bits(hash_variant_t variant)
hash_variant_bits(hash_variant_t variant) {
{
switch (variant) {
case hash_variant_x86_32: return (32);
case hash_variant_x86_128: return (128);
@ -47,8 +46,7 @@ hash_variant_bits(hash_variant_t variant)
}

static const char *
hash_variant_string(hash_variant_t variant)
hash_variant_string(hash_variant_t variant) {
{
switch (variant) {
case hash_variant_x86_32: return ("hash_x86_32");
case hash_variant_x86_128: return ("hash_x86_128");
@ -59,8 +57,7 @@ hash_variant_string(hash_variant_t variant)

#define KEY_SIZE 256
static void
hash_variant_verify_key(hash_variant_t variant, uint8_t *key)
hash_variant_verify_key(hash_variant_t variant, uint8_t *key) {
{
const int hashbytes = hash_variant_bits(variant) / 8;
const int hashes_size = hashbytes * 256;
VARIABLE_ARRAY(uint8_t, hashes, hashes_size);
@ -139,39 +136,35 @@ hash_variant_verify_key(hash_variant_t variant, uint8_t *key)
}

static void
hash_variant_verify(hash_variant_t variant)
hash_variant_verify(hash_variant_t variant) {
{
#define MAX_ALIGN 16
uint8_t key[KEY_SIZE + (MAX_ALIGN - 1)];
unsigned i;

for (i = 0; i < MAX_ALIGN; i++)
for (i = 0; i < MAX_ALIGN; i++) {
hash_variant_verify_key(variant, &key[i]);
}
#undef MAX_ALIGN
}
#undef KEY_SIZE

TEST_BEGIN(test_hash_x86_32)
TEST_BEGIN(test_hash_x86_32) {
{
hash_variant_verify(hash_variant_x86_32);
}
TEST_END

TEST_BEGIN(test_hash_x86_128)
TEST_BEGIN(test_hash_x86_128) {
{
hash_variant_verify(hash_variant_x86_128);
}
TEST_END

TEST_BEGIN(test_hash_x64_128)
TEST_BEGIN(test_hash_x64_128) {
{
hash_variant_verify(hash_variant_x64_128);
}
TEST_END

int
main(void)
main(void) {
{
return (test(
test_hash_x86_32,
test_hash_x86_128,
@ -15,15 +15,13 @@ static void *watch_for_junking;
static bool saw_junking;

static void
watch_junking(void *p)
watch_junking(void *p) {
{
watch_for_junking = p;
saw_junking = false;
}

static void
arena_dalloc_junk_small_intercept(void *ptr, const arena_bin_info_t *bin_info)
arena_dalloc_junk_small_intercept(void *ptr, const arena_bin_info_t *bin_info) {
{
size_t i;

arena_dalloc_junk_small_orig(ptr, bin_info);
@ -32,13 +30,13 @@ arena_dalloc_junk_small_intercept(void *ptr, const arena_bin_info_t *bin_info)
"Missing junk fill for byte %zu/%zu of deallocated region",
i, bin_info->reg_size);
}
if (ptr == watch_for_junking)
if (ptr == watch_for_junking) {
saw_junking = true;
}
}

static void
large_dalloc_junk_intercept(void *ptr, size_t usize)
large_dalloc_junk_intercept(void *ptr, size_t usize) {
{
size_t i;

large_dalloc_junk_orig(ptr, usize);
@ -47,21 +45,21 @@ large_dalloc_junk_intercept(void *ptr, size_t usize)
"Missing junk fill for byte %zu/%zu of deallocated region",
i, usize);
}
if (ptr == watch_for_junking)
if (ptr == watch_for_junking) {
saw_junking = true;
}
}

static void
large_dalloc_maybe_junk_intercept(void *ptr, size_t usize)
large_dalloc_maybe_junk_intercept(void *ptr, size_t usize) {
{
large_dalloc_maybe_junk_orig(ptr, usize);
if (ptr == watch_for_junking)
if (ptr == watch_for_junking) {
saw_junking = true;
}
}

static void
test_junk(size_t sz_min, size_t sz_max)
test_junk(size_t sz_min, size_t sz_max) {
{
uint8_t *s;
size_t sz_prev, sz, i;

@ -126,23 +124,20 @@ test_junk(size_t sz_min, size_t sz_max)
}
}

TEST_BEGIN(test_junk_small)
TEST_BEGIN(test_junk_small) {
{
test_skip_if(!config_fill);
test_junk(1, SMALL_MAXCLASS-1);
}
TEST_END

TEST_BEGIN(test_junk_large)
TEST_BEGIN(test_junk_large) {
{
test_skip_if(!config_fill);
test_junk(SMALL_MAXCLASS+1, (1U << (LG_LARGE_MINCLASS+1)));
}
TEST_END

int
main(void)
main(void) {
{
return (test(
test_junk_small,
test_junk_large));
@ -1,7 +1,6 @@
#include "test/jemalloc_test.h"

TEST_BEGIN(test_mallctl_errors)
TEST_BEGIN(test_mallctl_errors) {
{
uint64_t epoch;
size_t sz;

@ -28,8 +27,7 @@ TEST_BEGIN(test_mallctl_errors)
}
TEST_END

TEST_BEGIN(test_mallctlnametomib_errors)
TEST_BEGIN(test_mallctlnametomib_errors) {
{
size_t mib[1];
size_t miblen;

@ -39,8 +37,7 @@ TEST_BEGIN(test_mallctlnametomib_errors)
}
TEST_END

TEST_BEGIN(test_mallctlbymib_errors)
TEST_BEGIN(test_mallctlbymib_errors) {
{
uint64_t epoch;
size_t sz;
size_t mib[1];
@ -76,8 +73,7 @@ TEST_BEGIN(test_mallctlbymib_errors)
}
TEST_END

TEST_BEGIN(test_mallctl_read_write)
TEST_BEGIN(test_mallctl_read_write) {
{
uint64_t old_epoch, new_epoch;
size_t sz = sizeof(old_epoch);

@ -104,8 +100,7 @@ TEST_BEGIN(test_mallctl_read_write)
}
TEST_END

TEST_BEGIN(test_mallctlnametomib_short_mib)
TEST_BEGIN(test_mallctlnametomib_short_mib) {
{
size_t mib[4];
size_t miblen;

@ -119,8 +114,7 @@ TEST_BEGIN(test_mallctlnametomib_short_mib)
}
TEST_END

TEST_BEGIN(test_mallctl_config)
TEST_BEGIN(test_mallctl_config) {
{
#define TEST_MALLCTL_CONFIG(config, t) do { \
t oldval; \
size_t sz = sizeof(oldval); \
@ -149,8 +143,7 @@ TEST_BEGIN(test_mallctl_config)
}
TEST_END

TEST_BEGIN(test_mallctl_opt)
TEST_BEGIN(test_mallctl_opt) {
{
bool config_always = true;

#define TEST_MALLCTL_OPT(t, opt, config) do { \
@ -189,8 +182,7 @@ TEST_BEGIN(test_mallctl_opt)
}
TEST_END

TEST_BEGIN(test_manpage_example)
TEST_BEGIN(test_manpage_example) {
{
unsigned nbins, i;
size_t mib[4];
size_t len, miblen;
@ -214,8 +206,7 @@ TEST_BEGIN(test_manpage_example)
}
TEST_END

TEST_BEGIN(test_tcache_none)
TEST_BEGIN(test_tcache_none) {
{
void *p0, *q, *p1;

test_skip_if(!config_tcache);
@ -240,8 +231,7 @@ TEST_BEGIN(test_tcache_none)
}
TEST_END

TEST_BEGIN(test_tcache)
TEST_BEGIN(test_tcache) {
{
#define NTCACHES 10
unsigned tis[NTCACHES];
void *ps[NTCACHES];
@ -312,11 +302,13 @@ TEST_BEGIN(test_tcache)
assert_ptr_eq(qs[i], q0,
"Expected rallocx() to allocate cached region, i=%u", i);
/* Avoid undefined behavior in case of test failure. */
if (qs[i] == NULL)
if (qs[i] == NULL) {
qs[i] = ps[i];
}
for (i = 0; i < NTCACHES; i++)
}
for (i = 0; i < NTCACHES; i++) {
dallocx(qs[i], MALLOCX_TCACHE(tis[i]));
}

/* Flush some non-empty tcaches. */
for (i = 0; i < NTCACHES/2; i++) {
@ -334,8 +326,7 @@ TEST_BEGIN(test_tcache)
}
TEST_END

TEST_BEGIN(test_thread_arena)
TEST_BEGIN(test_thread_arena) {
{
unsigned arena_old, arena_new, narenas;
size_t sz = sizeof(unsigned);

@ -353,8 +344,7 @@ TEST_BEGIN(test_thread_arena)
}
TEST_END

TEST_BEGIN(test_arena_i_initialized)
TEST_BEGIN(test_arena_i_initialized) {
{
unsigned narenas, i;
size_t sz;
size_t mib[3];
@ -392,8 +382,7 @@ TEST_BEGIN(test_arena_i_initialized)
}
TEST_END

TEST_BEGIN(test_arena_i_decay_time)
TEST_BEGIN(test_arena_i_decay_time) {
{
ssize_t decay_time, orig_decay_time, prev_decay_time;
size_t sz = sizeof(ssize_t);

@ -423,8 +412,7 @@ TEST_BEGIN(test_arena_i_decay_time)
}
TEST_END

TEST_BEGIN(test_arena_i_purge)
TEST_BEGIN(test_arena_i_purge) {
{
unsigned narenas;
size_t sz = sizeof(unsigned);
size_t mib[3];
@ -447,8 +435,7 @@ TEST_BEGIN(test_arena_i_purge)
}
TEST_END

TEST_BEGIN(test_arena_i_decay)
TEST_BEGIN(test_arena_i_decay) {
{
unsigned narenas;
size_t sz = sizeof(unsigned);
size_t mib[3];
@ -471,8 +458,7 @@ TEST_BEGIN(test_arena_i_decay)
}
TEST_END

TEST_BEGIN(test_arena_i_dss)
TEST_BEGIN(test_arena_i_dss) {
{
const char *dss_prec_old, *dss_prec_new;
size_t sz = sizeof(dss_prec_old);
size_t mib[3];
@ -517,8 +503,7 @@ TEST_BEGIN(test_arena_i_dss)
}
TEST_END

TEST_BEGIN(test_arenas_decay_time)
TEST_BEGIN(test_arenas_decay_time) {
{
ssize_t decay_time, orig_decay_time, prev_decay_time;
size_t sz = sizeof(ssize_t);

@ -548,8 +533,7 @@ TEST_BEGIN(test_arenas_decay_time)
}
TEST_END

TEST_BEGIN(test_arenas_constants)
TEST_BEGIN(test_arenas_constants) {
{
#define TEST_ARENAS_CONSTANT(t, name, expected) do { \
t name; \
size_t sz = sizeof(t); \
@ -567,8 +551,7 @@ TEST_BEGIN(test_arenas_constants)
}
TEST_END

TEST_BEGIN(test_arenas_bin_constants)
TEST_BEGIN(test_arenas_bin_constants) {
{
#define TEST_ARENAS_BIN_CONSTANT(t, name, expected) do { \
t name; \
size_t sz = sizeof(t); \
@ -586,8 +569,7 @@ TEST_BEGIN(test_arenas_bin_constants)
}
TEST_END

TEST_BEGIN(test_arenas_lextent_constants)
TEST_BEGIN(test_arenas_lextent_constants) {
{
#define TEST_ARENAS_LEXTENT_CONSTANT(t, name, expected) do { \
t name; \
size_t sz = sizeof(t); \
@ -602,8 +584,7 @@ TEST_BEGIN(test_arenas_lextent_constants)
}
TEST_END

TEST_BEGIN(test_arenas_create)
TEST_BEGIN(test_arenas_create) {
{
unsigned narenas_before, arena, narenas_after;
size_t sz = sizeof(unsigned);

@ -620,8 +601,7 @@ TEST_BEGIN(test_arenas_create)
}
TEST_END

TEST_BEGIN(test_stats_arenas)
TEST_BEGIN(test_stats_arenas) {
{
#define TEST_STATS_ARENAS(t, name) do { \
t name; \
size_t sz = sizeof(t); \
@ -640,8 +620,7 @@ TEST_BEGIN(test_stats_arenas)
TEST_END

int
main(void)
main(void) {
{
return (test(
test_mallctl_errors,
test_mallctlnametomib_errors,
@ -14,30 +14,29 @@
#endif

static bool
double_eq_rel(double a, double b, double max_rel_err, double max_abs_err)
double_eq_rel(double a, double b, double max_rel_err, double max_abs_err) {
{
double rel_err;

if (fabs(a - b) < max_abs_err)
if (fabs(a - b) < max_abs_err) {
return (true);
}
rel_err = (fabs(b) > fabs(a)) ? fabs((a-b)/b) : fabs((a-b)/a);
return (rel_err < max_rel_err);
}

static uint64_t
factorial(unsigned x)
factorial(unsigned x) {
{
uint64_t ret = 1;
unsigned i;

for (i = 2; i <= x; i++)
for (i = 2; i <= x; i++) {
ret *= (uint64_t)i;
}

return (ret);
}

TEST_BEGIN(test_ln_gamma_factorial)
TEST_BEGIN(test_ln_gamma_factorial) {
{
unsigned x;

/* exp(ln_gamma(x)) == (x-1)! for integer x. */
@ -188,8 +187,7 @@ static const double ln_gamma_misc_expected[] = {
359.13420536957539753
};

TEST_BEGIN(test_ln_gamma_misc)
TEST_BEGIN(test_ln_gamma_misc) {
{
unsigned i;

for (i = 1; i < sizeof(ln_gamma_misc_expected)/sizeof(double); i++) {
@ -239,8 +237,7 @@ static const double pt_norm_expected[] = {
1.88079360815125041, 2.05374891063182208, 2.32634787404084076
};

TEST_BEGIN(test_pt_norm)
TEST_BEGIN(test_pt_norm) {
{
unsigned i;

for (i = 1; i < sizeof(pt_norm_expected)/sizeof(double); i++) {
@ -289,8 +286,7 @@ static const double pt_chi2_expected[] = {
1046.4872561869577, 1063.5717461999654, 1107.0741966053859
};

TEST_BEGIN(test_pt_chi2)
TEST_BEGIN(test_pt_chi2) {
{
unsigned i, j;
unsigned e = 0;

@ -351,8 +347,7 @@ static const double pt_gamma_expected[] = {
4.7230515633946677, 5.6417477865306020, 8.4059469148854635
};

TEST_BEGIN(test_pt_gamma_shape)
TEST_BEGIN(test_pt_gamma_shape) {
{
unsigned i, j;
unsigned e = 0;

@ -371,8 +366,7 @@ TEST_BEGIN(test_pt_gamma_shape)
}
TEST_END

TEST_BEGIN(test_pt_gamma_scale)
TEST_BEGIN(test_pt_gamma_scale) {
{
double shape = 1.0;
double ln_gamma_shape = ln_gamma(shape);

@ -385,8 +379,7 @@ TEST_BEGIN(test_pt_gamma_scale)
TEST_END

int
main(void)
main(void) {
{
return (test(
test_ln_gamma_factorial,
test_ln_gamma_misc,
@ -9,8 +9,7 @@ struct mq_msg_s {
};
mq_gen(static, mq_, mq_t, mq_msg_t, link)

TEST_BEGIN(test_mq_basic)
TEST_BEGIN(test_mq_basic) {
{
mq_t mq;
mq_msg_t msg;

@ -31,8 +30,7 @@ TEST_BEGIN(test_mq_basic)
TEST_END

static void *
thd_receiver_start(void *arg)
thd_receiver_start(void *arg) {
{
mq_t *mq = (mq_t *)arg;
unsigned i;

@ -45,8 +43,7 @@ thd_receiver_start(void *arg)
}

static void *
thd_sender_start(void *arg)
thd_sender_start(void *arg) {
{
mq_t *mq = (mq_t *)arg;
unsigned i;

@ -61,8 +58,7 @@ thd_sender_start(void *arg)
return (NULL);
}

TEST_BEGIN(test_mq_threaded)
TEST_BEGIN(test_mq_threaded) {
{
mq_t mq;
thd_t receiver;
thd_t senders[NSENDERS];
@ -71,20 +67,21 @@ TEST_BEGIN(test_mq_threaded)
assert_false(mq_init(&mq), "Unexpected mq_init() failure");

thd_create(&receiver, thd_receiver_start, (void *)&mq);
for (i = 0; i < NSENDERS; i++)
for (i = 0; i < NSENDERS; i++) {
thd_create(&senders[i], thd_sender_start, (void *)&mq);
}

thd_join(receiver, NULL);
for (i = 0; i < NSENDERS; i++)
for (i = 0; i < NSENDERS; i++) {
thd_join(senders[i], NULL);
}

mq_fini(&mq);
}
TEST_END

int
main(void)
main(void) {
{
return (test(
test_mq_basic,
test_mq_threaded));
@ -3,8 +3,7 @@
#define NTHREADS 2
#define NINCRS 2000000

TEST_BEGIN(test_mtx_basic)
TEST_BEGIN(test_mtx_basic) {
{
mtx_t mtx;

assert_false(mtx_init(&mtx), "Unexpected mtx_init() failure");
@ -20,8 +19,7 @@ typedef struct {
} thd_start_arg_t;

static void *
thd_start(void *varg)
thd_start(void *varg) {
{
thd_start_arg_t *arg = (thd_start_arg_t *)varg;
unsigned i;

@ -33,26 +31,26 @@ thd_start(void *varg)
return (NULL);
}

TEST_BEGIN(test_mtx_race)
TEST_BEGIN(test_mtx_race) {
{
thd_start_arg_t arg;
thd_t thds[NTHREADS];
unsigned i;

assert_false(mtx_init(&arg.mtx), "Unexpected mtx_init() failure");
arg.x = 0;
for (i = 0; i < NTHREADS; i++)
for (i = 0; i < NTHREADS; i++) {
thd_create(&thds[i], thd_start, (void *)&arg);
for (i = 0; i < NTHREADS; i++)
}
for (i = 0; i < NTHREADS; i++) {
thd_join(thds[i], NULL);
}
assert_u_eq(arg.x, NTHREADS * NINCRS,
"Race-related counter corruption");
}
TEST_END

int
main(void)
main(void) {
{
return (test(
test_mtx_basic,
test_mtx_race));
@ -2,8 +2,7 @@

#define BILLION UINT64_C(1000000000)

TEST_BEGIN(test_nstime_init)
TEST_BEGIN(test_nstime_init) {
{
nstime_t nst;

nstime_init(&nst, 42000000043);
@ -13,8 +12,7 @@ TEST_BEGIN(test_nstime_init)
}
TEST_END

TEST_BEGIN(test_nstime_init2)
TEST_BEGIN(test_nstime_init2) {
{
nstime_t nst;

nstime_init2(&nst, 42, 43);
@ -23,8 +21,7 @@ TEST_BEGIN(test_nstime_init2)
}
TEST_END

TEST_BEGIN(test_nstime_copy)
TEST_BEGIN(test_nstime_copy) {
{
nstime_t nsta, nstb;

nstime_init2(&nsta, 42, 43);
@ -35,8 +32,7 @@ TEST_BEGIN(test_nstime_copy)
}
TEST_END

TEST_BEGIN(test_nstime_compare)
TEST_BEGIN(test_nstime_compare) {
{
nstime_t nsta, nstb;

nstime_init2(&nsta, 42, 43);
@ -70,8 +66,7 @@ TEST_BEGIN(test_nstime_compare)
}
TEST_END

TEST_BEGIN(test_nstime_add)
TEST_BEGIN(test_nstime_add) {
{
nstime_t nsta, nstb;

nstime_init2(&nsta, 42, 43);
@ -90,8 +85,7 @@ TEST_BEGIN(test_nstime_add)
}
TEST_END

TEST_BEGIN(test_nstime_subtract)
TEST_BEGIN(test_nstime_subtract) {
{
nstime_t nsta, nstb;

nstime_init2(&nsta, 42, 43);
@ -110,8 +104,7 @@ TEST_BEGIN(test_nstime_subtract)
}
TEST_END

TEST_BEGIN(test_nstime_imultiply)
TEST_BEGIN(test_nstime_imultiply) {
{
nstime_t nsta, nstb;

nstime_init2(&nsta, 42, 43);
@ -128,8 +121,7 @@ TEST_BEGIN(test_nstime_imultiply)
}
TEST_END

TEST_BEGIN(test_nstime_idivide)
TEST_BEGIN(test_nstime_idivide) {
{
nstime_t nsta, nstb;

nstime_init2(&nsta, 42, 43);
@ -148,8 +140,7 @@ TEST_BEGIN(test_nstime_idivide)
}
TEST_END

TEST_BEGIN(test_nstime_divide)
TEST_BEGIN(test_nstime_divide) {
{
nstime_t nsta, nstb, nstc;

nstime_init2(&nsta, 42, 43);
@ -176,14 +167,12 @@ TEST_BEGIN(test_nstime_divide)
}
TEST_END

TEST_BEGIN(test_nstime_monotonic)
TEST_BEGIN(test_nstime_monotonic) {
{
nstime_monotonic();
}
TEST_END

TEST_BEGIN(test_nstime_update)
TEST_BEGIN(test_nstime_update) {
{
nstime_t nst;

nstime_init(&nst, 0);
@ -208,8 +197,7 @@ TEST_BEGIN(test_nstime_update)
TEST_END

int
main(void)
main(void) {
{
return (test(
test_nstime_init,
test_nstime_init2,
@ -20,8 +20,7 @@ const char *malloc_conf = "decay_time:-1";
#define NSLABS 8

static unsigned
binind_compute(void)
binind_compute(void) {
{
size_t sz;
unsigned nbins, i;

@ -41,17 +40,17 @@ binind_compute(void)
sz = sizeof(size);
assert_d_eq(mallctlbymib(mib, miblen, (void *)&size, &sz, NULL,
0), 0, "Unexpected mallctlbymib failure");
if (size == SZ)
if (size == SZ) {
return (i);
}
}

test_fail("Unable to compute nregs_per_run");
return (0);
}

static size_t
nregs_per_run_compute(void)
nregs_per_run_compute(void) {
{
uint32_t nregs;
size_t sz;
unsigned binind = binind_compute();
@ -68,8 +67,7 @@ nregs_per_run_compute(void)
}

static unsigned
arenas_create_mallctl(void)
arenas_create_mallctl(void) {
{
unsigned arena_ind;
size_t sz;

@ -81,8 +79,7 @@ arenas_create_mallctl(void)
}

static void
arena_reset_mallctl(unsigned arena_ind)
arena_reset_mallctl(unsigned arena_ind) {
{
size_t mib[3];
size_t miblen = sizeof(mib)/sizeof(size_t);

@ -93,8 +90,7 @@ arena_reset_mallctl(unsigned arena_ind)
"Unexpected mallctlbymib() failure");
}

TEST_BEGIN(test_pack)
TEST_BEGIN(test_pack) {
{
unsigned arena_ind = arenas_create_mallctl();
size_t nregs_per_run = nregs_per_run_compute();
size_t nregs = nregs_per_run * NSLABS;
@ -125,8 +121,9 @@ TEST_BEGIN(test_pack)
i++, offset = (offset + 1) % nregs_per_run) {
for (j = 0; j < nregs_per_run; j++) {
void *p = ptrs[(i * nregs_per_run) + j];
if (offset == j)
if (offset == j) {
continue;
}
dallocx(p, MALLOCX_ARENA(arena_ind) |
MALLOCX_TCACHE_NONE);
}
@ -143,8 +140,9 @@ TEST_BEGIN(test_pack)
for (j = 0; j < nregs_per_run; j++) {
void *p;

if (offset == j)
if (offset == j) {
continue;
}
p = mallocx(SZ, MALLOCX_ARENA(arena_ind) |
MALLOCX_TCACHE_NONE);
assert_ptr_eq(p, ptrs[(i * nregs_per_run) + j],
@ -159,8 +157,7 @@ TEST_BEGIN(test_pack)
TEST_END

int
main(void)
main(void) {
{
return (test(
test_pack));
}
@@ -1,7 +1,6 @@
 #include "test/jemalloc_test.h"

-TEST_BEGIN(test_pages_huge)
-{
+TEST_BEGIN(test_pages_huge) {
 	size_t alloc_size;
 	bool commit;
 	void *pages, *hugepage;
@@ -22,8 +21,7 @@ TEST_BEGIN(test_pages_huge)
 TEST_END

 int
-main(void)
-{
+main(void) {
 	return (test(
 	    test_pages_huge));
 }
@@ -10,8 +10,7 @@ struct node_s {
 };

 static int
-node_cmp(const node_t *a, const node_t *b)
-{
+node_cmp(const node_t *a, const node_t *b) {
 	int ret;

 	ret = (a->key > b->key) - (a->key < b->key);
@@ -39,18 +38,19 @@ typedef ph(node_t) heap_t;
 ph_gen(static, heap_, heap_t, node_t, link, node_cmp_magic);

 static void
-node_print(const node_t *node, unsigned depth)
-{
+node_print(const node_t *node, unsigned depth) {
 	unsigned i;
 	node_t *leftmost_child, *sibling;

-	for (i = 0; i < depth; i++)
+	for (i = 0; i < depth; i++) {
 		malloc_printf("\t");
+	}
 	malloc_printf("%2"FMTu64"\n", node->key);

 	leftmost_child = phn_lchild_get(node_t, link, node);
-	if (leftmost_child == NULL)
+	if (leftmost_child == NULL) {
 		return;
+	}
 	node_print(leftmost_child, depth + 1);

 	for (sibling = phn_next_get(node_t, link, leftmost_child); sibling !=
@@ -60,13 +60,13 @@ node_print(const node_t *node, unsigned depth)
 }

 static void
-heap_print(const heap_t *heap)
-{
+heap_print(const heap_t *heap) {
 	node_t *auxelm;

 	malloc_printf("vvv heap %p vvv\n", heap);
-	if (heap->ph_root == NULL)
+	if (heap->ph_root == NULL) {
 		goto label_return;
+	}

 	node_print(heap->ph_root, 0);

@@ -83,8 +83,7 @@ label_return:
 }

 static unsigned
-node_validate(const node_t *node, const node_t *parent)
-{
+node_validate(const node_t *node, const node_t *parent) {
 	unsigned nnodes = 1;
 	node_t *leftmost_child, *sibling;

@@ -94,8 +93,9 @@ node_validate(const node_t *node, const node_t *parent)
 	}

 	leftmost_child = phn_lchild_get(node_t, link, node);
-	if (leftmost_child == NULL)
+	if (leftmost_child == NULL) {
 		return (nnodes);
+	}
 	assert_ptr_eq((void *)phn_prev_get(node_t, link, leftmost_child),
 	    (void *)node, "Leftmost child does not link to node");
 	nnodes += node_validate(leftmost_child, node);
@@ -111,13 +111,13 @@ node_validate(const node_t *node, const node_t *parent)
 }

 static unsigned
-heap_validate(const heap_t *heap)
-{
+heap_validate(const heap_t *heap) {
 	unsigned nnodes = 0;
 	node_t *auxelm;

-	if (heap->ph_root == NULL)
+	if (heap->ph_root == NULL) {
 		goto label_return;
+	}

 	nnodes += node_validate(heap->ph_root, NULL);

@@ -130,13 +130,13 @@ heap_validate(const heap_t *heap)
 }

 label_return:
-	if (false)
+	if (false) {
 		heap_print(heap);
+	}
 	return (nnodes);
 }

-TEST_BEGIN(test_ph_empty)
-{
+TEST_BEGIN(test_ph_empty) {
 	heap_t heap;

 	heap_new(&heap);
@@ -146,23 +146,20 @@ TEST_BEGIN(test_ph_empty)
 TEST_END

 static void
-node_remove(heap_t *heap, node_t *node)
-{
+node_remove(heap_t *heap, node_t *node) {
 	heap_remove(heap, node);

 	node->magic = 0;
 }

 static node_t *
-node_remove_first(heap_t *heap)
-{
+node_remove_first(heap_t *heap) {
 	node_t *node = heap_remove_first(heap);
 	node->magic = 0;
 	return (node);
 }

-TEST_BEGIN(test_ph_random)
-{
+TEST_BEGIN(test_ph_random) {
 #define NNODES 25
 #define NBAGS 250
 #define SEED 42
@@ -177,18 +174,21 @@ TEST_BEGIN(test_ph_random)
 		switch (i) {
 		case 0:
 			/* Insert in order. */
-			for (j = 0; j < NNODES; j++)
+			for (j = 0; j < NNODES; j++) {
 				bag[j] = j;
+			}
 			break;
 		case 1:
 			/* Insert in reverse order. */
-			for (j = 0; j < NNODES; j++)
+			for (j = 0; j < NNODES; j++) {
 				bag[j] = NNODES - j - 1;
+			}
 			break;
 		default:
-			for (j = 0; j < NNODES; j++)
+			for (j = 0; j < NNODES; j++) {
 				bag[j] = gen_rand64_range(sfmt, NNODES);
+			}
 		}

 		for (j = 1; j <= NNODES; j++) {
 			/* Initialize heap and nodes. */
@@ -280,8 +280,7 @@ TEST_BEGIN(test_ph_random)
 TEST_END

 int
-main(void)
-{
+main(void) {
 	return (test(
 	    test_ph_empty,
 	    test_ph_random));
@@ -1,8 +1,7 @@
 #include "test/jemalloc_test.h"

 static void
-test_prng_lg_range_u32(bool atomic)
-{
+test_prng_lg_range_u32(bool atomic) {
 	uint32_t sa, sb, ra, rb;
 	unsigned lg_range;

@@ -38,8 +37,7 @@ test_prng_lg_range_u32(bool atomic)
 }

 static void
-test_prng_lg_range_u64(void)
-{
+test_prng_lg_range_u64(void) {
 	uint64_t sa, sb, ra, rb;
 	unsigned lg_range;

@@ -75,8 +73,7 @@ test_prng_lg_range_u64(void)
 }

 static void
-test_prng_lg_range_zu(bool atomic)
-{
+test_prng_lg_range_zu(bool atomic) {
 	size_t sa, sb, ra, rb;
 	unsigned lg_range;

@@ -112,39 +109,33 @@ test_prng_lg_range_zu(bool atomic)
 	}
 }

-TEST_BEGIN(test_prng_lg_range_u32_nonatomic)
-{
+TEST_BEGIN(test_prng_lg_range_u32_nonatomic) {
 	test_prng_lg_range_u32(false);
 }
 TEST_END

-TEST_BEGIN(test_prng_lg_range_u32_atomic)
-{
+TEST_BEGIN(test_prng_lg_range_u32_atomic) {
 	test_prng_lg_range_u32(true);
 }
 TEST_END

-TEST_BEGIN(test_prng_lg_range_u64_nonatomic)
-{
+TEST_BEGIN(test_prng_lg_range_u64_nonatomic) {
 	test_prng_lg_range_u64();
 }
 TEST_END

-TEST_BEGIN(test_prng_lg_range_zu_nonatomic)
-{
+TEST_BEGIN(test_prng_lg_range_zu_nonatomic) {
 	test_prng_lg_range_zu(false);
 }
 TEST_END

-TEST_BEGIN(test_prng_lg_range_zu_atomic)
-{
+TEST_BEGIN(test_prng_lg_range_zu_atomic) {
 	test_prng_lg_range_zu(true);
 }
 TEST_END

 static void
-test_prng_range_u32(bool atomic)
-{
+test_prng_range_u32(bool atomic) {
 	uint32_t range;
 #define MAX_RANGE 10000000
 #define RANGE_STEP 97
@@ -164,8 +155,7 @@ test_prng_range_u32(bool atomic)
 }

 static void
-test_prng_range_u64(void)
-{
+test_prng_range_u64(void) {
 	uint64_t range;
 #define MAX_RANGE 10000000
 #define RANGE_STEP 97
@@ -185,8 +175,7 @@ test_prng_range_u64(void)
 }

 static void
-test_prng_range_zu(bool atomic)
-{
+test_prng_range_zu(bool atomic) {
 	size_t range;
 #define MAX_RANGE 10000000
 #define RANGE_STEP 97
@@ -205,39 +194,33 @@ test_prng_range_zu(bool atomic)
 	}
 }

-TEST_BEGIN(test_prng_range_u32_nonatomic)
-{
+TEST_BEGIN(test_prng_range_u32_nonatomic) {
 	test_prng_range_u32(false);
 }
 TEST_END

-TEST_BEGIN(test_prng_range_u32_atomic)
-{
+TEST_BEGIN(test_prng_range_u32_atomic) {
 	test_prng_range_u32(true);
 }
 TEST_END

-TEST_BEGIN(test_prng_range_u64_nonatomic)
-{
+TEST_BEGIN(test_prng_range_u64_nonatomic) {
 	test_prng_range_u64();
 }
 TEST_END

-TEST_BEGIN(test_prng_range_zu_nonatomic)
-{
+TEST_BEGIN(test_prng_range_zu_nonatomic) {
 	test_prng_range_zu(false);
 }
 TEST_END

-TEST_BEGIN(test_prng_range_zu_atomic)
-{
+TEST_BEGIN(test_prng_range_zu_atomic) {
 	test_prng_range_zu(true);
 }
 TEST_END

 int
-main(void)
-{
+main(void) {
 	return (test(
 	    test_prng_lg_range_u32_nonatomic,
 	    test_prng_lg_range_u32_atomic,
@@ -11,8 +11,7 @@ const char *malloc_conf =
 #endif

 static int
-prof_dump_open_intercept(bool propagate_err, const char *filename)
-{
+prof_dump_open_intercept(bool propagate_err, const char *filename) {
 	int fd;

 	fd = open("/dev/null", O_WRONLY);
@@ -22,14 +21,12 @@ prof_dump_open_intercept(bool propagate_err, const char *filename)
 }

 static void *
-alloc_from_permuted_backtrace(unsigned thd_ind, unsigned iteration)
-{
+alloc_from_permuted_backtrace(unsigned thd_ind, unsigned iteration) {
 	return (btalloc(1, thd_ind*NALLOCS_PER_THREAD + iteration));
 }

 static void *
-thd_start(void *varg)
-{
+thd_start(void *varg) {
 	unsigned thd_ind = *(unsigned *)varg;
 	size_t bt_count_prev, bt_count;
 	unsigned i_prev, i;
@@ -57,8 +54,7 @@ thd_start(void *varg)
 	return (NULL);
 }

-TEST_BEGIN(test_idump)
-{
+TEST_BEGIN(test_idump) {
 	bool active;
 	thd_t thds[NTHREADS];
 	unsigned thd_args[NTHREADS];
@@ -77,14 +73,14 @@ TEST_BEGIN(test_idump)
 		thd_args[i] = i;
 		thd_create(&thds[i], thd_start, (void *)&thd_args[i]);
 	}
-	for (i = 0; i < NTHREADS; i++)
+	for (i = 0; i < NTHREADS; i++) {
 		thd_join(thds[i], NULL);
+	}
 }
 TEST_END

 int
-main(void)
-{
+main(void) {
 	return (test(
 	    test_idump));
 }
@@ -6,8 +6,7 @@ const char *malloc_conf =
 #endif

 static void
-mallctl_bool_get(const char *name, bool expected, const char *func, int line)
-{
+mallctl_bool_get(const char *name, bool expected, const char *func, int line) {
 	bool old;
 	size_t sz;

@@ -20,8 +19,7 @@ mallctl_bool_get(const char *name, bool expected, const char *func, int line)

 static void
 mallctl_bool_set(const char *name, bool old_expected, bool val_new,
-    const char *func, int line)
-{
+    const char *func, int line) {
 	bool old;
 	size_t sz;

@@ -36,8 +34,7 @@ mallctl_bool_set(const char *name, bool old_expected, bool val_new,

 static void
 mallctl_prof_active_get_impl(bool prof_active_old_expected, const char *func,
-    int line)
-{
+    int line) {
 	mallctl_bool_get("prof.active", prof_active_old_expected, func, line);
 }
 #define mallctl_prof_active_get(a) \
@@ -45,8 +42,7 @@ mallctl_prof_active_get_impl(bool prof_active_old_expected, const char *func,

 static void
 mallctl_prof_active_set_impl(bool prof_active_old_expected,
-    bool prof_active_new, const char *func, int line)
-{
+    bool prof_active_new, const char *func, int line) {
 	mallctl_bool_set("prof.active", prof_active_old_expected,
 	    prof_active_new, func, line);
 }
@@ -55,8 +51,7 @@ mallctl_prof_active_set_impl(bool prof_active_old_expected,

 static void
 mallctl_thread_prof_active_get_impl(bool thread_prof_active_old_expected,
-    const char *func, int line)
-{
+    const char *func, int line) {
 	mallctl_bool_get("thread.prof.active", thread_prof_active_old_expected,
 	    func, line);
 }
@@ -65,8 +60,7 @@ mallctl_thread_prof_active_get_impl(bool thread_prof_active_old_expected,

 static void
 mallctl_thread_prof_active_set_impl(bool thread_prof_active_old_expected,
-    bool thread_prof_active_new, const char *func, int line)
-{
+    bool thread_prof_active_new, const char *func, int line) {
 	mallctl_bool_set("thread.prof.active", thread_prof_active_old_expected,
 	    thread_prof_active_new, func, line);
 }
@@ -74,8 +68,7 @@ mallctl_thread_prof_active_set_impl(bool thread_prof_active_old_expected,
 	    mallctl_thread_prof_active_set_impl(a, b, __func__, __LINE__)

 static void
-prof_sampling_probe_impl(bool expect_sample, const char *func, int line)
-{
+prof_sampling_probe_impl(bool expect_sample, const char *func, int line) {
 	void *p;
 	size_t expected_backtraces = expect_sample ? 1 : 0;

@@ -90,8 +83,7 @@ prof_sampling_probe_impl(bool expect_sample, const char *func, int line)
 #define prof_sampling_probe(a) \
 	    prof_sampling_probe_impl(a, __func__, __LINE__)

-TEST_BEGIN(test_prof_active)
-{
+TEST_BEGIN(test_prof_active) {
 	test_skip_if(!config_prof);

 	mallctl_prof_active_get(true);
@@ -124,8 +116,7 @@ TEST_BEGIN(test_prof_active)
 TEST_END

 int
-main(void)
-{
+main(void) {
 	return (test(
 	    test_prof_active));
 }
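Every hunk above applies the same two mechanical rewrites: the function's opening brace moves up onto the declaration line, and single-statement if/for bodies gain braces. A minimal self-contained sketch of the resulting style follows; widget_t, widget_count, and the values used are illustrative names invented for this example, not part of the diff.

#include <stddef.h>

typedef struct widget_s {
	int count;
} widget_t;

/*
 * Brace style as applied throughout these hunks: the opening brace stays on
 * the function declaration line, and every single-line block is braced.
 */
static int
widget_count(const widget_t *w) {
	if (w == NULL) {
		return (0);
	}
	return (w->count);
}

int
main(void) {
	widget_t w = { 3 };

	return (widget_count(&w) == 3 ? 0 : 1);
}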
Some files were not shown because too many files have changed in this diff.