Minor update to locked int

Yinan Zhang 2020-04-14 14:52:20 -07:00
parent f533ab6da6
commit b543c20a94
2 changed files with 20 additions and 13 deletions

View File

@@ -90,7 +90,7 @@ arena_stats_init(tsdn_t *tsdn, arena_stats_t *arena_stats) {
 			assert(((char *)arena_stats)[i] == 0);
 		}
 	}
-	if (LOCKEDINT_MTX_INIT(LOCKEDINT_MTX(arena_stats->mtx), "arena_stats",
+	if (LOCKEDINT_MTX_INIT(arena_stats->mtx, "arena_stats",
 	    WITNESS_RANK_ARENA_STATS, malloc_mutex_rank_exclusive)) {
 		return true;
 	}
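
For context, a manual-expansion sketch (not code from the commit) of what the updated call produces in each configuration, per the LOCKEDINT_MTX_INIT definitions in the second file below; previously the call site had to wrap the mutex in LOCKEDINT_MTX() itself:

/* Sketch only: expansion of the new call under each build. */
#ifndef JEMALLOC_ATOMIC_U64
/* Mutex build: the macro now takes the mutex's address itself. */
malloc_mutex_init(&(arena_stats->mtx), "arena_stats",
    WITNESS_RANK_ARENA_STATS, malloc_mutex_rank_exclusive)
#else
/* Atomic build: there is no mutex; initialization trivially succeeds. */
false
#endif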

View File

@@ -26,8 +26,8 @@ struct locked_zu_s {
 
 #ifndef JEMALLOC_ATOMIC_U64
 # define LOCKEDINT_MTX_DECLARE(name) malloc_mutex_t name;
-# define LOCKEDINT_MTX_INIT(ptr, name, rank, rank_mode)		\
-    malloc_mutex_init(ptr, name, rank, rank_mode)
+# define LOCKEDINT_MTX_INIT(mu, name, rank, rank_mode)			\
+    malloc_mutex_init(&(mu), name, rank, rank_mode)
 # define LOCKEDINT_MTX(mtx) (&(mtx))
 # define LOCKEDINT_MTX_LOCK(tsdn, mu) malloc_mutex_lock(tsdn, &(mu))
 # define LOCKEDINT_MTX_UNLOCK(tsdn, mu) malloc_mutex_unlock(tsdn, &(mu))
@@ -38,21 +38,28 @@ struct locked_zu_s {
 	malloc_mutex_postfork_child(tsdn, &(mu))
 #else
 # define LOCKEDINT_MTX_DECLARE(name)
-# define LOCKEDINT_MTX(ptr) NULL
-# define LOCKEDINT_MTX_INIT(ptr, name, rank, rank_mode) false
-# define LOCKEDINT_MTX_LOCK(tsdn, mu) do {} while (0)
-# define LOCKEDINT_MTX_UNLOCK(tsdn, mu) do {} while (0)
+# define LOCKEDINT_MTX(mtx) NULL
+# define LOCKEDINT_MTX_INIT(mu, name, rank, rank_mode) false
+# define LOCKEDINT_MTX_LOCK(tsdn, mu)
+# define LOCKEDINT_MTX_UNLOCK(tsdn, mu)
 # define LOCKEDINT_MTX_PREFORK(tsdn, mu)
 # define LOCKEDINT_MTX_POSTFORK_PARENT(tsdn, mu)
 # define LOCKEDINT_MTX_POSTFORK_CHILD(tsdn, mu)
 #endif
 
+#ifdef JEMALLOC_ATOMIC_U64
+# define LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx) assert((mtx) == NULL)
+#else
+# define LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx)			\
+    malloc_mutex_assert_owner(tsdn, (mtx))
+#endif
+
 static inline uint64_t
 locked_read_u64(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_u64_t *p) {
+	LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
 #ifdef JEMALLOC_ATOMIC_U64
 	return atomic_load_u64(&p->val, ATOMIC_RELAXED);
 #else
-	malloc_mutex_assert_owner(tsdn, mtx);
 	return p->val;
 #endif
 }
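
The new LOCKEDINT_MTX_ASSERT_INTERNAL hoists the ownership check out of each helper's #else branch; as a side effect the atomic build, which previously asserted nothing, now checks that callers passed NULL (i.e. LOCKEDINT_MTX() of a nonexistent mutex). A minimal sketch of the two expansions:

/* Sketch: the single assertion each locked_* helper now starts with. */
LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
/* Mutex build expands to:
 *     malloc_mutex_assert_owner(tsdn, (mtx));  -- caller must hold the lock
 * Atomic build expands to:
 *     assert((mtx) == NULL);                   -- no mutex should exist
 */
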
@@ -60,10 +67,10 @@ locked_read_u64(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_u64_t *p) {
 static inline void
 locked_inc_u64(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_u64_t *p,
     uint64_t x) {
+	LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
 #ifdef JEMALLOC_ATOMIC_U64
 	atomic_fetch_add_u64(&p->val, x, ATOMIC_RELAXED);
 #else
-	malloc_mutex_assert_owner(tsdn, mtx);
 	p->val += x;
 #endif
 }
@@ -71,11 +78,11 @@ locked_inc_u64(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_u64_t *p,
 static inline void
 locked_dec_u64(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_u64_t *p,
     uint64_t x) {
+	LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
 #ifdef JEMALLOC_ATOMIC_U64
 	uint64_t r = atomic_fetch_sub_u64(&p->val, x, ATOMIC_RELAXED);
 	assert(r - x <= r);
 #else
-	malloc_mutex_assert_owner(tsdn, mtx);
 	p->val -= x;
 	assert(p->val + x >= p->val);
 #endif
@@ -108,10 +115,10 @@ locked_read_u64_unsynchronized(locked_u64_t *p) {
 
 static inline size_t
 locked_read_zu(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_zu_t *p) {
+	LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
 #ifdef JEMALLOC_ATOMIC_U64
 	return atomic_load_zu(&p->val, ATOMIC_RELAXED);
 #else
-	malloc_mutex_assert_owner(tsdn, mtx);
 	return atomic_load_zu(&p->val, ATOMIC_RELAXED);
 #endif
 }
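
Note that locked_read_zu loads atomically in both branches: unlike locked_u64_t, locked_zu_t keeps its value in an atomic even in the mutex build, presumably so the value can also be read without taking the lock; the mutex only serializes writers. A hedged sketch of why the relaxed load/store pair in the zu increments below then suffices:

/* Sketch, assuming locked_zu_t wraps an atomic_zu_t in both builds:
 * with the mutex held there is exactly one writer, so no update can be
 * lost between the load and the store; ATOMIC_RELAXED is enough because
 * the mutex (or the caller) provides whatever ordering is needed. */
size_t cur = atomic_load_zu(&p->val, ATOMIC_RELAXED);	/* lock held */
atomic_store_zu(&p->val, cur + x, ATOMIC_RELAXED);	/* sole writer */
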
@@ -119,10 +126,10 @@ locked_read_zu(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_zu_t *p) {
 static inline void
 locked_inc_zu(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_zu_t *p,
     size_t x) {
+	LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
 #ifdef JEMALLOC_ATOMIC_U64
 	atomic_fetch_add_zu(&p->val, x, ATOMIC_RELAXED);
 #else
-	malloc_mutex_assert_owner(tsdn, mtx);
 	size_t cur = atomic_load_zu(&p->val, ATOMIC_RELAXED);
 	atomic_store_zu(&p->val, cur + x, ATOMIC_RELAXED);
 #endif
@@ -131,11 +138,11 @@ locked_inc_zu(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_zu_t *p,
 static inline void
 locked_dec_zu(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_zu_t *p,
     size_t x) {
+	LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
 #ifdef JEMALLOC_ATOMIC_U64
 	size_t r = atomic_fetch_sub_zu(&p->val, x, ATOMIC_RELAXED);
 	assert(r - x <= r);
 #else
-	malloc_mutex_assert_owner(tsdn, mtx);
 	size_t cur = atomic_load_zu(&p->val, ATOMIC_RELAXED);
 	atomic_store_zu(&p->val, cur - x, ATOMIC_RELAXED);
 #endif
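
Taken together, a hedged usage sketch of the updated API; my_stats_t, my_stats_init, and my_stats_accum are hypothetical names for illustration, not part of this commit:

/* Illustrative only: shows the intended call pattern after this change. */
typedef struct my_stats_s {
	LOCKEDINT_MTX_DECLARE(mtx)	/* expands to nothing in atomic builds */
	locked_u64_t nrequests;
} my_stats_t;

static bool
my_stats_init(my_stats_t *stats) {
	/* New-style init: pass the mutex itself; the macro takes &(mu). */
	return LOCKEDINT_MTX_INIT(stats->mtx, "my_stats",
	    WITNESS_RANK_ARENA_STATS, malloc_mutex_rank_exclusive);
}

static void
my_stats_accum(tsdn_t *tsdn, my_stats_t *stats, uint64_t n) {
	LOCKEDINT_MTX_LOCK(tsdn, stats->mtx);	/* no-op in atomic builds */
	/* LOCKEDINT_MTX() yields &stats->mtx, or NULL in atomic builds,
	 * which is exactly what LOCKEDINT_MTX_ASSERT_INTERNAL expects. */
	locked_inc_u64(tsdn, LOCKEDINT_MTX(stats->mtx), &stats->nrequests, n);
	LOCKEDINT_MTX_UNLOCK(tsdn, stats->mtx);
}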