Fallback to 32-bit when 8-bit atomics are missing for TSD.

When this fallback happens, it might cause a slowdown on fast-path operations. However, such cases are very rare.
This commit is contained in:
Qi Wang 2019-03-07 16:01:55 -08:00 committed by Qi Wang
parent 06f0850427
commit b804d0f019
2 changed files with 24 additions and 8 deletions

View File

@ -169,6 +169,18 @@ enum {
*/ */
#define TSD_MANGLE(n) cant_access_tsd_items_directly_use_a_getter_or_setter_##n #define TSD_MANGLE(n) cant_access_tsd_items_directly_use_a_getter_or_setter_##n
/*
 * The TSD state field is accessed atomically.  Keep it to a single byte
 * via 8-bit atomics when the platform provides them; otherwise fall back
 * to 32-bit atomics (rare; may slow down fast-path operations).
 */
#ifdef JEMALLOC_U8_ATOMICS
# define tsd_state_t atomic_u8_t
# define tsd_atomic_load atomic_load_u8
# define tsd_atomic_store atomic_store_u8
# define tsd_atomic_exchange atomic_exchange_u8
#else
/* No 8-bit atomics: use the u32 variants with identical semantics. */
# define tsd_state_t atomic_u32_t
# define tsd_atomic_load atomic_load_u32
# define tsd_atomic_store atomic_store_u32
# define tsd_atomic_exchange atomic_exchange_u32
#endif
/* The actual tsd. */ /* The actual tsd. */
struct tsd_s { struct tsd_s {
/* /*
@ -177,8 +189,11 @@ struct tsd_s {
* setters below. * setters below.
*/ */
/* We manually limit the state to just a single byte. */ /*
atomic_u8_t state; * We manually limit the state to just a single byte. Unless the 8-bit
* atomics are unavailable (which is rare).
*/
tsd_state_t state;
#define O(n, t, nt) \ #define O(n, t, nt) \
t TSD_MANGLE(n); t TSD_MANGLE(n);
MALLOC_TSD MALLOC_TSD

View File

@ -113,9 +113,9 @@ tsd_force_recompute(tsdn_t *tsdn) {
malloc_mutex_lock(tsdn, &tsd_nominal_tsds_lock); malloc_mutex_lock(tsdn, &tsd_nominal_tsds_lock);
tsd_t *remote_tsd; tsd_t *remote_tsd;
ql_foreach(remote_tsd, &tsd_nominal_tsds, TSD_MANGLE(tcache).tsd_link) { ql_foreach(remote_tsd, &tsd_nominal_tsds, TSD_MANGLE(tcache).tsd_link) {
assert(atomic_load_u8(&remote_tsd->state, ATOMIC_RELAXED) assert(tsd_atomic_load(&remote_tsd->state, ATOMIC_RELAXED)
<= tsd_state_nominal_max); <= tsd_state_nominal_max);
atomic_store_u8(&remote_tsd->state, tsd_state_nominal_recompute, tsd_atomic_store(&remote_tsd->state, tsd_state_nominal_recompute,
ATOMIC_RELAXED); ATOMIC_RELAXED);
} }
malloc_mutex_unlock(tsdn, &tsd_nominal_tsds_lock); malloc_mutex_unlock(tsdn, &tsd_nominal_tsds_lock);
@ -172,7 +172,7 @@ tsd_slow_update(tsd_t *tsd) {
uint8_t old_state; uint8_t old_state;
do { do {
uint8_t new_state = tsd_state_compute(tsd); uint8_t new_state = tsd_state_compute(tsd);
old_state = atomic_exchange_u8(&tsd->state, new_state, old_state = tsd_atomic_exchange(&tsd->state, new_state,
ATOMIC_ACQUIRE); ATOMIC_ACQUIRE);
} while (old_state == tsd_state_nominal_recompute); } while (old_state == tsd_state_nominal_recompute);
} }
@ -181,14 +181,14 @@ void
tsd_state_set(tsd_t *tsd, uint8_t new_state) { tsd_state_set(tsd_t *tsd, uint8_t new_state) {
/* Only the tsd module can change the state *to* recompute. */ /* Only the tsd module can change the state *to* recompute. */
assert(new_state != tsd_state_nominal_recompute); assert(new_state != tsd_state_nominal_recompute);
uint8_t old_state = atomic_load_u8(&tsd->state, ATOMIC_RELAXED); uint8_t old_state = tsd_atomic_load(&tsd->state, ATOMIC_RELAXED);
if (old_state > tsd_state_nominal_max) { if (old_state > tsd_state_nominal_max) {
/* /*
* Not currently in the nominal list, but it might need to be * Not currently in the nominal list, but it might need to be
* inserted there. * inserted there.
*/ */
assert(!tsd_in_nominal_list(tsd)); assert(!tsd_in_nominal_list(tsd));
atomic_store_u8(&tsd->state, new_state, ATOMIC_RELAXED); tsd_atomic_store(&tsd->state, new_state, ATOMIC_RELAXED);
if (new_state <= tsd_state_nominal_max) { if (new_state <= tsd_state_nominal_max) {
tsd_add_nominal(tsd); tsd_add_nominal(tsd);
} }
@ -201,7 +201,8 @@ tsd_state_set(tsd_t *tsd, uint8_t new_state) {
assert(tsd_in_nominal_list(tsd)); assert(tsd_in_nominal_list(tsd));
if (new_state > tsd_state_nominal_max) { if (new_state > tsd_state_nominal_max) {
tsd_remove_nominal(tsd); tsd_remove_nominal(tsd);
atomic_store_u8(&tsd->state, new_state, ATOMIC_RELAXED); tsd_atomic_store(&tsd->state, new_state,
ATOMIC_RELAXED);
} else { } else {
/* /*
* This is the tricky case. We're transitioning from * This is the tricky case. We're transitioning from