Revert "Define 64-bits atomics unconditionally"

This reverts commit c2942e2c0e.

This resolves #495.
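In effect, the revert puts the LG_SIZEOF_PTR == 3 || LG_SIZEOF_INT == 3 guard back around the 64-bit wrappers, so atomic_add_uint64() and friends are only compiled on targets whose pointer or int type is 8 bytes wide. Below is a minimal, self-contained sketch of what that guard expresses; the LG_SIZEOF_* defines are stand-ins with assumed values (in jemalloc they come from the configure-generated headers as log2 of the type sizes), and main() exists only to make the sketch runnable:

#include <stdio.h>

/*
 * Stand-ins for the configure-generated constants (assumed semantics):
 * log2(sizeof(void *)) and log2(sizeof(int)).  On an LP64 target
 * LG_SIZEOF_PTR is 3 (8-byte pointers) and LG_SIZEOF_INT is 2.
 */
#define LG_SIZEOF_PTR 3
#define LG_SIZEOF_INT 2

int
main(void)
{
#if (LG_SIZEOF_PTR == 3 || LG_SIZEOF_INT == 3)
	puts("64-bit atomic wrappers would be compiled in");
#else
	puts("64-bit atomic wrappers would be omitted");
#endif
	return (0);
}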
Jason Evans 2016-11-07 10:53:35 -08:00
parent 04b463546e
commit 2e46b13ad5


@@ -66,7 +66,8 @@ void atomic_write_u(unsigned *p, unsigned x);
 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ATOMIC_C_))
 /******************************************************************************/
 /* 64-bit operations. */
-#if (defined(__amd64__) || defined(__x86_64__))
+#if (LG_SIZEOF_PTR == 3 || LG_SIZEOF_INT == 3)
+# if (defined(__amd64__) || defined(__x86_64__))
 JEMALLOC_INLINE uint64_t
 atomic_add_uint64(uint64_t *p, uint64_t x)
 {
@@ -124,7 +125,7 @@ atomic_write_uint64(uint64_t *p, uint64_t x)
 	    : "memory" /* Clobbers. */
 	    );
 }
-#elif (defined(JEMALLOC_C11ATOMICS))
+# elif (defined(JEMALLOC_C11ATOMICS))
 JEMALLOC_INLINE uint64_t
 atomic_add_uint64(uint64_t *p, uint64_t x)
 {
@@ -152,7 +153,7 @@ atomic_write_uint64(uint64_t *p, uint64_t x)
 	volatile atomic_uint_least64_t *a = (volatile atomic_uint_least64_t *)p;
 	atomic_store(a, x);
 }
-#elif (defined(JEMALLOC_ATOMIC9))
+# elif (defined(JEMALLOC_ATOMIC9))
 JEMALLOC_INLINE uint64_t
 atomic_add_uint64(uint64_t *p, uint64_t x)
 {
@@ -192,7 +193,7 @@ atomic_write_uint64(uint64_t *p, uint64_t x)
 	atomic_store_rel_long(p, x);
 }
-#elif (defined(JEMALLOC_OSATOMIC))
+# elif (defined(JEMALLOC_OSATOMIC))
 JEMALLOC_INLINE uint64_t
 atomic_add_uint64(uint64_t *p, uint64_t x)
 {
@@ -224,7 +225,7 @@ atomic_write_uint64(uint64_t *p, uint64_t x)
 		o = atomic_read_uint64(p);
 	} while (atomic_cas_uint64(p, o, x));
 }
-#elif (defined(_MSC_VER))
+# elif (defined(_MSC_VER))
 JEMALLOC_INLINE uint64_t
 atomic_add_uint64(uint64_t *p, uint64_t x)
 {
@@ -254,7 +255,7 @@ atomic_write_uint64(uint64_t *p, uint64_t x)
 	InterlockedExchange64(p, x);
 }
-#elif (defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8) || \
+# elif (defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8) || \
     defined(JE_FORCE_SYNC_COMPARE_AND_SWAP_8))
 JEMALLOC_INLINE uint64_t
 atomic_add_uint64(uint64_t *p, uint64_t x)
@@ -283,8 +284,9 @@ atomic_write_uint64(uint64_t *p, uint64_t x)
 	__sync_lock_test_and_set(p, x);
 }
-#else
+# else
 # error "Missing implementation for 64-bit atomic operations"
+# endif
 #endif
 /******************************************************************************/
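For orientation, the JEMALLOC_C11ATOMICS branch that moves under the restored guard follows the pattern visible in the context lines above: the plain uint64_t pointer is viewed as a volatile atomic_uint_least64_t and manipulated through <stdatomic.h>. Below is a minimal sketch of that pattern under C11; the function body is an approximation for illustration, not a copy of the jemalloc source:

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Sketch of a C11-style 64-bit atomic add: atomic_fetch_add returns the
 * old value, so x is added once more to report the new value, mirroring
 * the add-and-return-new convention of the wrappers in the diff.
 */
static uint64_t
add_uint64_c11(uint64_t *p, uint64_t x)
{
	volatile atomic_uint_least64_t *a = (volatile atomic_uint_least64_t *)p;

	return (atomic_fetch_add(a, x) + x);
}

int
main(void)
{
	uint64_t v = 40;

	printf("%llu\n", (unsigned long long)add_uint64_c11(&v, 2)); /* prints 42 */
	return (0);
}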