/******************************************************************************/
#ifdef JEMALLOC_H_TYPES

typedef struct malloc_mutex_s malloc_mutex_t;
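
/*
 * MALLOC_MUTEX_INITIALIZER pairs each platform's statically initializable
 * lock with a WITNESS_INITIALIZER so that statically allocated mutexes
 * participate in witness-based lock-order checking.  On Windows the macro
 * expands to nothing, so such mutexes rely on run-time initialization
 * instead.
 */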

#ifdef _WIN32
#  define MALLOC_MUTEX_INITIALIZER
#elif (defined(JEMALLOC_OS_UNFAIR_LOCK))
#  define MALLOC_MUTEX_INITIALIZER \
    {OS_UNFAIR_LOCK_INIT, WITNESS_INITIALIZER("mutex", WITNESS_RANK_OMIT)}
#elif (defined(JEMALLOC_OSSPIN))
#  define MALLOC_MUTEX_INITIALIZER \
    {0, WITNESS_INITIALIZER("mutex", WITNESS_RANK_OMIT)}
#elif (defined(JEMALLOC_MUTEX_INIT_CB))
#  define MALLOC_MUTEX_INITIALIZER \
    {PTHREAD_MUTEX_INITIALIZER, NULL, \
     WITNESS_INITIALIZER("mutex", WITNESS_RANK_OMIT)}
#else
#  if (defined(JEMALLOC_HAVE_PTHREAD_MUTEX_ADAPTIVE_NP) && \
    defined(PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP))
#    define MALLOC_MUTEX_TYPE PTHREAD_MUTEX_ADAPTIVE_NP
#    define MALLOC_MUTEX_INITIALIZER \
     {PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP, \
      WITNESS_INITIALIZER("mutex", WITNESS_RANK_OMIT)}
#  else
#    define MALLOC_MUTEX_TYPE PTHREAD_MUTEX_DEFAULT
#    define MALLOC_MUTEX_INITIALIZER \
     {PTHREAD_MUTEX_INITIALIZER, \
      WITNESS_INITIALIZER("mutex", WITNESS_RANK_OMIT)}
#  endif
#endif

#endif /* JEMALLOC_H_TYPES */
/******************************************************************************/
#ifdef JEMALLOC_H_STRUCTS
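
/*
 * The postponed_next field exists only in JEMALLOC_MUTEX_INIT_CB builds; it
 * appears to link mutexes whose initialization is deferred until the init
 * callback becomes usable (see malloc_mutex_boot()).  The witness field backs
 * the lock-order assertions used by the inline wrappers below.
 */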

struct malloc_mutex_s {
#ifdef _WIN32
#  if _WIN32_WINNT >= 0x0600
    SRWLOCK             lock;
#  else
    CRITICAL_SECTION    lock;
#  endif
#elif (defined(JEMALLOC_OS_UNFAIR_LOCK))
    os_unfair_lock      lock;
#elif (defined(JEMALLOC_OSSPIN))
    OSSpinLock          lock;
#elif (defined(JEMALLOC_MUTEX_INIT_CB))
    pthread_mutex_t     lock;
    malloc_mutex_t      *postponed_next;
#else
    pthread_mutex_t     lock;
#endif
    witness_t           witness;
};

#endif /* JEMALLOC_H_STRUCTS */
/******************************************************************************/
#ifdef JEMALLOC_H_EXTERNS
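
/*
 * Lazy locking: when JEMALLOC_LAZY_LOCK is defined, isthreaded starts out
 * false and the inline lock/unlock wrappers do nothing until the process is
 * observed to create a thread; otherwise isthreaded is hard-wired to true and
 * locking is unconditional.
 */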

#ifdef JEMALLOC_LAZY_LOCK
extern bool isthreaded;
#else
#  undef isthreaded /* Undo private_namespace.h definition. */
#  define isthreaded true
#endif

bool malloc_mutex_init(malloc_mutex_t *mutex, const char *name,
    witness_rank_t rank);
void malloc_mutex_prefork(tsdn_t *tsdn, malloc_mutex_t *mutex);
void malloc_mutex_postfork_parent(tsdn_t *tsdn, malloc_mutex_t *mutex);
void malloc_mutex_postfork_child(tsdn_t *tsdn, malloc_mutex_t *mutex);
bool malloc_mutex_boot(void);
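
/*
 * Usage sketch (illustrative only; "example_mtx", the "example" name string,
 * the chosen witness rank, and the tsdn argument are hypothetical and must
 * come from the caller's context):
 *
 *    static malloc_mutex_t example_mtx;
 *
 *    if (malloc_mutex_init(&example_mtx, "example", WITNESS_RANK_OMIT))
 *        ... handle initialization failure ...
 *    malloc_mutex_lock(tsdn, &example_mtx);
 *    ... critical section ...
 *    malloc_mutex_unlock(tsdn, &example_mtx);
 *
 * The prefork/postfork functions are meant to be called around fork(2) so
 * that the child does not inherit locks in an inconsistent state.
 */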

#endif /* JEMALLOC_H_EXTERNS */
/******************************************************************************/
#ifdef JEMALLOC_H_INLINES

#ifndef JEMALLOC_ENABLE_INLINE
void malloc_mutex_lock(tsdn_t *tsdn, malloc_mutex_t *mutex);
void malloc_mutex_unlock(tsdn_t *tsdn, malloc_mutex_t *mutex);
void malloc_mutex_assert_owner(tsdn_t *tsdn, malloc_mutex_t *mutex);
void malloc_mutex_assert_not_owner(tsdn_t *tsdn, malloc_mutex_t *mutex);
#endif

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_MUTEX_C_))
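/*
 * The witness calls below are ordered so that ownership is asserted to be
 * absent before the underlying lock is acquired, the witness is marked held
 * only after acquisition, and on unlock the witness is released before the
 * underlying lock; the checks are effectively active only in debug builds.
 */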
JEMALLOC_INLINE void
malloc_mutex_lock(tsdn_t *tsdn, malloc_mutex_t *mutex)
{

    if (isthreaded) {
        witness_assert_not_owner(tsdn, &mutex->witness);
#ifdef _WIN32
#  if _WIN32_WINNT >= 0x0600
        AcquireSRWLockExclusive(&mutex->lock);
#  else
        EnterCriticalSection(&mutex->lock);
#  endif
#elif (defined(JEMALLOC_OS_UNFAIR_LOCK))
        os_unfair_lock_lock(&mutex->lock);
#elif (defined(JEMALLOC_OSSPIN))
        OSSpinLockLock(&mutex->lock);
#else
        pthread_mutex_lock(&mutex->lock);
#endif
        witness_lock(tsdn, &mutex->witness);
    }
}

JEMALLOC_INLINE void
malloc_mutex_unlock(tsdn_t *tsdn, malloc_mutex_t *mutex)
{

    if (isthreaded) {
        witness_unlock(tsdn, &mutex->witness);
#ifdef _WIN32
#  if _WIN32_WINNT >= 0x0600
        ReleaseSRWLockExclusive(&mutex->lock);
#  else
        LeaveCriticalSection(&mutex->lock);
#  endif
#elif (defined(JEMALLOC_OS_UNFAIR_LOCK))
        os_unfair_lock_unlock(&mutex->lock);
#elif (defined(JEMALLOC_OSSPIN))
        OSSpinLockUnlock(&mutex->lock);
#else
        pthread_mutex_unlock(&mutex->lock);
#endif
    }
}

JEMALLOC_INLINE void
malloc_mutex_assert_owner(tsdn_t *tsdn, malloc_mutex_t *mutex)
{

    if (isthreaded)
        witness_assert_owner(tsdn, &mutex->witness);
}

JEMALLOC_INLINE void
malloc_mutex_assert_not_owner(tsdn_t *tsdn, malloc_mutex_t *mutex)
{

    if (isthreaded)
        witness_assert_not_owner(tsdn, &mutex->witness);
}
#endif

#endif /* JEMALLOC_H_INLINES */
/******************************************************************************/